[python-shapely] 02/08: Imported Upstream version 1.6~a1

Bas Couwenberg sebastic at debian.org
Wed Sep 14 17:25:43 UTC 2016


This is an automated email from the git hooks/post-receive script.

sebastic pushed a commit to branch master
in repository python-shapely.

commit d302b486be1dd1a4399587d3d64fd3c6faf501ce
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date:   Wed Sep 14 18:17:42 2016 +0200

    Imported Upstream version 1.6~a1
---
 .dockerignore                            |   5 +
 .gitignore                               |   5 +
 .travis.yml                              |  24 +-
 CHANGES.txt                              |  37 ++
 CITATION.txt                             |  10 +
 CODE_OF_CONDUCT.md                       |  22 +
 CREDITS.txt                              |  40 +-
 Dockerfile.wheels                        |  23 +
 MANIFEST.in                              |   4 +-
 README.rst                               |   3 +
 build-linux-wheels.sh                    |  24 +
 docs/code/minimum_rotated_rectangle.py   |  49 ++
 docs/manual.rst                          | 130 +++++-
 packaging-15.3.dist-info/DESCRIPTION.rst | 121 +++++
 packaging-15.3.dist-info/METADATA        | 141 ++++++
 packaging-15.3.dist-info/RECORD          |  13 +
 packaging-15.3.dist-info/WHEEL           |   6 +
 packaging-15.3.dist-info/metadata.json   |   1 +
 packaging-15.3.dist-info/pbr.json        |   1 +
 packaging-15.3.dist-info/top_level.txt   |   1 +
 packaging/__about__.py                   |  21 +
 packaging/__init__.py                    |  14 +
 packaging/_compat.py                     |  30 ++
 packaging/_structures.py                 |  68 +++
 packaging/markers.py                     | 287 ++++++++++++
 packaging/requirements.py                | 127 +++++
 packaging/specifiers.py                  | 774 +++++++++++++++++++++++++++++++
 packaging/utils.py                       |  14 +
 packaging/version.py                     | 393 ++++++++++++++++
 requirements-dev.txt                     |   2 +-
 setup.cfg                                |   2 +
 setup.py                                 | 160 +++++--
 shapely/__init__.py                      |   2 +-
 shapely/_buildcfg.py                     |   2 +-
 shapely/algorithms/polylabel.py          | 128 +++++
 shapely/ctypes_declarations.py           | 134 +++---
 shapely/geometry/__init__.py             |   3 +
 shapely/geometry/base.py                 |  61 ++-
 shapely/geometry/linestring.py           |   9 +-
 shapely/geometry/polygon.py              |  27 +-
 shapely/geos.py                          |  86 ++--
 shapely/impl.py                          |   1 +
 shapely/ops.py                           | 176 ++++++-
 shapely/speedups/__init__.py             |  33 +-
 shapely/speedups/_speedups.pyx           |  10 +-
 shapely/vectorized/_vectorized.pyx       |  12 +-
 tests/conftest.py                        |   8 +
 tests/test_emptiness.py                  |  10 +
 tests/test_minimum_rotated_rectangle.py  |  33 ++
 tests/test_operations.py                 |  13 +-
 tests/test_polygon.py                    |  12 +
 tests/test_polylabel.py                  |  63 +++
 tests/test_shared_paths.py               |  50 ++
 tests/test_split.py                      | 192 ++++++++
 tests/test_vectorized.py                 |   8 +-
 55 files changed, 3406 insertions(+), 219 deletions(-)

diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..fc3788b
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,5 @@
+# The wheel-building image only depends on requirements-dev.txt. Ignore all
+# other files.
+*
+.*
+!requirements-dev.txt
diff --git a/.gitignore b/.gitignore
index e2e2f2b..e29fcdd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,8 @@ docs/modules.txt
 .idea/
 *.pyd
 *.pdb
+wheels/
+.coverage
+ignore/
+
+venv/
diff --git a/.travis.yml b/.travis.yml
index 5d36913..185a625 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,27 +1,37 @@
 language: python
+sudo: false
+cache: pip
 
 python:
   - "2.6"
   - "2.7"
   - "3.3"
   - "3.4"
+  - "3.5"
 
 env:
   - "TRAVIS_SPEEDUP_OPTS=--with-speedups"
-  - "TRAVIS_SPEEDUP_OPTS="
+  - "TRAVIS_SPEEDUP_OPTS=--without-speedups"
+
+addons:
+  apt:
+    packages:
+      - libgeos-dev
+      - python-numpy
 
 before_install:
-  - sudo add-apt-repository -y ppa:ubuntugis/ppa
-  - sudo apt-get update -qq
-  - sudo apt-get install -qq libgeos-dev python-numpy
   - if [[ $TRAVIS_PYTHON_VERSION == "2.6" ]]; then pip install unittest2; fi
+  - pip install pip setuptools --upgrade
   - pip install --install-option="--no-cython-compile" cython
-  - pip install -r requirements-dev.txt
 
 install:
-  - pip install -e .
+  - pip install -e .[test]
+  - pip install coveralls
 
-script: "py.test tests ${TRAVIS_SPEEDUP_OPTS}"
+script: "py.test --cov shapely --cov-report term-missing ${TRAVIS_SPEEDUP_OPTS}"
 
+after_success:
+  - coveralls || echo "!! intermittent coveralls failure"
+  
 notifications:
     email: false
diff --git a/CHANGES.txt b/CHANGES.txt
index 44b4515..737d30c 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,6 +1,43 @@
 Changes
 =======
 
+1.6a1 (2016-09-14)
+------------------
+
+New features:
+
+- A new error derived from NotImplementedError, with a more useful message, is
+  raised when the GEOS backend doesn't support a called method (#216).
+- The ``project()`` method of LineString has been extended to LinearRing
+  geometries (#286).
+- A new ``minimum_rotated_rectangle`` attribute has been added to the base
+  geometry class (#354).
+- A new ``shapely.ops.polylabel()`` function has been added. It
+  computes a point suited for labeling concave polygons (#395).
+- A new ``shapely.ops.split()`` function has been added. It splits a
+  geometry by another geometry of lesser dimension: polygon by line, line by
+  point (#293, #371).
+- ``Polygon.from_bounds()`` constructs a Polygon from bounding coordinates
+  (#392).
+- Support for testing with Numpy 1.4.1 has been added (#301).
+- Support creating all kinds of empty geometries from empty lists of Python
+  objects (#397, #404).
+
+Refactoring:
+
+- Switch from ``SingleSidedBuffer()`` to ``OffsetCurve()`` for GEOS >= 3.3
+  (#270).
+- Cython speedups are now enabled by default (#252).
+
+Packaging:
+
+- Packaging 16.7, a setup dependency, is vendorized (#314).
+- Infrastructure for building manylinux1 wheels has been added (#391).
+- The system's ``geos-config`` program is now only checked when ``setup.py``
+  is executed, never during normal use of the module (#244).
+- Added new library search paths to assist PyInstaller (#382) and Windows
+  (#343).
+
 1.5.17 (2016-08-31)
 -------------------
 - Bug fix: eliminate memory leak in geom_factory() (#408).
diff --git a/CITATION.txt b/CITATION.txt
new file mode 100644
index 0000000..b3ea848
--- /dev/null
+++ b/CITATION.txt
@@ -0,0 +1,10 @@
+If you use Shapely for any published work, please cite it using the reference
+below:
+
+ at Misc{,
+  author =    {Sean Gillies and others},
+  organization = {toblerity.org},
+  title =     {Shapely: manipulation and analysis of geometric objects},
+  year =      {2007--},
+  url = "https://github.com/Toblerity/Shapely"
+}
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..01b8644
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,22 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery
+* Personal attacks
+* Trolling or insulting/derogatory comments
+* Public or private harassment
+* Publishing other's private information, such as physical or electronic addresses, without explicit permission
+* Other unethical or unprofessional conduct.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team.
+
+This code of conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers.
+
+This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
diff --git a/CREDITS.txt b/CREDITS.txt
index 853c953..abca549 100644
--- a/CREDITS.txt
+++ b/CREDITS.txt
@@ -5,44 +5,50 @@ Shapely is written by:
 
 * Sean Gillies <sean.gillies at gmail.com>
 * Oliver Tonnhofer <olt at bogosoft.com>
-* Mike Toews <mwtoews at gmail.com>
 * Joshua Arnott <josh at snorfalorpagus.net>
+* Mike Toews <mwtoews at gmail.com>
 * Jacob Wasserman <jwasserman at gmail.com>
-* Aron Bierbaum <aronbierbaum at gmail.com>
+* aronbierbaum <aronbierbaum at b426a367-1105-0410-b9ff-cdf4ab011145>
+* Allan Adair <allan at rfspot.com>
 * Johannes Schönberger <jschoenberger at demuc.de>
+* georgeouzou <geothrock at gmail.com>
 * Phil Elson <pelson.pub at gmail.com>
 * Howard Butler <hobu.inc at gmail.com>
 * dokai <dokai at b426a367-1105-0410-b9ff-cdf4ab011145>
 * Gabi Davar <grizzly.nyo at gmail.com>
+* Kevin Wurster <kevin at skytruth.org>
 * Kelsey Jordahl <kjordahl at enthought.com>
 * Dave Collins <dave at hopest.net>
+* fredj <frederic.junod at camptocamp.com>
+* Brad Hards <bradh at frogmouth.net>
+* David Baumgold <david at davidbaumgold.com>
+* Henry Walshaw <henry.walshaw at gmail.com>
 * Jinkun Wang <mejkunw at gmail.com>
 * Marc Jansen <jansen at terrestris.de>
-* Henry Walshaw <henry.walshaw at gmail.com>
-* David Baumgold <david at davidbaumgold.com>
 * Sampo Syrjanen <sampo.syrjanen at here.com>
 * Steve M. Kim <steve at climate.com>
 * Thomas Kluyver <takowl at gmail.com>
-* Brad Hards <bradh at frogmouth.net>
-* Allan Adair <allan.m.adair at gmail.com>
-* fredj <frederic.junod at camptocamp.com>
+* Morris Tweed <tweed.morris at gmail.com>
 * Naveen Michaud-Agrawal <naveen.michaudagrawal at gmail.com>
-* Peter Sagerson <psagers.github at ignorare.net>
-* BertrandGervais <bertrand.gervais.pro at gmail.com>
 * Jeethu Rao <jeethu at jeethurao.com>
+* Peter Sagerson <psagers.github at ignorare.net>
 * Jason Sanford <jason.sanford at mapmyfitness.com>
-* Brandon Wood <btwood at geometeor.com>
-* Stephan Hügel <urschrei at gmail.com>
-* Johan Euphrosine <proppy at aminche.com>
-* mindw <grizzly.nyo at gmail.com>
+* Jamie Hall <jamie1212 at gmail.com>
 * James Spencer <james.s.spencer at gmail.com>
+* Stephan Hügel <urschrei at gmail.com>
 * Benjamin Root <ben.v.root at gmail.com>
-* Leandro Lima <leandro at limaesilva.com.br>
-* Maarten Vermeyen <maarten.vermeyen at rwo.vlaanderen.be>
 * Tobias Sauerwein <tobias.sauerwein at camptocamp.com>
-* James Douglass <jamesdouglassusa at gmail.com>
-* Morris Tweed <tweed.morris at gmail.com>]
 * WANG Aiyong <gepcelway at gmail.com>
+* James Douglass <jamesdouglassusa at gmail.com>
+* Brandon Wood <btwood at geometeor.com>
+* Andy Freeland <andy at andyfreeland.net>
+* BertrandGervais <bertrand.gervais.pro at gmail.com>
+* giumas <gmasetti at ccom.unh.edu>
+* Leandro Lima <leandro at limaesilva.com.br>
+* Maarten Vermeyen <maarten.vermeyen at rwo.vlaanderen.be>
+* joelostblom <joelostblom at users.noreply.github.com>
+* Marco De Nadai <me at marcodena.it>
+* Johan Euphrosine <proppy at aminche.com>
 
 See also: https://github.com/Toblerity/Shapely/graphs/contributors.
 
diff --git a/Dockerfile.wheels b/Dockerfile.wheels
new file mode 100644
index 0000000..ff43952
--- /dev/null
+++ b/Dockerfile.wheels
@@ -0,0 +1,23 @@
+FROM quay.io/pypa/manylinux1_x86_64
+
+ENV GEOS_VERSION 3.5.0
+
+# Install geos
+RUN mkdir -p /src \
+    && cd /src \
+    && curl -f -L -O http://download.osgeo.org/geos/geos-$GEOS_VERSION.tar.bz2 \
+    && tar jxf geos-$GEOS_VERSION.tar.bz2 \
+    && cd /src/geos-$GEOS_VERSION \
+    && ./configure \
+    && make \
+    && make install \
+    && rm -rf /src
+
+# Bake dev requirements into the Docker image for faster builds
+ADD requirements-dev.txt /tmp/requirements-dev.txt
+RUN for PYBIN in /opt/python/*/bin; do \
+        $PYBIN/pip install -r /tmp/requirements-dev.txt ; \
+    done
+
+WORKDIR /io
+CMD ["/io/build-linux-wheels.sh"]
diff --git a/MANIFEST.in b/MANIFEST.in
index f542522..2903f9e 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -6,9 +6,11 @@ prune DLLs_x86
 exclude *.txt
 exclude MANIFEST.in
 include CHANGES.txt CREDITS.txt LICENSE.txt README.rst VERSION.txt
+recursive-include packaging *.py
+recursive-include packaging-15.3.dist-info *
 recursive-include tests *.py *.txt
 recursive-include shapely/examples *.py
 recursive-include shapely/speedups *.pyx
 recursive-include shapely/vectorized *.pyx
-include shapely/_geos.pxi
+recursive-include shapely *.pxi
 include docs/*.rst
diff --git a/README.rst b/README.rst
index f80bba3..8bbbff9 100644
--- a/README.rst
+++ b/README.rst
@@ -7,6 +7,9 @@ Manipulation and analysis of geometric objects in the Cartesian plane.
 .. image:: https://travis-ci.org/Toblerity/Shapely.png?branch=master
    :target: https://travis-ci.org/Toblerity/Shapely
 
+.. image:: https://coveralls.io/repos/github/Toblerity/Shapely/badge.svg?branch=master
+   :target: https://coveralls.io/github/Toblerity/Shapely?branch=master
+
 .. image:: http://farm3.staticflickr.com/2738/4511827859_b5822043b7_o_d.png
    :width: 800
    :height: 400
diff --git a/build-linux-wheels.sh b/build-linux-wheels.sh
new file mode 100755
index 0000000..d56a2ae
--- /dev/null
+++ b/build-linux-wheels.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+set -eu
+
+# Checking for /.dockerenv is a hacky way to determine whether or not we're
+# already running in a Docker container. Note that this is not guaranteed to
+# exist in all versions and drivers and may need to be changed later.
+if [ ! -e /.dockerenv ]; then
+    docker build -f Dockerfile.wheels --pull -t shapely-wheelbuilder .
+    exec docker run -v `pwd`:/io shapely-wheelbuilder "$@"
+fi
+
+ORIGINAL_PATH=$PATH
+UNREPAIRED_WHEELS=/tmp/wheels
+
+# Compile wheels
+for PYBIN in /opt/python/*/bin; do
+    PATH=${PYBIN}:$ORIGINAL_PATH
+    python setup.py bdist_wheel -d ${UNREPAIRED_WHEELS}
+done
+
+# Bundle GEOS into the wheels
+for whl in ${UNREPAIRED_WHEELS}/*.whl; do
+    auditwheel repair ${whl} -w wheels
+done
diff --git a/docs/code/minimum_rotated_rectangle.py b/docs/code/minimum_rotated_rectangle.py
new file mode 100644
index 0000000..7eb672c
--- /dev/null
+++ b/docs/code/minimum_rotated_rectangle.py
@@ -0,0 +1,49 @@
+from shapely.geometry import MultiPoint, Polygon, LineString
+import matplotlib.pyplot as plt
+from descartes.patch import PolygonPatch
+
+from figures import SIZE
+
+fig = plt.figure(1, figsize=SIZE, dpi=90)
+fig.set_frameon(True)
+
+# 1
+ax = fig.add_subplot(121)
+
+mp = MultiPoint([(0, 0), (0.5, 1.5), (1, 0.5), (0.5, 0.5)])
+rect = mp.minimum_rotated_rectangle
+
+for p in mp:
+	ax.plot(p.x, p.y, 'o', color='#999999')
+patch = PolygonPatch(rect, facecolor='#6699cc', edgecolor='#6699cc', alpha=0.5, zorder=2)
+ax.add_patch(patch)
+ax.set_title('a) MultiPoint')
+
+xr = [-1, 2]
+yr = [-1, 2]
+ax.set_xlim(*xr)
+ax.set_xticks(range(*xr) + [xr[-1]])
+ax.set_ylim(*yr)
+ax.set_yticks(range(*yr) + [yr[-1]])
+ax.set_aspect(1)
+
+# 2
+ax = fig.add_subplot(122)
+ls = LineString([(-0.5, 1.2), (0.5, 0), (1, 1), (1.5, 0), (1.5, 0.5)])
+rect = ls.minimum_rotated_rectangle
+
+ax.plot(*ls.xy, color='#333333', linewidth=3, alpha=0.5, zorder=2)
+patch = PolygonPatch(rect, facecolor='#6699cc', edgecolor='#6699cc', alpha=0.5, zorder=2)
+ax.add_patch(patch)
+
+xr = [-1, 2]
+yr = [-1, 2]
+ax.set_xlim(*xr)
+ax.set_xticks(range(*xr) + [xr[-1]])
+ax.set_ylim(*yr)
+ax.set_yticks(range(*yr) + [yr[-1]])
+ax.set_aspect(1)
+
+ax.set_title('b) LineString')
+
+plt.show()
diff --git a/docs/manual.rst b/docs/manual.rst
index 07971b8..244c9c0 100644
--- a/docs/manual.rst
+++ b/docs/manual.rst
@@ -207,6 +207,23 @@ General Attributes and Methods
   >>> Point(0,0).distance(Point(1,1))
   1.4142135623730951
 
+.. method:: object.hausdorff_distance(other)
+
+  Returns the Hausdorff distance (``float``) to the `other` geometric object.
+  The Hausdorff distance is the furthest distance from any point on the first
+  geometry to any point on the second geometry.
+
+  `New in Shapely 1.6.0`
+
+.. code-block:: pycon
+
+  >>> point = Point(1, 1)
+  >>> line = LineString([(2, 0), (2, 4), (3, 4)])
+  >>> point.hausdorff_distance(line)
+  3.605551275463989
+  >>> point.distance(Point(3, 4))
+  3.605551275463989
+
 .. method:: object.representative_point()
 
   Returns a cheaply computed point that is guaranteed to be within the
@@ -1122,7 +1139,7 @@ This predicate applies to all types and is the inverse of :meth:`intersects`.
 
 .. method:: object.intersects(other)
 
-  Returns ``True`` if the `boundary` and `interior` of the object intersect in
+  Returns ``True`` if the `boundary` or `interior` of the object intersect in
   any way with those of the other.
 
 In other words, geometric objects intersect if they have any boundary or 
@@ -1559,6 +1576,27 @@ Figure 10. Convex hull (blue) of 2 points (left) and of 6 points (right).
   >>> MultiPoint([(0, 0), (1, 1)]).envelope
   <shapely.geometry.polygon.Polygon object at 0x...>
 
+.. attribute:: object.minimum_rotated_rectangle
+  
+  Returns the general minimum bounding rectangle that contains the object. 
+  Unlike envelope this rectangle is not constrained to be parallel to the 
+  coordinate axes. If the convex hull of the object is a degenerate (line or point) 
+  this degenerate is returned.
+
+  `New in Shapely 1.6.0`
+
+.. code-block:: pycon
+  
+  >>> Point(0, 0).minimum_rotated_rectangle
+  <shapely.geometry.point.Point object at 0x...>
+  >>> MultiPoint([(0,0),(1,1),(2,0.5)]).minimum_rotated_rectangle
+  <shapely.geometry.polygon.Polygon object at 0x...>
+
+.. plot:: code/minimum_rotated_rectangle.py
+
+Figure 11. Minimum rotated rectangle for a multipoint feature (left) and a 
+linestring feature (right).
+
 .. method:: object.parallel_offset(distance, side, resolution=16, join_style=1, mitre_limit=5.0)
 
   Returns a LineString or MultiLineString geometry at a distance from the
@@ -1583,7 +1621,7 @@ Figure 10. Convex hull (blue) of 2 points (left) and of 6 points (right).
 
 .. plot:: code/parallel_offset.py
 
-Figure 11. Three styles of parallel offset lines on the left side of a simple
+Figure 12. Three styles of parallel offset lines on the left side of a simple
 line string (its starting point shown as a circle) and one offset on the right
 side, a multipart.
 
@@ -1591,7 +1629,7 @@ The effect of the `mitre_limit` parameter is shown below.
 
 .. plot:: code/parallel_offset_mitre.py
 
-Figure 12. Large and small mitre_limit values for left and right offsets.
+Figure 13. Large and small mitre_limit values for left and right offsets.
 
 .. method:: object.simplify(tolerance, preserve_topology=True)
 
@@ -1618,7 +1656,7 @@ Douglas-Peucker algorithm [6]_ is used.
 
 .. plot:: code/simplify.py
 
-Figure 13. Simplification of a nearly circular polygon using a tolerance of 0.2
+Figure 14. Simplification of a nearly circular polygon using a tolerance of 0.2
 (left) and 0.5 (right).
 
 .. note::
@@ -1726,7 +1764,7 @@ preserved or supported by 3D affine transformations.
 
   .. plot:: code/rotate.py
 
-  Figure 14. Rotation of a `LineString` (gray) by an angle of 90°
+  Figure 15. Rotation of a `LineString` (gray) by an angle of 90°
   counter-clockwise (blue) using different origins.
 
 .. function:: shapely.affinity.scale(geom, xfact=1.0, yfact=1.0, zfact=1.0, origin='center')
@@ -1768,7 +1806,7 @@ preserved or supported by 3D affine transformations.
 
   .. plot:: code/scale.py
 
-  Figure 15. Scaling of a gray triangle to blue result: a) by a factor of 1.5
+  Figure 16. Scaling of a gray triangle to blue result: a) by a factor of 1.5
   along x-direction, with reflection across y-axis; b) by a factor of 2 along
   x-direction with custom origin at (1, 1).
 
@@ -1800,7 +1838,7 @@ preserved or supported by 3D affine transformations.
 
   .. plot:: code/skew.py
 
-  Figure 16. Skewing of a gray "R" to blue result: a) by a shear angle of 20°
+  Figure 17. Skewing of a gray "R" to blue result: a) by a shear angle of 20°
   along the x-direction and an origin at (1, 1); b) by a shear angle of 30°
   along the y-direction, using default origin.
 
@@ -2080,6 +2118,65 @@ one geometry to the vertices in a second geometry with a given tolerance.
   >>> result.wkt
   'LINESTRING (0 0, 1 1, 2 1, 2.6 0.5)'
 
+Shared paths
+------------
+
+The :func:`~shapely.ops.shared_paths` function in `shapely.ops` finds the shared
+paths between two lineal geometries.
+
+.. function:: shapely.ops.shared_paths(geom1, geom2)
+
+   Finds the shared paths between `geom1` and `geom2`, where both geometries
+   are `LineStrings`.
+   
+   A `GeometryCollection` is returned with two elements. The first element is a
+   `MultiLineString` containing shared paths with the same direction for both
+   inputs. The second element is a MultiLineString containing shared paths with
+   the opposite direction for the two inputs.
+   
+   `New in version 1.6.0`
+
+.. code-block:: pycon
+
+  >>> from shapely.ops import shared_paths
+  >>> g1 = LineString([(0, 0), (10, 0), (10, 5), (20, 5)])
+  >>> g2 = LineString([(5, 0), (30, 0), (30, 5), (0, 5)])
+  >>> forward, backward = shared_paths(g1, g2)
+  >>> forward.wkt
+  'MULTILINESTRING ((5 0, 10 0))'
+  >>> backward.wkt
+  'MULTILINESTRING ((10 5, 20 5))'
+
+Splitting
+---------
+
+The :func:`~shapely.ops.split` function in `shapely.ops` splits a geometry by another geometry.
+
+.. function:: shapely.ops.split(geom, splitter)
+
+   Splits a geometry by another geometry and returns a collection of geometries. This function is the theoretical
+   opposite of the union of the split geometry parts. If the splitter does not split the geometry, a  collection with a single geometry equal to the input geometry is returned.
+  
+   The function supports:
+
+   * Splitting a (Multi)LineString by a (Multi)Point or (Multi)LineString or (Multi)Polygon boundary
+   
+   * Splitting a (Multi)Polygon by a LineString
+
+   It may be convenient to snap the splitter with low tolerance to the geometry. For example in the case of splitting a line by a point, the point must be exactly on the line, for the line to be correctly split.
+   When splitting a line by a polygon, the boundary of the polygon is used for the operation.
+   When splitting a line by another line, a ValueError is raised if the two overlap at some segment.
+
+   `New in version 1.6.0`
+
+.. code-block:: pycon
+
+  >>> pt = Point((1, 1))
+  >>> line = LineString([(0,0), (2,2)])
+  >>> result = split(line, pt)
+  >>> result.wkt
+  'GEOMETRYCOLLECTION (LINESTRING (0 0, 1 1), LINESTRING (1 1, 2 2))'
+
 Prepared Geometry Operations
 ----------------------------
 
@@ -2370,13 +2467,12 @@ involves some overhead that might slow down your code.
 .. versionadded:: 1.2.10
 
 The :mod:`shapely.speedups` module contains performance enhancements written in
-C. They are automaticaly installed when Python has access to a compiler and
-GEOS development headers during installation.
+C. They are automatically installed when Python has access to a compiler and
+GEOS development headers during installation. 
 
 You can check if the speedups are installed with the :attr:`available`
-attribute. The constructor speedups are disabled by default. To enable the
-speedups call :func:`enable`. You can revert to the default implementation with
-:func:`disable`.
+attribute. To enable the speedups call :func:`enable`. You can revert to the
+default implementation with :func:`disable`.
 
 .. code-block:: pycon
 
@@ -2385,6 +2481,16 @@ speedups call :func:`enable`. You can revert to the default implementation with
   True
   >>> speedups.enable()
 
+.. versionadded:: 1.6.0
+
+Speedups are now enabled by default if they are available. You can check if
+speedups are enabled with the :attr:`enabled` attribute.
+
+.. code-block:: pycon
+
+  >>> from shapely import speedups
+  >>> speedups.enabled
+  True
 
 Conclusion
 ==========
diff --git a/packaging-15.3.dist-info/DESCRIPTION.rst b/packaging-15.3.dist-info/DESCRIPTION.rst
new file mode 100644
index 0000000..e373b33
--- /dev/null
+++ b/packaging-15.3.dist-info/DESCRIPTION.rst
@@ -0,0 +1,121 @@
+packaging
+=========
+
+Core utilities for Python packages
+
+
+Documentation
+-------------
+
+`documentation`_
+
+
+Discussion
+----------
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+You can also join ``#pypa`` on Freenode to ask questions or get involved.
+
+
+.. _`documentation`: https://packaging.pypa.io/
+.. _`issue tracker`: https://github.com/pypa/packaging/issues
+
+Changelog
+---------
+
+15.3 - 2015-08-01
+~~~~~~~~~~~~~~~~~
+
+* Normalize post-release spellings for rev/r prefixes. :issue:`35`
+
+
+15.2 - 2015-05-13
+~~~~~~~~~~~~~~~~~
+
+* Fix an error where the arbitrary specifier (``===``) was not correctly
+  allowing pre-releases when it was being used.
+
+* Expose the specifier and version parts through properties on the
+  ``Specifier`` classes.
+
+* Allow iterating over the ``SpecifierSet`` to get access to all of the
+  ``Specifier`` instances.
+
+* Allow testing if a version is contained within a specifier via the ``in``
+  operator.
+
+
+15.1 - 2015-04-13
+~~~~~~~~~~~~~~~~~
+
+* Fix a logic error that was causing inconsistent answers about whether or not
+  a pre-release was contained within a ``SpecifierSet`` or not.
+
+
+15.0 - 2015-01-02
+~~~~~~~~~~~~~~~~~
+
+* Add ``Version().is_postrelease`` and ``LegacyVersion().is_postrelease`` to
+  make it easy to determine if a release is a post release.
+
+* Add ``Version().base_version`` and ``LegacyVersion().base_version`` to make
+  it easy to get the public version without any pre or post release markers.
+
+* Support the update to PEP 440 which removed the implied ``!=V.*`` when using
+  either ``>V`` or ``<V`` and which instead special cased the handling of
+  pre-releases, post-releases, and local versions when using ``>V`` or ``<V``.
+
+
+14.5 - 2014-12-17
+~~~~~~~~~~~~~~~~~
+
+* Normalize release candidates as ``rc`` instead of ``c``.
+
+* Expose the ``VERSION_PATTERN`` constant, a regular expression matching
+  a valid version.
+
+
+14.4 - 2014-12-15
+~~~~~~~~~~~~~~~~~
+
+* Ensure that versions are normalized before comparison when used in a
+  specifier with a less than (``<``) or greater than (``>``) operator.
+
+
+14.3 - 2014-11-19
+~~~~~~~~~~~~~~~~~
+
+* **BACKWARDS INCOMPATIBLE** Refactor specifier support so that it can sanely
+  handle legacy specifiers as well as PEP 440 specifiers.
+
+* **BACKWARDS INCOMPATIBLE** Move the specifier support out of
+  ``packaging.version`` into ``packaging.specifiers``.
+
+
+14.2 - 2014-09-10
+~~~~~~~~~~~~~~~~~
+
+* Add prerelease support to ``Specifier``.
+* Remove the ability to do ``item in Specifier()`` and replace it with
+  ``Specifier().contains(item)`` in order to allow flags that signal if a
+  prerelease should be accepted or not.
+* Add a method ``Specifier().filter()`` which will take an iterable and returns
+  an iterable with items that do not match the specifier filtered out.
+
+
+14.1 - 2014-09-08
+~~~~~~~~~~~~~~~~~
+
+* Allow ``LegacyVersion`` and ``Version`` to be sorted together.
+* Add ``packaging.version.parse()`` to enable easily parsing a version string
+  as either a ``Version`` or a ``LegacyVersion`` depending on its PEP 440
+  validity.
+
+
+14.0 - 2014-09-05
+~~~~~~~~~~~~~~~~~
+
+* Initial release.
+
+
diff --git a/packaging-15.3.dist-info/METADATA b/packaging-15.3.dist-info/METADATA
new file mode 100644
index 0000000..a353e5a
--- /dev/null
+++ b/packaging-15.3.dist-info/METADATA
@@ -0,0 +1,141 @@
+Metadata-Version: 2.0
+Name: packaging
+Version: 15.3
+Summary: Core utilities for Python packages
+Home-page: https://github.com/pypa/packaging
+Author: Donald Stufft
+Author-email: donald at stufft.io
+License: Apache License, Version 2.0
+Platform: UNKNOWN
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+
+packaging
+=========
+
+Core utilities for Python packages
+
+
+Documentation
+-------------
+
+`documentation`_
+
+
+Discussion
+----------
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+You can also join ``#pypa`` on Freenode to ask questions or get involved.
+
+
+.. _`documentation`: https://packaging.pypa.io/
+.. _`issue tracker`: https://github.com/pypa/packaging/issues
+
+Changelog
+---------
+
+15.3 - 2015-08-01
+~~~~~~~~~~~~~~~~~
+
+* Normalize post-release spellings for rev/r prefixes. :issue:`35`
+
+
+15.2 - 2015-05-13
+~~~~~~~~~~~~~~~~~
+
+* Fix an error where the arbitrary specifier (``===``) was not correctly
+  allowing pre-releases when it was being used.
+
+* Expose the specifier and version parts through properties on the
+  ``Specifier`` classes.
+
+* Allow iterating over the ``SpecifierSet`` to get access to all of the
+  ``Specifier`` instances.
+
+* Allow testing if a version is contained within a specifier via the ``in``
+  operator.
+
+
+15.1 - 2015-04-13
+~~~~~~~~~~~~~~~~~
+
+* Fix a logic error that was causing inconsistent answers about whether or not
+  a pre-release was contained within a ``SpecifierSet`` or not.
+
+
+15.0 - 2015-01-02
+~~~~~~~~~~~~~~~~~
+
+* Add ``Version().is_postrelease`` and ``LegacyVersion().is_postrelease`` to
+  make it easy to determine if a release is a post release.
+
+* Add ``Version().base_version`` and ``LegacyVersion().base_version`` to make
+  it easy to get the public version without any pre or post release markers.
+
+* Support the update to PEP 440 which removed the implied ``!=V.*`` when using
+  either ``>V`` or ``<V`` and which instead special cased the handling of
+  pre-releases, post-releases, and local versions when using ``>V`` or ``<V``.
+
+
+14.5 - 2014-12-17
+~~~~~~~~~~~~~~~~~
+
+* Normalize release candidates as ``rc`` instead of ``c``.
+
+* Expose the ``VERSION_PATTERN`` constant, a regular expression matching
+  a valid version.
+
+
+14.4 - 2014-12-15
+~~~~~~~~~~~~~~~~~
+
+* Ensure that versions are normalized before comparison when used in a
+  specifier with a less than (``<``) or greater than (``>``) operator.
+
+
+14.3 - 2014-11-19
+~~~~~~~~~~~~~~~~~
+
+* **BACKWARDS INCOMPATIBLE** Refactor specifier support so that it can sanely
+  handle legacy specifiers as well as PEP 440 specifiers.
+
+* **BACKWARDS INCOMPATIBLE** Move the specifier support out of
+  ``packaging.version`` into ``packaging.specifiers``.
+
+
+14.2 - 2014-09-10
+~~~~~~~~~~~~~~~~~
+
+* Add prerelease support to ``Specifier``.
+* Remove the ability to do ``item in Specifier()`` and replace it with
+  ``Specifier().contains(item)`` in order to allow flags that signal if a
+  prerelease should be accepted or not.
+* Add a method ``Specifier().filter()`` which will take an iterable and returns
+  an iterable with items that do not match the specifier filtered out.
+
+
+14.1 - 2014-09-08
+~~~~~~~~~~~~~~~~~
+
+* Allow ``LegacyVersion`` and ``Version`` to be sorted together.
+* Add ``packaging.version.parse()`` to enable easily parsing a version string
+  as either a ``Version`` or a ``LegacyVersion`` depending on its PEP 440
+  validity.
+
+
+14.0 - 2014-09-05
+~~~~~~~~~~~~~~~~~
+
+* Initial release.
+
+
diff --git a/packaging-15.3.dist-info/RECORD b/packaging-15.3.dist-info/RECORD
new file mode 100644
index 0000000..eed1cde
--- /dev/null
+++ b/packaging-15.3.dist-info/RECORD
@@ -0,0 +1,13 @@
+packaging/__about__.py,sha256=YzdrW-1lWmyCBDyrcNkZbJo4tiDWXpoiqPjfyCYMzIE,1073
+packaging/__init__.py,sha256=2V8n-eEpSgBuXlV8hlMmhU7ZklpsrrusWMZNp2gC4Hs,906
+packaging/_compat.py,sha256=wofog8iYo_zudt_10i6JiXKHDs5GhCuXC09hCuSJiv4,1253
+packaging/_structures.py,sha256=93YvgrEE2HgFp8AdXy0pwCRVnZeutRHO_-puJ7T0cPw,1809
+packaging/specifiers.py,sha256=UV9T01_kKloA8PSeMI3HTYBSJ_4KLs00yLvrlciZ3yU,28079
+packaging/version.py,sha256=dEGrWZJZ6sef1xMxSfDCego2hS3Q86by0hUIFVk-AGc,11949
+packaging-15.3.dist-info/DESCRIPTION.rst,sha256=Gd817S15Dv8sdxUJ9pM4t5Gq_ZYflsJYB64gItd9vM8,3108
+packaging-15.3.dist-info/METADATA,sha256=VV19NM69tso91JP9TM8Pdq3CDomcP6bdn-0q3YUeNvE,3854
+packaging-15.3.dist-info/metadata.json,sha256=RpvEfDwiZ5KQreMlvUvsqF-u5MCqXRNIVml3neejEj0,859
+packaging-15.3.dist-info/pbr.json,sha256=8NuqguCaMTQfH_1HqSV4DBKcPE0Uy_lxkONIfDvWT7Y,46
+packaging-15.3.dist-info/RECORD,,
+packaging-15.3.dist-info/top_level.txt,sha256=zFdHrhWnPslzsiP455HutQsqPB6v0KCtNUMtUtrefDw,10
+packaging-15.3.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110
diff --git a/packaging-15.3.dist-info/WHEEL b/packaging-15.3.dist-info/WHEEL
new file mode 100644
index 0000000..9dff69d
--- /dev/null
+++ b/packaging-15.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.24.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/packaging-15.3.dist-info/metadata.json b/packaging-15.3.dist-info/metadata.json
new file mode 100644
index 0000000..e4b5041
--- /dev/null
+++ b/packaging-15.3.dist-info/metadata.json
@@ -0,0 +1 @@
+{"license": "Apache License, Version 2.0", "summary": "Core utilities for Python packages", "extensions": {"python.details": {"project_urls": {"Home": "https://github.com/pypa/packaging"}, "contacts": [{"role": "author", "name": "Donald Stufft", "email": "donald at stufft.io"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "generator": "bdist_wheel (0.24.0)", "version": "15.3", "metadata_version": "2.0", "name": "packaging", "classifiers": ["Intended Audience :: Developers", "Lic [...]
\ No newline at end of file
diff --git a/packaging-15.3.dist-info/pbr.json b/packaging-15.3.dist-info/pbr.json
new file mode 100644
index 0000000..693661c
--- /dev/null
+++ b/packaging-15.3.dist-info/pbr.json
@@ -0,0 +1 @@
+{"git_version": "099fe4e", "is_release": true}
\ No newline at end of file
diff --git a/packaging-15.3.dist-info/top_level.txt b/packaging-15.3.dist-info/top_level.txt
new file mode 100644
index 0000000..748809f
--- /dev/null
+++ b/packaging-15.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+packaging
diff --git a/packaging/__about__.py b/packaging/__about__.py
new file mode 100644
index 0000000..c21a758
--- /dev/null
+++ b/packaging/__about__.py
@@ -0,0 +1,21 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+__all__ = [
+    "__title__", "__summary__", "__uri__", "__version__", "__author__",
+    "__email__", "__license__", "__copyright__",
+]
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "16.7"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald at stufft.io"
+
+__license__ = "BSD or Apache License, Version 2.0"
+__copyright__ = "Copyright 2014-2016 %s" % __author__
diff --git a/packaging/__init__.py b/packaging/__init__.py
new file mode 100644
index 0000000..5ee6220
--- /dev/null
+++ b/packaging/__init__.py
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+from .__about__ import (
+    __author__, __copyright__, __email__, __license__, __summary__, __title__,
+    __uri__, __version__
+)
+
+__all__ = [
+    "__title__", "__summary__", "__uri__", "__version__", "__author__",
+    "__email__", "__license__", "__copyright__",
+]
diff --git a/packaging/_compat.py b/packaging/_compat.py
new file mode 100644
index 0000000..210bb80
--- /dev/null
+++ b/packaging/_compat.py
@@ -0,0 +1,30 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+
+# flake8: noqa
+
+if PY3:
+    string_types = str,
+else:
+    string_types = basestring,
+
+
+def with_metaclass(meta, *bases):
+    """
+    Create a base class with a metaclass.
+    """
+    # This requires a bit of explanation: the basic idea is to make a dummy
+    # metaclass for one level of class instantiation that replaces itself with
+    # the actual metaclass.
+    class metaclass(meta):
+        def __new__(cls, name, this_bases, d):
+            return meta(name, bases, d)
+    return type.__new__(metaclass, 'temporary_class', (), {})
diff --git a/packaging/_structures.py b/packaging/_structures.py
new file mode 100644
index 0000000..ccc2786
--- /dev/null
+++ b/packaging/_structures.py
@@ -0,0 +1,68 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+
+class Infinity(object):
+
+    def __repr__(self):
+        return "Infinity"
+
+    def __hash__(self):
+        return hash(repr(self))
+
+    def __lt__(self, other):
+        return False
+
+    def __le__(self, other):
+        return False
+
+    def __eq__(self, other):
+        return isinstance(other, self.__class__)
+
+    def __ne__(self, other):
+        return not isinstance(other, self.__class__)
+
+    def __gt__(self, other):
+        return True
+
+    def __ge__(self, other):
+        return True
+
+    def __neg__(self):
+        return NegativeInfinity
+
+Infinity = Infinity()
+
+
+class NegativeInfinity(object):
+
+    def __repr__(self):
+        return "-Infinity"
+
+    def __hash__(self):
+        return hash(repr(self))
+
+    def __lt__(self, other):
+        return True
+
+    def __le__(self, other):
+        return True
+
+    def __eq__(self, other):
+        return isinstance(other, self.__class__)
+
+    def __ne__(self, other):
+        return not isinstance(other, self.__class__)
+
+    def __gt__(self, other):
+        return False
+
+    def __ge__(self, other):
+        return False
+
+    def __neg__(self):
+        return Infinity
+
+NegativeInfinity = NegativeInfinity()
diff --git a/packaging/markers.py b/packaging/markers.py
new file mode 100644
index 0000000..d321c7f
--- /dev/null
+++ b/packaging/markers.py
@@ -0,0 +1,287 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import operator
+import os
+import platform
+import sys
+
+from pyparsing import ParseException, ParseResults, stringStart, stringEnd
+from pyparsing import ZeroOrMore, Group, Forward, QuotedString
+from pyparsing import Literal as L  # noqa
+
+from ._compat import string_types
+from .specifiers import Specifier, InvalidSpecifier
+
+
+__all__ = [
+    "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
+    "Marker", "default_environment",
+]
+
+
+class InvalidMarker(ValueError):
+    """
+    An invalid marker was found, users should refer to PEP 508.
+    """
+
+
+class UndefinedComparison(ValueError):
+    """
+    An invalid operation was attempted on a value that doesn't support it.
+    """
+
+
+class UndefinedEnvironmentName(ValueError):
+    """
+    A name was attempted to be used that does not exist inside of the
+    environment.
+    """
+
+
+class Node(object):
+
+    def __init__(self, value):
+        self.value = value
+
+    def __str__(self):
+        return str(self.value)
+
+    def __repr__(self):
+        return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
+
+
+class Variable(Node):
+    pass
+
+
+class Value(Node):
+    pass
+
+
+VARIABLE = (
+    L("implementation_version") |
+    L("platform_python_implementation") |
+    L("implementation_name") |
+    L("python_full_version") |
+    L("platform_release") |
+    L("platform_version") |
+    L("platform_machine") |
+    L("platform_system") |
+    L("python_version") |
+    L("sys_platform") |
+    L("os_name") |
+    L("os.name") |  # PEP-345
+    L("sys.platform") |  # PEP-345
+    L("platform.version") |  # PEP-345
+    L("platform.machine") |  # PEP-345
+    L("platform.python_implementation") |  # PEP-345
+    L("python_implementation") |  # undocumented setuptools legacy
+    L("extra")
+)
+ALIASES = {
+    'os.name': 'os_name',
+    'sys.platform': 'sys_platform',
+    'platform.version': 'platform_version',
+    'platform.machine': 'platform_machine',
+    'platform.python_implementation': 'platform_python_implementation',
+    'python_implementation': 'platform_python_implementation'
+}
+VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
+
+VERSION_CMP = (
+    L("===") |
+    L("==") |
+    L(">=") |
+    L("<=") |
+    L("!=") |
+    L("~=") |
+    L(">") |
+    L("<")
+)
+
+MARKER_OP = VERSION_CMP | L("not in") | L("in")
+
+MARKER_VALUE = QuotedString("'") | QuotedString('"')
+MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
+
+BOOLOP = L("and") | L("or")
+
+MARKER_VAR = VARIABLE | MARKER_VALUE
+
+MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
+MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
+
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+
+MARKER_EXPR = Forward()
+MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
+MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
+
+MARKER = stringStart + MARKER_EXPR + stringEnd
+
+
+def _coerce_parse_result(results):
+    if isinstance(results, ParseResults):
+        return [_coerce_parse_result(i) for i in results]
+    else:
+        return results
+
+
+def _format_marker(marker, first=True):
+    assert isinstance(marker, (list, tuple, string_types))
+
+    # Sometimes we have a structure like [[...]] which is a single item list
+    # where the single item is itself its own list. In that case we want to skip
+    # the rest of this function so that we don't get extraneous () on the
+    # outside.
+    if (isinstance(marker, list) and len(marker) == 1 and
+            isinstance(marker[0], (list, tuple))):
+        return _format_marker(marker[0])
+
+    if isinstance(marker, list):
+        inner = (_format_marker(m, first=False) for m in marker)
+        if first:
+            return " ".join(inner)
+        else:
+            return "(" + " ".join(inner) + ")"
+    elif isinstance(marker, tuple):
+        return '{0} {1} "{2}"'.format(*marker)
+    else:
+        return marker
+
+
+_operators = {
+    "in": lambda lhs, rhs: lhs in rhs,
+    "not in": lambda lhs, rhs: lhs not in rhs,
+    "<": operator.lt,
+    "<=": operator.le,
+    "==": operator.eq,
+    "!=": operator.ne,
+    ">=": operator.ge,
+    ">": operator.gt,
+}
+
+
+def _eval_op(lhs, op, rhs):
+    try:
+        spec = Specifier("".join([op, rhs]))
+    except InvalidSpecifier:
+        pass
+    else:
+        return spec.contains(lhs)
+
+    oper = _operators.get(op)
+    if oper is None:
+        raise UndefinedComparison(
+            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
+        )
+
+    return oper(lhs, rhs)
+
+
+_undefined = object()
+
+
+def _get_env(environment, name):
+    value = environment.get(name, _undefined)
+
+    if value is _undefined:
+        raise UndefinedEnvironmentName(
+            "{0!r} does not exist in evaluation environment.".format(name)
+        )
+
+    return value
+
+
+def _evaluate_markers(markers, environment):
+    groups = [[]]
+
+    for marker in markers:
+        assert isinstance(marker, (list, tuple, string_types))
+
+        if isinstance(marker, list):
+            groups[-1].append(_evaluate_markers(marker, environment))
+        elif isinstance(marker, tuple):
+            lhs, op, rhs = marker
+
+            if isinstance(lhs, Variable):
+                lhs_value = _get_env(environment, lhs.value)
+                rhs_value = rhs.value
+            else:
+                lhs_value = lhs.value
+                rhs_value = _get_env(environment, rhs.value)
+
+            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+        else:
+            assert marker in ["and", "or"]
+            if marker == "or":
+                groups.append([])
+
+    return any(all(item) for item in groups)
+
+
+def format_full_version(info):
+    version = '{0.major}.{0.minor}.{0.micro}'.format(info)
+    kind = info.releaselevel
+    if kind != 'final':
+        version += kind[0] + str(info.serial)
+    return version
+
+
+def default_environment():
+    if hasattr(sys, 'implementation'):
+        iver = format_full_version(sys.implementation.version)
+        implementation_name = sys.implementation.name
+    else:
+        iver = '0'
+        implementation_name = ''
+
+    return {
+        "implementation_name": implementation_name,
+        "implementation_version": iver,
+        "os_name": os.name,
+        "platform_machine": platform.machine(),
+        "platform_release": platform.release(),
+        "platform_system": platform.system(),
+        "platform_version": platform.version(),
+        "python_full_version": platform.python_version(),
+        "platform_python_implementation": platform.python_implementation(),
+        "python_version": platform.python_version()[:3],
+        "sys_platform": sys.platform,
+    }
+
+
+class Marker(object):
+
+    def __init__(self, marker):
+        try:
+            self._markers = _coerce_parse_result(MARKER.parseString(marker))
+        except ParseException as e:
+            err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
+                marker, marker[e.loc:e.loc + 8])
+            raise InvalidMarker(err_str)
+
+    def __str__(self):
+        return _format_marker(self._markers)
+
+    def __repr__(self):
+        return "<Marker({0!r})>".format(str(self))
+
+    def evaluate(self, environment=None):
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment.
+
+        The environment is determined from the current Python process.
+        """
+        current_environment = default_environment()
+        if environment is not None:
+            current_environment.update(environment)
+
+        return _evaluate_markers(self._markers, current_environment)
diff --git a/packaging/requirements.py b/packaging/requirements.py
new file mode 100644
index 0000000..a1bb414
--- /dev/null
+++ b/packaging/requirements.py
@@ -0,0 +1,127 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import string
+import re
+
+from pyparsing import stringStart, stringEnd, originalTextFor, ParseException
+from pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
+from pyparsing import Literal as L  # noqa
+from six.moves.urllib import parse as urlparse
+
+from .markers import MARKER_EXPR, Marker
+from .specifiers import LegacySpecifier, Specifier, SpecifierSet
+
+
+class InvalidRequirement(ValueError):
+    """
+    An invalid requirement was found, users should refer to PEP 508.
+    """
+
+
+ALPHANUM = Word(string.ascii_letters + string.digits)
+
+LBRACKET = L("[").suppress()
+RBRACKET = L("]").suppress()
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+COMMA = L(",").suppress()
+SEMICOLON = L(";").suppress()
+AT = L("@").suppress()
+
+PUNCTUATION = Word("-_.")
+IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
+IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
+
+NAME = IDENTIFIER("name")
+EXTRA = IDENTIFIER
+
+URI = Regex(r'[^ ]+')("url")
+URL = (AT + URI)
+
+EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
+EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
+
+VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
+VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
+
+VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
+VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
+                       joinString=",", adjacent=False)("_raw_spec")
+_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
+_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
+
+VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
+VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
+
+MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
+MARKER_EXPR.setParseAction(
+    lambda s, l, t: Marker(s[t._original_start:t._original_end])
+)
+MARKER_SEPERATOR = SEMICOLON
+MARKER = MARKER_SEPERATOR + MARKER_EXPR
+
+VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
+URL_AND_MARKER = URL + Optional(MARKER)
+
+NAMED_REQUIREMENT = \
+    NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
+
+REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
+
+
+class Requirement(object):
+    """Parse a requirement.
+
+    Parse a given requirement string into its parts, such as name, specifier,
+    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+    string.
+    """
+
+    # TODO: Can we test whether something is contained within a requirement?
+    #       If so how do we do that? Do we need to test against the _name_ of
+    #       the thing as well as the version? What about the markers?
+    # TODO: Can we normalize the name and extra name?
+
+    def __init__(self, requirement_string):
+        try:
+            req = REQUIREMENT.parseString(requirement_string)
+        except ParseException as e:
+            raise InvalidRequirement(
+                "Invalid requirement, parse error at \"{0!r}\"".format(
+                    requirement_string[e.loc:e.loc + 8]))
+
+        self.name = req.name
+        if req.url:
+            parsed_url = urlparse.urlparse(req.url)
+            if not (parsed_url.scheme and parsed_url.netloc) or (
+                    not parsed_url.scheme and not parsed_url.netloc):
+                raise InvalidRequirement("Invalid URL given")
+            self.url = req.url
+        else:
+            self.url = None
+        self.extras = set(req.extras.asList() if req.extras else [])
+        self.specifier = SpecifierSet(req.specifier)
+        self.marker = req.marker if req.marker else None
+
+    def __str__(self):
+        parts = [self.name]
+
+        if self.extras:
+            parts.append("[{0}]".format(",".join(sorted(self.extras))))
+
+        if self.specifier:
+            parts.append(str(self.specifier))
+
+        if self.url:
+            parts.append("@ {0}".format(self.url))
+
+        if self.marker:
+            parts.append("; {0}".format(self.marker))
+
+        return "".join(parts)
+
+    def __repr__(self):
+        return "<Requirement({0!r})>".format(str(self))
diff --git a/packaging/specifiers.py b/packaging/specifiers.py
new file mode 100644
index 0000000..7f5a76c
--- /dev/null
+++ b/packaging/specifiers.py
@@ -0,0 +1,774 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import abc
+import functools
+import itertools
+import re
+
+from ._compat import string_types, with_metaclass
+from .version import Version, LegacyVersion, parse
+
+
+class InvalidSpecifier(ValueError):
+    """
+    An invalid specifier was found, users should refer to PEP 440.
+    """
+
+
+class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
+
+    @abc.abstractmethod
+    def __str__(self):
+        """
+        Returns the str representation of this Specifier like object. This
+        should be representative of the Specifier itself.
+        """
+
+    @abc.abstractmethod
+    def __hash__(self):
+        """
+        Returns a hash value for this Specifier like object.
+        """
+
+    @abc.abstractmethod
+    def __eq__(self, other):
+        """
+        Returns a boolean representing whether or not the two Specifier like
+        objects are equal.
+        """
+
+    @abc.abstractmethod
+    def __ne__(self, other):
+        """
+        Returns a boolean representing whether or not the two Specifier like
+        objects are not equal.
+        """
+
+    @abc.abstractproperty
+    def prereleases(self):
+        """
+        Returns whether or not pre-releases as a whole are allowed by this
+        specifier.
+        """
+
+    @prereleases.setter
+    def prereleases(self, value):
+        """
+        Sets whether or not pre-releases as a whole are allowed by this
+        specifier.
+        """
+
+    @abc.abstractmethod
+    def contains(self, item, prereleases=None):
+        """
+        Determines if the given item is contained within this specifier.
+        """
+
+    @abc.abstractmethod
+    def filter(self, iterable, prereleases=None):
+        """
+        Takes an iterable of items and filters them so that only items which
+        are contained within this specifier are allowed in it.
+        """
+
+
+class _IndividualSpecifier(BaseSpecifier):
+
+    _operators = {}
+
+    def __init__(self, spec="", prereleases=None):
+        match = self._regex.search(spec)
+        if not match:
+            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
+
+        self._spec = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+
+        # Store whether or not this Specifier should accept prereleases
+        self._prereleases = prereleases
+
+    def __repr__(self):
+        pre = (
+            ", prereleases={0!r}".format(self.prereleases)
+            if self._prereleases is not None
+            else ""
+        )
+
+        return "<{0}({1!r}{2})>".format(
+            self.__class__.__name__,
+            str(self),
+            pre,
+        )
+
+    def __str__(self):
+        return "{0}{1}".format(*self._spec)
+
+    def __hash__(self):
+        return hash(self._spec)
+
+    def __eq__(self, other):
+        if isinstance(other, string_types):
+            try:
+                other = self.__class__(other)
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self._spec == other._spec
+
+    def __ne__(self, other):
+        if isinstance(other, string_types):
+            try:
+                other = self.__class__(other)
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self._spec != other._spec
+
+    def _get_operator(self, op):
+        return getattr(self, "_compare_{0}".format(self._operators[op]))
+
+    def _coerce_version(self, version):
+        if not isinstance(version, (LegacyVersion, Version)):
+            version = parse(version)
+        return version
+
+    @property
+    def operator(self):
+        return self._spec[0]
+
+    @property
+    def version(self):
+        return self._spec[1]
+
+    @property
+    def prereleases(self):
+        return self._prereleases
+
+    @prereleases.setter
+    def prereleases(self, value):
+        self._prereleases = value
+
+    def __contains__(self, item):
+        return self.contains(item)
+
+    def contains(self, item, prereleases=None):
+        # Determine if prereleases are to be allowed or not.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version or LegacyVersion, this allows us to have
+        # a shortcut for ``"2.0" in Specifier(">=2")``
+        item = self._coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not; if we do not support prereleases then we can short-circuit
+        # logic if this version is a prerelease.
+        if item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        return self._get_operator(self.operator)(item, self.version)
+
+    def filter(self, iterable, prereleases=None):
+        yielded = False
+        found_prereleases = []
+
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
+        for version in iterable:
+            parsed_version = self._coerce_version(version)
+
+            if self.contains(parsed_version, **kw):
+                # If our version is a prerelease, and we were not set to allow
+                # prereleases, then we'll store it for later in case nothing
+                # else matches this specifier.
+                if (parsed_version.is_prerelease and not
+                        (prereleases or self.prereleases)):
+                    found_prereleases.append(version)
+                # Either this is not a prerelease, or we should have been
+                # accepting prereleases from the beginning.
+                else:
+                    yielded = True
+                    yield version
+
+        # Now that we've iterated over everything, determine if we've yielded
+        # any values, and if we have not and we have any prereleases stored up
+        # then we will go ahead and yield the prereleases.
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
+
+
+class LegacySpecifier(_IndividualSpecifier):
+
+    _regex_str = (
+        r"""
+        (?P<operator>(==|!=|<=|>=|<|>))
+        \s*
+        (?P<version>
+            [^,;\s)]* # Since this is a "legacy" specifier, and the version
+                      # string can be just about anything, we match everything
+                      # except for whitespace, a semi-colon for marker support,
+                      # a closing paren since versions can be enclosed in
+                      # them, and a comma since it's a version separator.
+        )
+        """
+    )
+
+    _regex = re.compile(
+        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    _operators = {
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+    }
+
+    def _coerce_version(self, version):
+        if not isinstance(version, LegacyVersion):
+            version = LegacyVersion(str(version))
+        return version
+
+    def _compare_equal(self, prospective, spec):
+        return prospective == self._coerce_version(spec)
+
+    def _compare_not_equal(self, prospective, spec):
+        return prospective != self._coerce_version(spec)
+
+    def _compare_less_than_equal(self, prospective, spec):
+        return prospective <= self._coerce_version(spec)
+
+    def _compare_greater_than_equal(self, prospective, spec):
+        return prospective >= self._coerce_version(spec)
+
+    def _compare_less_than(self, prospective, spec):
+        return prospective < self._coerce_version(spec)
+
+    def _compare_greater_than(self, prospective, spec):
+        return prospective > self._coerce_version(spec)
+
+
+def _require_version_compare(fn):
+    @functools.wraps(fn)
+    def wrapped(self, prospective, spec):
+        if not isinstance(prospective, Version):
+            return False
+        return fn(self, prospective, spec)
+    return wrapped
+
+
+class Specifier(_IndividualSpecifier):
+
+    _regex_str = (
+        r"""
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)  # Only match for the identity operator
+                \s*
+                [^\s]*    # We just match everything, except for whitespace
+                          # since we are only testing for strict identity.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)            # Only match for equals and not equals
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+
+                # You cannot use a wild card and a dev or local version
+                # together so group them with a | and make them optional.
+                (?:
+                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+                    |
+                    \.\*  # Wild card syntax of .*
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)               # Only match for the compatible operator
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a sub set of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+        )
+        """
+    )
+
+    _regex = re.compile(
+        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    _operators = {  # operator token -> "_compare_<name>" method suffix
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    @_require_version_compare
+    def _compare_compatible(self, prospective, spec):
+        # Compatible releases have an equivalent combination of >= and ==. That
+        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+        # implement this in terms of the other specifiers instead of
+        # implementing it ourselves. The only thing we need to do is construct
+        # the other specifiers.
+
+        # We want everything but the last item in the version, but we want to
+        # ignore post and dev releases and we want to treat the pre-release as
+        # its own separate segment.
+        prefix = ".".join(
+            list(
+                itertools.takewhile(
+                    lambda x: (not x.startswith("post") and not
+                               x.startswith("dev")),
+                    _version_split(spec),
+                )
+            )[:-1]
+        )
+
+        # Add the prefix notation to the end of our string
+        prefix += ".*"
+
+        return (self._get_operator(">=")(prospective, spec) and
+                self._get_operator("==")(prospective, prefix))
+
+    @_require_version_compare
+    def _compare_equal(self, prospective, spec):  # "==": exact or ".*" prefix match
+        # We need special logic to handle prefix matching
+        if spec.endswith(".*"):
+            # In the case of prefix matching we want to ignore local segment.
+            prospective = Version(prospective.public)
+            # Split the spec out by dots, and pretend that there is an implicit
+            # dot in between a release segment and a pre-release segment.
+            spec = _version_split(spec[:-2])  # Remove the trailing .*
+
+            # Split the prospective version out by dots, and pretend that there
+            # is an implicit dot in between a release segment and a pre-release
+            # segment.
+            prospective = _version_split(str(prospective))
+
+            # Shorten the prospective version to be the same length as the spec
+            # so that we can determine if the specifier is a prefix of the
+            # prospective version or not.
+            prospective = prospective[:len(spec)]
+
+            # Pad out our two sides with zeros so that they both equal the same
+            # length.
+            spec, prospective = _pad_version(spec, prospective)
+        else:
+            # Convert our spec string into a Version
+            spec = Version(spec)
+
+            # If the specifier does not have a local segment, then we want to
+            # act as if the prospective version also does not have a local
+            # segment.
+            if not spec.local:
+                prospective = Version(prospective.public)
+
+        return prospective == spec
+
+    @_require_version_compare
+    def _compare_not_equal(self, prospective, spec):
+        return not self._compare_equal(prospective, spec)  # "!=" is the negation of "=="
+
+    @_require_version_compare
+    def _compare_less_than_equal(self, prospective, spec):
+        return prospective <= Version(spec)  # "<=": plain PEP 440 ordering
+
+    @_require_version_compare
+    def _compare_greater_than_equal(self, prospective, spec):
+        return prospective >= Version(spec)  # ">=": plain PEP 440 ordering
+
+    @_require_version_compare
+    def _compare_less_than(self, prospective, spec):
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1 should
+        # not match 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # less than the spec version *and* it's not a pre-release of the same
+        # version in the spec.
+        return True
+
+    @_require_version_compare
+    def _compare_greater_than(self, prospective, spec):
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept
+        # post-release versions for the version mentioned in the specifier
+        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # greater than the spec version *and* it's not a post-release or local
+        # version of the same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective, spec):
+        return str(prospective).lower() == str(spec).lower()  # "===": case-insensitive string identity
+
+    @property
+    def prereleases(self):
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators, and if they are if they are including an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*, if it does we
+            # want to remove it before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release then this
+            # specifier allows pre-releases.
+            if parse(version).is_prerelease:
+                return True
+
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value):
+        self._prereleases = value  # explicit override; None means "infer from the spec"
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")  # e.g. "2rc1" -> ("2", "rc1")
+
+
+def _version_split(version):
+    result = []
+    for item in version.split("."):
+        match = _prefix_regex.search(item)
+        if match:
+            result.extend(match.groups())  # separate a fused numeral/pre-release pair
+        else:
+            result.append(item)
+    return result
+
+
+def _pad_version(left, right):  # zero-pad the shorter release segment so the two compare element-wise
+    left_split, right_split = [], []
+
+    # Get the release segment of our versions
+    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+    # Get the rest of our versions
+    left_split.append(left[len(left_split[0]):])
+    right_split.append(right[len(right_split[0]):])
+
+    # Insert our padding
+    left_split.insert(
+        1,
+        ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
+    )
+    right_split.insert(
+        1,
+        ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
+    )
+
+    return (
+        list(itertools.chain(*left_split)),
+        list(itertools.chain(*right_split)),
+    )
+
+
+class SpecifierSet(BaseSpecifier):
+
+    def __init__(self, specifiers="", prereleases=None):
+        # Split on , to break each individual specifier into its own item, and
+        # strip each item to remove leading/trailing whitespace.
+        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+        # Parse each individual specifier, attempting first to make it a
+        # Specifier and falling back to a LegacySpecifier.
+        parsed = set()
+        for specifier in specifiers:
+            try:
+                parsed.add(Specifier(specifier))
+            except InvalidSpecifier:
+                parsed.add(LegacySpecifier(specifier))
+
+        # Turn our parsed specifiers into a frozen set and save them for later.
+        self._specs = frozenset(parsed)
+
+        # Store our prereleases value so we can use it later to determine if
+        # we accept prereleases or not.
+        self._prereleases = prereleases
+
+    def __repr__(self):
+        pre = (
+            ", prereleases={0!r}".format(self.prereleases)
+            if self._prereleases is not None
+            else ""
+        )
+
+        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
+
+    def __str__(self):
+        return ",".join(sorted(str(s) for s in self._specs))
+
+    def __hash__(self):
+        return hash(self._specs)
+
+    def __and__(self, other):
+        if isinstance(other, string_types):
+            other = SpecifierSet(other)
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        specifier = SpecifierSet()
+        specifier._specs = frozenset(self._specs | other._specs)
+
+        if self._prereleases is None and other._prereleases is not None:
+            specifier._prereleases = other._prereleases
+        elif self._prereleases is not None and other._prereleases is None:
+            specifier._prereleases = self._prereleases
+        elif self._prereleases == other._prereleases:
+            specifier._prereleases = self._prereleases
+        else:
+            raise ValueError(
+                "Cannot combine SpecifierSets with True and False prerelease "
+                "overrides."
+            )
+
+        return specifier
+
+    def __eq__(self, other):
+        if isinstance(other, string_types):
+            other = SpecifierSet(other)
+        elif isinstance(other, _IndividualSpecifier):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs == other._specs
+
+    def __ne__(self, other):
+        if isinstance(other, string_types):
+            other = SpecifierSet(other)
+        elif isinstance(other, _IndividualSpecifier):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs != other._specs
+
+    def __len__(self):
+        return len(self._specs)
+
+    def __iter__(self):
+        return iter(self._specs)
+
+    @property
+    def prereleases(self):
+        # If we have been given an explicit prerelease modifier, then we'll
+        # pass that through here.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # If we don't have any specifiers, and we don't have a forced value,
+        # then we'll just return None since we don't know if this should have
+        # pre-releases or not.
+        if not self._specs:
+            return None
+
+        # Otherwise we'll see if any of the given specifiers accept
+        # prereleases, if any of them do we'll return True, otherwise False.
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value):
+        self._prereleases = value
+
+    def __contains__(self, item):
+        return self.contains(item)
+
+    def contains(self, item, prereleases=None):
+        # Ensure that our item is a Version or LegacyVersion instance.
+        if not isinstance(item, (LegacyVersion, Version)):
+            item = parse(item)
+
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # We can determine if we're going to allow pre-releases by looking to
+        # see if any of the underlying items supports them. If none of them do
+        # and this item is a pre-release then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        #       like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
+        if not prereleases and item.is_prerelease:
+            return False
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        #       will always return True, this is an explicit design decision.
+        return all(
+            s.contains(item, prereleases=prereleases)
+            for s in self._specs
+        )
+
+    def filter(self, iterable, prereleases=None):
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # If we have any specifiers, then we want to wrap our iterable in the
+        # filter method for each one, this will act as a logical AND amongst
+        # each specifier.
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+            return iterable
+        # If we do not have any specifiers, then we need to have a rough filter
+        # which will filter out any pre-releases, unless there are no final
+        # releases, and which will filter out LegacyVersion in general.
+        else:
+            filtered = []
+            found_prereleases = []
+
+            for item in iterable:
+                # Ensure that we have some kind of Version class for this item.
+                if not isinstance(item, (LegacyVersion, Version)):
+                    parsed_version = parse(item)
+                else:
+                    parsed_version = item
+
+                # Filter out any item which is parsed as a LegacyVersion
+                if isinstance(parsed_version, LegacyVersion):
+                    continue
+
+                # Store any item which is a pre-release for later unless we've
+                # already found a final version or we are accepting prereleases
+                if parsed_version.is_prerelease and not prereleases:
+                    if not filtered:
+                        found_prereleases.append(item)
+                else:
+                    filtered.append(item)
+
+            # If we've found no items except for pre-releases, then we'll go
+            # ahead and use the pre-releases
+            if not filtered and found_prereleases and prereleases is None:
+                return found_prereleases
+
+            return filtered
diff --git a/packaging/utils.py b/packaging/utils.py
new file mode 100644
index 0000000..942387c
--- /dev/null
+++ b/packaging/utils.py
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import re
+
+
+_canonicalize_regex = re.compile(r"[-_.]+")
+
+
+def canonicalize_name(name):
+    # This is taken from PEP 503.
+    return _canonicalize_regex.sub("-", name).lower()  # runs of "-", "_", "." collapse to a single "-"
diff --git a/packaging/version.py b/packaging/version.py
new file mode 100644
index 0000000..83b5ee8
--- /dev/null
+++ b/packaging/version.py
@@ -0,0 +1,393 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import collections
+import itertools
+import re
+
+from ._structures import Infinity
+
+
+__all__ = [
+    "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
+]
+
+
+_Version = collections.namedtuple(
+    "_Version",
+    ["epoch", "release", "dev", "pre", "post", "local"],
+)  # parsed PEP 440 components of a version string
+
+
+def parse(version):
+    """
+    Parse the given version string and return either a :class:`Version` object
+    or a :class:`LegacyVersion` object depending on if the given version is
+    a valid PEP 440 version or a legacy version.
+    """
+    try:
+        return Version(version)
+    except InvalidVersion:  # not PEP 440; fall back to setuptools-style ordering
+        return LegacyVersion(version)
+
+
+class InvalidVersion(ValueError):
+    """
+    An invalid version was found; users should refer to PEP 440.
+    """
+
+
+class _BaseVersion(object):  # ordering/equality mixin driven by the precomputed self._key
+
+    def __hash__(self):
+        return hash(self._key)
+
+    def __lt__(self, other):
+        return self._compare(other, lambda s, o: s < o)
+
+    def __le__(self, other):
+        return self._compare(other, lambda s, o: s <= o)
+
+    def __eq__(self, other):
+        return self._compare(other, lambda s, o: s == o)
+
+    def __ge__(self, other):
+        return self._compare(other, lambda s, o: s >= o)
+
+    def __gt__(self, other):
+        return self._compare(other, lambda s, o: s > o)
+
+    def __ne__(self, other):
+        return self._compare(other, lambda s, o: s != o)
+
+    def _compare(self, other, method):
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return method(self._key, other._key)  # compare sort keys, never raw strings
+
+
+class LegacyVersion(_BaseVersion):  # non-PEP 440 version; _legacy_cmpkey orders these before all PEP 440 versions
+
+    def __init__(self, version):
+        self._version = str(version)
+        self._key = _legacy_cmpkey(self._version)
+
+    def __str__(self):
+        return self._version
+
+    def __repr__(self):
+        return "<LegacyVersion({0})>".format(repr(str(self)))
+
+    @property
+    def public(self):
+        return self._version
+
+    @property
+    def base_version(self):
+        return self._version
+
+    @property
+    def local(self):
+        return None  # legacy versions never have a local segment
+
+    @property
+    def is_prerelease(self):
+        return False  # legacy versions are never treated as pre-releases
+
+    @property
+    def is_postrelease(self):
+        return False  # ... nor as post-releases
+
+
+_legacy_version_component_re = re.compile(
+    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
+)
+
+_legacy_version_replacement_map = {
+    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
+}
+
+
+def _parse_version_parts(s):
+    for part in _legacy_version_component_re.split(s):
+        part = _legacy_version_replacement_map.get(part, part)
+
+        if not part or part == ".":
+            continue
+
+        if part[:1] in "0123456789":
+            # pad for numeric comparison
+            yield part.zfill(8)
+        else:
+            yield "*" + part  # "*" sorts before "0", so alpha parts precede padded numbers
+
+    # ensure that alpha/beta/candidate are before final
+    yield "*final"
+
+
+def _legacy_cmpkey(version):
+    # We hardcode an epoch of -1 here. A PEP 440 version can only have an
+    # epoch greater than or equal to 0. This will effectively put the
+    # LegacyVersion, which uses the defacto standard originally implemented
+    # by setuptools, as before all PEP 440 versions.
+    epoch = -1
+
+    # This scheme is taken from pkg_resources.parse_version in setuptools prior
+    # to its adoption of the packaging library.
+    parts = []
+    for part in _parse_version_parts(version.lower()):
+        if part.startswith("*"):
+            # remove "-" before a prerelease tag
+            if part < "*final":
+                while parts and parts[-1] == "*final-":
+                    parts.pop()
+
+            # remove trailing zeros from each series of numeric parts
+            while parts and parts[-1] == "00000000":
+                parts.pop()
+
+        parts.append(part)
+    parts = tuple(parts)
+
+    return epoch, parts
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+
+class Version(_BaseVersion):
+
+    _regex = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    def __init__(self, version):
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(
+                match.group("pre_l"),
+                match.group("pre_n"),
+            ),
+            post=_parse_letter_version(
+                match.group("post_l"),
+                match.group("post_n1") or match.group("post_n2"),
+            ),
+            dev=_parse_letter_version(
+                match.group("dev_l"),
+                match.group("dev_n"),
+            ),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self):
+        return "<Version({0})>".format(repr(str(self)))
+
+    def __str__(self):
+        parts = []
+
+        # Epoch
+        if self._version.epoch != 0:
+            parts.append("{0}!".format(self._version.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self._version.release))
+
+        # Pre-release
+        if self._version.pre is not None:
+            parts.append("".join(str(x) for x in self._version.pre))
+
+        # Post-release
+        if self._version.post is not None:
+            parts.append(".post{0}".format(self._version.post[1]))
+
+        # Development release
+        if self._version.dev is not None:
+            parts.append(".dev{0}".format(self._version.dev[1]))
+
+        # Local version segment
+        if self._version.local is not None:
+            parts.append(
+                "+{0}".format(".".join(str(x) for x in self._version.local))
+            )
+
+        return "".join(parts)
+
+    @property
+    def public(self):
+        return str(self).split("+", 1)[0]  # version string without the local segment
+
+    @property
+    def base_version(self):
+        parts = []
+
+        # Epoch
+        if self._version.epoch != 0:
+            parts.append("{0}!".format(self._version.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self._version.release))
+
+        return "".join(parts)
+
+    @property
+    def local(self):
+        version_string = str(self)
+        if "+" in version_string:
+            return version_string.split("+", 1)[1]  # implicitly None when no local segment
+
+    @property
+    def is_prerelease(self):
+        return bool(self._version.dev or self._version.pre)
+
+    @property
+    def is_postrelease(self):
+        return bool(self._version.post)
+
+
+def _parse_letter_version(letter, number):
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)  # normalized (letter, number) pair
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)  # implicitly None when neither letter nor number is given
+
+
+_local_version_seperators = re.compile(r"[\._-]")  # NOTE: misspelled name ("seperators") kept for upstream parity
+
+
+def _parse_local_version(local):
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_seperators.split(local)
+        )
+
+
+def _cmpkey(epoch, release, pre, post, dev, local):
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all the now-
+    # leading zeros until we come to something non-zero, then take the rest,
+    # re-reverse it back into the correct order and make it a tuple and use
+    # that for our sorting key.
+    release = tuple(
+        reversed(list(
+            itertools.dropwhile(
+                lambda x: x == 0,
+                reversed(release),
+            )
+        ))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        pre = -Infinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        pre = Infinity
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        post = -Infinity
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        dev = Infinity
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        local = -Infinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        local = tuple(
+            (i, "") if isinstance(i, int) else (-Infinity, i)
+            for i in local
+        )
+
+    return epoch, release, pre, post, dev, local
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 04f77c6..8180010 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,5 +1,5 @@
 setuptools
-Numpy>=1.8.0
+Numpy>=1.4.1
 Cython>=0.19
 descartes==1.0.1
 packaging
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..5ee6477
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,2 @@
+[pytest]
+testpaths = tests
diff --git a/setup.py b/setup.py
index abe841a..1e12ada 100755
--- a/setup.py
+++ b/setup.py
@@ -1,17 +1,59 @@
 #!/usr/bin/env python
 
-# Two environment variables influence this script.
+# Build or install Shapely distributions
 #
-# GEOS_LIBRARY_PATH: a path to a GEOS C shared library.
+# This script has two different uses.
 #
-# GEOS_CONFIG: the path to a geos-config program that points to GEOS version,
-# headers, and libraries.
+# 1) Installing from a source distribution, whether via
+#
+#      ``python setup.py install``
+#
+#    after downloading a source distribution, or
+#
+#      ``pip install shapely``
+#
+#    on a platform for which pip cannot find a wheel. This will most
+#    often be the case for Linux, since the project is not yet
+#    publishing Linux wheels. This will never be the case on Windows and
+#    rarely the case on OS X; both are wheels-first platforms.
+#
+# 2) Building distributions (source or wheel) from a repository. This
+#    includes using Cython to generate C source for the speedups and
+#    vectorize modules from Shapely's .pyx files.
+#
+# On import, Shapely loads a GEOS shared library. GEOS is a run time
+# requirement. Additionally, the speedups and vectorized C extension
+# modules need GEOS headers and libraries to be built. Shapely versions
+# >=1.3 require GEOS >= 3.3.
+#
+# For the first use case (see 1, above), we aim to treat GEOS as if it
+# were a Python requirement listed in ``install_requires``. That is, in
+# an environment with Shapely 1.2.x and GEOS 3.2, the command ``pip
+# install shapely >=1.3 --no-use-wheel`` (whether wheels are explicitly
+# opted against or are not published for the platform) should fail with
+# a warning and advice to upgrade GEOS to >=3.3.
+#
+# In case 1, the environment's GEOS version is determined by executing
+# the geos-config script. If the GEOS version returned by that script is
+# incompatible with the Shapely source distribution or no geos-config
+# script can be found, this setup script will fail.
+#
+# For the second use case (see 2, distribution building, above), we
+# allow the requirements to be loosened. If this script finds that the
+# environment variable NO_GEOS_CHECK is set, geos-config will not be
+# executed and no attempt will be made to enforce requirements as in the
+# first case.
+#
+# For both cases, a geos-config not in the environment's $PATH may be
+# used by setting the environment variable GEOS_CONFIG to the path to
+# a geos-config script.
 #
 # NB: within this setup scripts, software versions are evaluated according
 # to https://www.python.org/dev/peps/pep-0440/.
 
 import errno
 import glob
+import itertools as it
 import logging
 import os
 import platform
@@ -31,7 +73,7 @@ except ImportError:
 from distutils.errors import CCompilerError, DistutilsExecError, \
     DistutilsPlatformError
 
-from distutils.version import StrictVersion as Version
+from packaging.version import Version
 
 # Get geos_version from GEOS dynamic library, which depends on
 # GEOS_LIBRARY_PATH and/or GEOS_CONFIG environment variables
@@ -45,6 +87,36 @@ log = logging.getLogger(__file__)
 if 'all' in sys.warnoptions:
     log.level = logging.DEBUG
 
+
+class GEOSConfig(object):
+    """Interface to config options from the `geos-config` utility
+    """
+
+    def __init__(self, cmd):
+        self.cmd = cmd
+
+    def get(self, option):
+        try:
+            stdout, stderr = subprocess.Popen(
+                [self.cmd, option],
+                stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
+        except OSError as ex:
+            # e.g., [Errno 2] No such file or directory
+            raise OSError("Could not find geos-config script")
+        if stderr and not stdout:
+            raise ValueError(stderr.strip())
+        if sys.version_info[0] >= 3:
+            result = stdout.decode('ascii').strip()
+        else:
+            result = stdout.strip()
+        log.debug('%s %s: %r', self.cmd, option, result)
+        return result
+
+    def version(self):
+        match = re.match(r'(\d+)\.(\d+)\.(\d+)', self.get('--version').strip())
+        return tuple(map(int, match.groups()))
+
+
 # Get the version from the shapely module
 shapely_version = None
 with open('shapely/__init__.py', 'r') as fp:
@@ -60,14 +132,28 @@ if not shapely_version:
 # Fail installation if the GEOS shared library does not meet the minimum
 # version. We ship it with Shapely for Windows, so no need to check on
 # that platform.
-log.debug('GEOS shared library: %s %s', geos_version_string, geos_version)
-if (set(sys.argv).intersection(['install', 'build', 'build_ext']) and
-        shapely_version >= Version('1.3') and
-        geos_version < (3, 3)):
-    log.critical(
-        "Shapely >= 1.3 requires GEOS >= 3.3. "
-        "Install GEOS 3.3+ and reinstall Shapely.")
-    sys.exit(1)
+
+geos_version = None
+geos_config = GEOSConfig(os.environ.get('GEOS_CONFIG', 'geos-config'))
+
+if not os.environ.get('NO_GEOS_CHECK') or sys.platform == 'win32':
+    try:
+        log.info(
+            "Shapely >= 1.3 requires GEOS >= 3.3. "
+            "Checking for GEOS version...")
+        geos_version = geos_config.version()
+        log.info("Found GEOS version: %s", geos_version)
+        if (set(sys.argv).intersection(['install', 'build', 'build_ext']) and
+                shapely_version >= Version("1.3") and geos_version < (3, 3)):
+            log.critical(
+                "Shapely >= 1.3 requires GEOS >= 3.3. "
+                "Install GEOS 3.3+ and reinstall Shapely.")
+            sys.exit(1)
+    except OSError as exc:
+        log.warn(
+            "Failed to determine system's GEOS version: %s. "
+            "Installation continuing. GEOS version will be "
+            "checked on import of shapely.", exc)
 
 # Handle UTF-8 encoding of certain text files.
 open_kwds = {}
@@ -88,6 +174,13 @@ with open('CHANGES.txt', 'r', **open_kwds) as fp:
 
 long_description = readme + '\n\n' + credits + '\n\n' + changes
 
+
+extra_reqs = {
+    'test': ['pytest', 'pytest-cov', 'numpy>=1.4.1', 'packaging']
+}
+extra_reqs['all'] = list(it.chain.from_iterable(extra_reqs.values()))
+
+
 setup_args = dict(
     name                = 'Shapely',
     version             = str(shapely_version),
@@ -120,11 +213,14 @@ setup_args = dict(
         'Programming Language :: Python :: 3',
         'Topic :: Scientific/Engineering :: GIS',
     ],
-    data_files         = [('shapely', ['shapely/_geos.pxi'])],
     cmdclass           = {},
+    extras_require     = extra_reqs,
+    package_data={
+        'shapely': ['shapely/_geos.pxi']},
+    include_package_data=True
 )
 
-# Add DLLs for Windows
+# Add DLLs for Windows.
 if sys.platform == 'win32':
     try:
         os.mkdir('shapely/DLLs')
@@ -140,11 +236,7 @@ if sys.platform == 'win32':
     else:
         for dll in glob.glob('DLLs_x86_VC9/*.dll'):
             shutil.copy(dll, 'shapely/DLLs')
-    setup_args.update(
-        package_data={'shapely': ['shapely/DLLs/*.dll']},
-        include_package_data=True,
-    )
-
+    setup_args['package_data']['shapely'].append('shapely/DLLs/*.dll')
 
 # Prepare build opts and args for the speedups extension module.
 include_dirs = []
@@ -152,32 +244,12 @@ library_dirs = []
 libraries = []
 extra_link_args = []
 
-try:
-    # Get the version from geos-config. Show error if this version tuple is
-    # different to the GEOS version loaded from the dynamic library.
-    geos_config_version_string = get_geos_config('--version')
-    res = re.findall(r'(\d+)\.(\d+)\.(\d+)', geos_config_version_string)
-    geos_config_version = tuple(int(x) for x in res[0])
-
-    if geos_config_version != geos_version:
-        log.error("The GEOS dynamic library version is %s %s,",
-                  geos_version_string, geos_version)
-        log.error("but the version reported by %s is %s %s.", geos_config,
-                  geos_config_version_string, geos_config_version)
-        sys.exit(1)
-except OSError as ex:
-    log.error(ex)
-    log.error('Cannot find geos-config to get headers and check version.')
-    log.error('If available, specify a path to geos-config with a '
-              'GEOS_CONFIG environment variable')
-    geos_config = None
-
-if geos_config:
-    # Collect other options from GEOS
-    for item in get_geos_config('--cflags').split():
+if geos_version and geos_config:
+    # Collect other options from GEOS configuration.
+    for item in geos_config.get('--cflags').split():
         if item.startswith("-I"):
             include_dirs.extend(item[2:].split(":"))
-    for item in get_geos_config('--clibs').split():
+    for item in geos_config.get('--clibs').split():
         if item.startswith("-L"):
             library_dirs.extend(item[2:].split(":"))
         elif item.startswith("-l"):
diff --git a/shapely/__init__.py b/shapely/__init__.py
index b0c2f60..5bb6d59 100644
--- a/shapely/__init__.py
+++ b/shapely/__init__.py
@@ -1 +1 @@
-__version__ = "1.5.17"
+__version__ = "1.6a1"
diff --git a/shapely/_buildcfg.py b/shapely/_buildcfg.py
index d692b9d..9230326 100644
--- a/shapely/_buildcfg.py
+++ b/shapely/_buildcfg.py
@@ -197,7 +197,7 @@ elif sys.platform == 'win32':
             original_path = os.environ['PATH']
             os.environ['PATH'] = "%s;%s;%s" % \
                 (egg_dlls, wininst_dlls, original_path)
-            lgeos = CDLL("geos.dll")
+            lgeos = CDLL("geos_c.dll")
         except (ImportError, WindowsError, OSError):
             raise
 
diff --git a/shapely/algorithms/polylabel.py b/shapely/algorithms/polylabel.py
new file mode 100644
index 0000000..67182e2
--- /dev/null
+++ b/shapely/algorithms/polylabel.py
@@ -0,0 +1,128 @@
+from ..geometry import Point, LineString
+from ..geos import TopologicalError
+from heapq import heappush, heappop
+
+
+class Cell(object):
+    """A `Cell`'s centroid property is a potential solution to finding the pole
+    of inaccessibility for a given polygon. Rich comparison operators are used
+    for sorting `Cell` objects in a priority queue based on the potential
+    maximum distance of any theoretical point within a cell to a given
+    polygon's exterior boundary.
+    """
+    def __init__(self, x, y, h, polygon):
+        self.x = x
+        self.y = y
+        self.h = h  # half of cell size
+        self.centroid = Point(x, y)  # cell centroid, potential solution
+
+        # distance from cell centroid to polygon exterior
+        self.distance = self._dist(polygon)
+
+        # max distance to polygon exterior within a cell
+        self.max_distance = self.distance + h * 1.4142135623730951  # sqrt(2)
+
+    # rich comparison operators for sorting in minimum priority queue
+    def __lt__(self, other):
+        return self.max_distance > other.max_distance
+
+    def __le__(self, other):
+        return self.max_distance >= other.max_distance
+
+    def __eq__(self, other):
+        return self.max_distance == other.max_distance
+
+    def __ne__(self, other):
+        return self.max_distance != other.max_distance
+
+    def __gt__(self, other):
+        return self.max_distance < other.max_distance
+
+    def __ge__(self, other):
+        return self.max_distance <= other.max_distance
+
+    def _dist(self, polygon):
+        """Signed distance from Cell centroid to polygon outline. The returned
+        value is negative if the point is outside of the polygon exterior
+        boundary.
+        """
+        inside = polygon.contains(self.centroid)
+        distance = self.centroid.distance(LineString(polygon.exterior.coords))
+        if inside:
+            return distance
+        return -distance
+
+
+def polylabel(polygon, tolerance=1.0):
+    """Finds pole of inaccessibility for a given polygon. Based on
+    Vladimir Agafonkin's https://github.com/mapbox/polylabel
+
+    Parameters
+    ----------
+    polygon : shapely.geometry.Polygon
+    tolerance : int or float, optional
+                `tolerance` represents the highest resolution in units of the
+                input geometry that will be considered for a solution. (default
+                value is 1.0).
+
+    Returns
+    -------
+    shapely.geometry.Point
+        A point representing the pole of inaccessibility for the given input
+        polygon.
+
+    Raises
+    ------
+    shapely.geos.TopologicalError
+        If the input polygon is not a valid geometry.
+
+    Example
+    -------
+    >>> polygon = LineString([(0, 0), (50, 200), (100, 100), (20, 50),
+    ... (-100, -20), (-150, -200)]).buffer(100)
+    >>> label = polylabel(polygon, tolerance=10)
+    >>> label.wkt
+    'POINT (59.35615556364569 121.8391962974644)'
+    """
+    if not polygon.is_valid:
+        raise TopologicalError('Invalid polygon')
+    minx, miny, maxx, maxy = polygon.bounds
+    cell_size = min(maxx - minx, maxy - miny)
+    h = cell_size / 2.0
+    cell_queue = []
+
+    # First best cell approximation is one constructed from the centroid
+    # of the polygon
+    x, y = polygon.centroid.coords[0]
+    best_cell = Cell(x, y, 0, polygon)
+
+    # build a regular square grid covering the polygon
+    x = minx
+    while x < maxx:
+        y = miny
+        while y < maxy:
+            heappush(cell_queue, Cell(x + h, y + h, h, polygon))
+            y += cell_size
+        x += cell_size
+
+    # minimum priority queue
+    while cell_queue:
+        cell = heappop(cell_queue)
+
+        # update the best cell if we find a better one
+        if cell.distance > best_cell.distance:
+            best_cell = cell
+
+        # continue to the next iteration if we can't find a better solution
+        # based on tolerance
+        if cell.max_distance - best_cell.distance <= tolerance:
+            continue
+
+        # split the cell into quadrants
+        h = cell.h / 2.0
+        heappush(cell_queue, Cell(cell.x - h, cell.y - h, h, polygon))
+        heappush(cell_queue, Cell(cell.x + h, cell.y - h, h, polygon))
+        heappush(cell_queue, Cell(cell.x - h, cell.y + h, h, polygon))
+        heappush(cell_queue, Cell(cell.x + h, cell.y + h, h, polygon))
+
+    return best_cell.centroid
diff --git a/shapely/ctypes_declarations.py b/shapely/ctypes_declarations.py
index 4716212..da7705a 100644
--- a/shapely/ctypes_declarations.py
+++ b/shapely/ctypes_declarations.py
@@ -4,7 +4,10 @@ See header file: geos-x.y.z/capi/geos_c.h
 '''
 
 from ctypes import CFUNCTYPE, POINTER, c_void_p, c_char_p, \
-    c_size_t, c_byte, c_char, c_uint, c_int, c_double, py_object
+    c_size_t, c_byte, c_uint, c_int, c_double, py_object
+
+
+EXCEPTION_HANDLER_FUNCTYPE = CFUNCTYPE(None, c_char_p, c_void_p)
 
 # Derived pointer types
 c_size_t_p = POINTER(c_size_t)
@@ -14,21 +17,21 @@ class allocated_c_char_p(c_char_p):
     '''char pointer return type'''
     pass
 
-EXCEPTION_HANDLER_FUNCTYPE = CFUNCTYPE(None, c_char_p, c_void_p)
-
 
 def prototype(lgeos, geos_version):
-    '''Protype functions in geos_c.h for different version of GEOS
+    """Prototype functions in geos_c.h for different versions of GEOS
 
     Use the GEOS version, not the C API version.
-    '''
+    """
+
+    if not geos_version >= (3, 3, 0):
+        raise RuntimeError("Shapely requires GEOS version 3.3.0 or newer.")
 
-    '''
-    Initialization, cleanup, version
-    '''
+    # Initialization, cleanup, version.
 
     lgeos.initGEOS.restype = None
-    lgeos.initGEOS.argtypes = [EXCEPTION_HANDLER_FUNCTYPE, EXCEPTION_HANDLER_FUNCTYPE]
+    lgeos.initGEOS.argtypes = [
+        EXCEPTION_HANDLER_FUNCTYPE, EXCEPTION_HANDLER_FUNCTYPE]
 
     lgeos.finishGEOS.restype = None
     lgeos.finishGEOS.argtypes = []
@@ -36,10 +39,8 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSversion.restype = c_char_p
     lgeos.GEOSversion.argtypes = []
 
-    '''
-    NOTE - These functions are DEPRECATED.  Please use the new Reader and
-    writer APIS!
-    '''
+    # These functions are DEPRECATED.  Please use the new Reader and
+    # Writer APIs!
 
     lgeos.GEOSGeomFromWKT.restype = c_void_p
     lgeos.GEOSGeomFromWKT.argtypes = [c_char_p]
@@ -56,9 +57,7 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSGeomToWKB_buf.restype = allocated_c_char_p
     lgeos.GEOSGeomToWKB_buf.argtypes = [c_void_p, c_size_t_p]
 
-    '''
-    Coordinate sequence
-    '''
+    # Coordinate sequence
 
     lgeos.GEOSCoordSeq_create.restype = c_void_p
     lgeos.GEOSCoordSeq_create.argtypes = [c_uint, c_uint]
@@ -82,7 +81,8 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSCoordSeq_setZ.argtypes = [c_void_p, c_uint, c_double]
 
     lgeos.GEOSCoordSeq_setOrdinate.restype = c_int
-    lgeos.GEOSCoordSeq_setOrdinate.argtypes = [c_void_p, c_uint, c_uint, c_double]
+    lgeos.GEOSCoordSeq_setOrdinate.argtypes = [
+        c_void_p, c_uint, c_uint, c_double]
 
     lgeos.GEOSCoordSeq_getX.restype = c_int
     lgeos.GEOSCoordSeq_getX.argtypes = [c_void_p, c_uint, c_void_p]
@@ -99,9 +99,7 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSCoordSeq_getDimensions.restype = c_int
     lgeos.GEOSCoordSeq_getDimensions.argtypes = [c_void_p, c_void_p]
 
-    '''
-    Linear refeferencing
-    '''
+    # Linear referencing
 
     if geos_version >= (3, 2, 0):
 
@@ -117,9 +115,7 @@ def prototype(lgeos, geos_version):
         lgeos.GEOSInterpolateNormalized.restype = c_void_p
         lgeos.GEOSInterpolateNormalized.argtypes = [c_void_p, c_double]
 
-    '''
-    Buffer related
-    '''
+    # Buffer related
 
     lgeos.GEOSBuffer.restype = c_void_p
     lgeos.GEOSBuffer.argtypes = [c_void_p, c_double, c_int]
@@ -127,22 +123,23 @@ def prototype(lgeos, geos_version):
     if geos_version >= (3, 2, 0):
 
         lgeos.GEOSBufferWithStyle.restype = c_void_p
-        lgeos.GEOSBufferWithStyle.argtypes = [c_void_p, c_double, c_int, c_int, c_int, c_double]
+        lgeos.GEOSBufferWithStyle.argtypes = [
+            c_void_p, c_double, c_int, c_int, c_int, c_double]
 
         if geos_version >= (3, 3, 0):
 
             lgeos.GEOSOffsetCurve.restype = c_void_p
-            lgeos.GEOSOffsetCurve.argtypes = [c_void_p, c_double, c_int, c_int, c_double]
-        
+            lgeos.GEOSOffsetCurve.argtypes = [
+                c_void_p, c_double, c_int, c_int, c_double]
+
         else:
 
             # deprecated in GEOS 3.3.0 in favour of GEOSOffsetCurve
             lgeos.GEOSSingleSidedBuffer.restype = c_void_p
-            lgeos.GEOSSingleSidedBuffer.argtypes = [c_void_p, c_double, c_int, c_int, c_double, c_int]
+            lgeos.GEOSSingleSidedBuffer.argtypes = [
+                c_void_p, c_double, c_int, c_int, c_double, c_int]
 
-    '''
-    Geometry constructors
-    '''
+    # Geometry constructors
 
     lgeos.GEOSGeom_createPoint.restype = c_void_p
     lgeos.GEOSGeom_createPoint.argtypes = [c_void_p]
@@ -159,23 +156,20 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSGeom_createCollection.restype = c_void_p
     lgeos.GEOSGeom_createCollection.argtypes = [c_int, c_void_p, c_uint]
 
-    lgeos.GEOSGeom_createEmptyCollection.restype = c_void_p
-    lgeos.GEOSGeom_createEmptyCollection.argtypes = [c_int]
+    if geos_version >= (3, 3, 0):
+        lgeos.GEOSGeom_createEmptyCollection.restype = c_void_p
+        lgeos.GEOSGeom_createEmptyCollection.argtypes = [c_int]
 
     lgeos.GEOSGeom_clone.restype = c_void_p
     lgeos.GEOSGeom_clone.argtypes = [c_void_p]
 
-    '''
-    Memory management
-    '''
+    # Memory management
 
     lgeos.GEOSGeom_destroy.restype = None
     lgeos.GEOSGeom_destroy.argtypes = [c_void_p]
 
-    '''
-    Topology operations
-    Return NULL on exception
-    '''
+    # Topology operations
+    # Return NULL on exception
 
     lgeos.GEOSEnvelope.restype = c_void_p
     lgeos.GEOSEnvelope.argtypes = [c_void_p]
@@ -203,7 +197,7 @@ def prototype(lgeos, geos_version):
         lgeos.GEOSUnaryUnion.argtypes = [c_void_p]
 
     if geos_version >= (3, 1, 0):
-        '''deprecated in 3.3.0: use GEOSUnaryUnion instead'''
+        # deprecated in 3.3.0: use GEOSUnaryUnion instead
         lgeos.GEOSUnionCascaded.restype = c_void_p
         lgeos.GEOSUnionCascaded.argtypes = [c_void_p]
 
@@ -218,13 +212,13 @@ def prototype(lgeos, geos_version):
 
     if geos_version >= (3, 3, 0):
         lgeos.GEOSPolygonize_full.restype = c_void_p
-        lgeos.GEOSPolygonize_full.argtypes = [c_void_p, c_void_p, c_void_p, c_void_p]
+        lgeos.GEOSPolygonize_full.argtypes = [
+            c_void_p, c_void_p, c_void_p, c_void_p]
 
     if geos_version >= (3, 4, 0):
         lgeos.GEOSDelaunayTriangulation.restype = c_void_p
         lgeos.GEOSDelaunayTriangulation.argtypes = [c_void_p, c_double, c_int]
 
-
     lgeos.GEOSLineMerge.restype = c_void_p
     lgeos.GEOSLineMerge.argtypes = [c_void_p]
 
@@ -234,10 +228,8 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSTopologyPreserveSimplify.restype = c_void_p
     lgeos.GEOSTopologyPreserveSimplify.argtypes = [c_void_p, c_double]
 
-    '''
-    Binary predicates
-    Return 2 on exception, 1 on true, 0 on false
-    '''
+    # Binary predicates
+    # Return 2 on exception, 1 on true, 0 on false
 
     lgeos.GEOSDisjoint.restype = c_byte
     lgeos.GEOSDisjoint.argtypes = [c_void_p, c_void_p]
@@ -269,10 +261,8 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSEqualsExact.restype = c_byte
     lgeos.GEOSEqualsExact.argtypes = [c_void_p, c_void_p, c_double]
 
-    '''
-    Unary predicate
-    Return 2 on exception, 1 on true, 0 on false
-    '''
+    # Unary predicate
+    # Return 2 on exception, 1 on true, 0 on false
 
     lgeos.GEOSisEmpty.restype = c_byte
     lgeos.GEOSisEmpty.argtypes = [c_void_p]
@@ -297,9 +287,7 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSHasZ.restype = c_byte
     lgeos.GEOSHasZ.argtypes = [c_void_p]
 
-    '''
-    Dimensionally Extended 9 Intersection Model related
-    '''
+    # Dimensionally Extended 9 Intersection Model related
 
     lgeos.GEOSRelate.restype = allocated_c_char_p
     lgeos.GEOSRelate.argtypes = [c_void_p, c_void_p]
@@ -311,10 +299,8 @@ def prototype(lgeos, geos_version):
         lgeos.GEOSRelatePatternMatch.restype = c_byte
         lgeos.GEOSRelatePatternMatch.argtypes = [c_char_p, c_char_p]
 
-    '''
-    Prepared Geometry Binary predicates
-    Return 2 on exception, 1 on true, 0 on false
-    '''
+    # Prepared Geometry Binary predicates
+    # Return 2 on exception, 1 on true, 0 on false
 
     if geos_version >= (3, 1, 0):
 
@@ -351,10 +337,7 @@ def prototype(lgeos, geos_version):
         lgeos.GEOSPreparedCovers.restype = c_byte
         lgeos.GEOSPreparedCovers.argtypes = [c_void_p, c_void_p]
 
-
-    '''
-    Geometry info
-    '''
+    # Geometry info
 
     lgeos.GEOSGeomType.restype = c_char_p
     lgeos.GEOSGeomType.argtypes = [c_void_p]
@@ -392,9 +375,7 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSGeom_getDimensions.restype = c_int
     lgeos.GEOSGeom_getDimensions.argtypes = [c_void_p]
 
-    '''
-    Misc functions
-    '''
+    # Misc functions
 
     lgeos.GEOSArea.restype = c_double
     lgeos.GEOSArea.argtypes = [c_void_p, c_void_p]
@@ -405,11 +386,12 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSDistance.restype = c_int
     lgeos.GEOSDistance.argtypes = [c_void_p, c_void_p, c_void_p]
 
-    '''
-    Reader and Writer APIs
-    '''
+    if geos_version >= (3, 2, 0):
+        lgeos.GEOSHausdorffDistance.restype = c_int
+        lgeos.GEOSHausdorffDistance.argtypes = [c_void_p, c_void_p, c_void_p]
+
+    # Reader and Writer APIs
 
-    '''WKT Reader'''
     lgeos.GEOSWKTReader_create.restype = c_void_p
     lgeos.GEOSWKTReader_create.argtypes = []
 
@@ -419,7 +401,6 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSWKTReader_read.restype = c_void_p
     lgeos.GEOSWKTReader_read.argtypes = [c_void_p, c_char_p]
 
-    '''WKT Writer'''
     lgeos.GEOSWKTWriter_create.restype = c_void_p
     lgeos.GEOSWKTWriter_create.argtypes = []
 
@@ -446,7 +427,6 @@ def prototype(lgeos, geos_version):
         lgeos.GEOSWKTWriter_setOld3D.restype = None
         lgeos.GEOSWKTWriter_setOld3D.argtypes = [c_void_p, c_int]
 
-    '''WKB Reader'''
     lgeos.GEOSWKBReader_create.restype = c_void_p
     lgeos.GEOSWKBReader_create.argtypes = []
 
@@ -459,7 +439,6 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSWKBReader_readHEX.restype = c_void_p
     lgeos.GEOSWKBReader_readHEX.argtypes = [c_void_p, c_char_p, c_size_t]
 
-    '''WKB Writer'''
     lgeos.GEOSWKBWriter_create.restype = c_void_p
     lgeos.GEOSWKBWriter_create.argtypes = []
 
@@ -491,12 +470,6 @@ def prototype(lgeos, geos_version):
     lgeos.GEOSWKBWriter_setIncludeSRID.argtypes = [c_void_p, c_int]
 
     if geos_version >= (3, 1, 1):
-
-        '''
-        Free buffers returned by stuff like GEOSWKBWriter_write(),
-        GEOSWKBWriter_writeHEX() and GEOSWKTWriter_write()
-        '''
-
         lgeos.GEOSFree.restype = None
         lgeos.GEOSFree.argtypes = [c_void_p]
 
@@ -504,6 +477,9 @@ def prototype(lgeos, geos_version):
         lgeos.GEOSSnap.restype = c_void_p
         lgeos.GEOSSnap.argtypes = [c_void_p, c_void_p, c_double]
 
+        lgeos.GEOSSharedPaths.restype = c_void_p
+        lgeos.GEOSSharedPaths.argtypes = [c_void_p, c_void_p]
+
     if geos_version >= (3, 4, 0):
         lgeos.GEOSNearestPoints.restype = c_void_p
         lgeos.GEOSNearestPoints.argtypes = [c_void_p, c_void_p]
@@ -511,7 +487,8 @@ def prototype(lgeos, geos_version):
     if geos_version >= (3, 4, 2):
         lgeos.GEOSQueryCallback = CFUNCTYPE(None, c_void_p, c_void_p)
 
-        lgeos.GEOSSTRtree_query.argtypes = [c_void_p, c_void_p, lgeos.GEOSQueryCallback, py_object]
+        lgeos.GEOSSTRtree_query.argtypes = [
+            c_void_p, c_void_p, lgeos.GEOSQueryCallback, py_object]
         lgeos.GEOSSTRtree_query.restype = None
 
         lgeos.GEOSSTRtree_create.argtypes = [c_int]
@@ -525,4 +502,3 @@ def prototype(lgeos, geos_version):
 
         lgeos.GEOSSTRtree_destroy.argtypes = [c_void_p]
         lgeos.GEOSSTRtree_destroy.restype = None
-
diff --git a/shapely/geometry/__init__.py b/shapely/geometry/__init__.py
index 50ddbc2..1180498 100644
--- a/shapely/geometry/__init__.py
+++ b/shapely/geometry/__init__.py
@@ -18,3 +18,6 @@ __all__ = [
     'GeometryCollection', 'mapping', 'LinearRing', 'asLinearRing',
     'CAP_STYLE', 'JOIN_STYLE',
 ]
+
+# This needs to be called here to avoid circular references
+import shapely.speedups
diff --git a/shapely/geometry/base.py b/shapely/geometry/base.py
index ad97568..6f8a3a0 100644
--- a/shapely/geometry/base.py
+++ b/shapely/geometry/base.py
@@ -1,23 +1,33 @@
 """Base geometry class and utilities
+
+Note: a third, z, coordinate value may be used when constructing
+geometry objects, but has no effect on geometric analysis. All
+operations are performed in the x-y plane. Thus, geometries with
+different z values may intersect or be equal.
 """
 
-import sys
-from warnings import warn
 from binascii import a2b_hex
 from ctypes import pointer, c_size_t, c_char_p, c_void_p
+from itertools import islice
+import math
+import sys
+from warnings import warn
 
+from shapely.affinity import affine_transform
 from shapely.coords import CoordinateSequence
 from shapely.ftools import wraps
-from shapely.geos import lgeos, ReadingError
 from shapely.geos import WKBWriter, WKTWriter
+from shapely.geos import lgeos, ReadingError
 from shapely.impl import DefaultImplementation, delegated
 
+
 if sys.version_info[0] < 3:
     range = xrange
     integer_types = (int, long)
 else:
     integer_types = (int,)
 
+
 try:
     import numpy as np
     integer_types = integer_types + (np.integer,)
@@ -145,6 +155,7 @@ def geos_geom_from_py(ob, create_func=None):
 
     return geom, N
 
+
 def exceptNull(func):
     """Decorator which helps avoid GEOS operations on null pointers."""
     @wraps(func)
@@ -423,6 +434,10 @@ class BaseGeometry(object):
         """Unitless distance to other geometry (float)"""
         return self.impl['distance'](self, other)
 
+    def hausdorff_distance(self, other):
+        """Unitless hausdorff distance to other geometry (float)"""
+        return self.impl['hausdorff_distance'](self, other)
+
     @property
     def length(self):
         """Unitless length of the geometry (float)"""
@@ -474,6 +489,45 @@ class BaseGeometry(object):
         """A figure that envelopes the geometry"""
         return geom_factory(self.impl['envelope'](self))
 
+    @property
+    def minimum_rotated_rectangle(self):
+        """Returns the general minimum bounding rectangle of
+        the geometry. Can possibly be rotated. If the convex hull
+        of the object is a degenerate (line or point) this same degenerate
+        is returned.
+        """
+        # first compute the convex hull
+        hull = self.convex_hull
+        try:
+            coords = hull.exterior.coords
+        except AttributeError:  # may be a Point or a LineString
+            return hull
+        # generate the edge vectors between the convex hull's coords
+        edges = ((pt2[0] - pt1[0], pt2[1] - pt1[1]) for pt1, pt2 in zip(
+            coords, islice(coords, 1, None)))
+
+        def _transformed_rects():
+            for dx, dy in edges:
+                # compute the normalized direction vector of the edge
+                # vector.
+                length = math.sqrt(dx ** 2 + dy ** 2)
+                ux, uy = dx / length, dy / length
+                # compute the normalized perpendicular vector
+                vx, vy = -uy, ux
+                # transform hull from the original coordinate system to
+                # the coordinate system defined by the edge and compute
+                # the axes-parallel bounding rectangle.
+                transf_rect = affine_transform(
+                    hull, (ux, uy, vx, vy, 0, 0)).envelope
+                # yield the transformed rectangle and a matrix to
+                # transform it back to the original coordinate system.
+                yield (transf_rect, (ux, vx, uy, vy, 0, 0))
+
+        # check for the minimum area rectangle and return it
+        transf_rect, inv_matrix = min(
+            _transformed_rects(), key=lambda r: r[0].area)
+        return affine_transform(transf_rect, inv_matrix)
+
     def buffer(self, distance, resolution=16, quadsegs=None,
                cap_style=CAP_STYLE.round, join_style=JOIN_STYLE.round,
                mitre_limit=5.0):
@@ -666,7 +720,6 @@ class BaseGeometry(object):
     def equals_exact(self, other, tolerance):
         """Returns True if geometries are equal to within a specified
         tolerance"""
-        # return BinaryPredicateOp('equals_exact', self)(other, tolerance)
         return bool(self.impl['equals_exact'](self, other, tolerance))
 
     def almost_equals(self, other, decimal=6):
diff --git a/shapely/geometry/linestring.py b/shapely/geometry/linestring.py
index 6ddd010..8be43bb 100644
--- a/shapely/geometry/linestring.py
+++ b/shapely/geometry/linestring.py
@@ -91,7 +91,9 @@ class LineString(BaseGeometry):
     # Coordinate access
     def _set_coords(self, coordinates):
         self.empty()
-        self._geom, self._ndim = geos_linestring_from_py(coordinates)
+        ret = geos_linestring_from_py(coordinates)
+        if ret is not None:
+            self._geom, self._ndim = ret
 
     coords = property(BaseGeometry._get_coords, _set_coords)
 
@@ -253,9 +255,8 @@ def geos_linestring_from_py(ob, update_geom=None, update_ndim=0):
             ob = list(ob)
             m = len(ob)
 
-        if m < 2:
-            raise ValueError(
-                "LineStrings must have at least 2 coordinate tuples")
+        if m == 0:
+            return None
 
         def _coords(o):
             if isinstance(o, Point):
diff --git a/shapely/geometry/polygon.py b/shapely/geometry/polygon.py
index 91f33fc..e492fae 100644
--- a/shapely/geometry/polygon.py
+++ b/shapely/geometry/polygon.py
@@ -65,7 +65,9 @@ class LinearRing(LineString):
 
     def _set_coords(self, coordinates):
         self.empty()
-        self._geom, self._ndim = geos_linearring_from_py(coordinates)
+        ret = geos_linearring_from_py(coordinates)
+        if ret is not None:
+            self._geom, self._ndim = ret
 
     coords = property(_get_coords, _set_coords)
 
@@ -226,7 +228,11 @@ class Polygon(BaseGeometry):
         BaseGeometry.__init__(self)
 
         if shell is not None:
-            self._geom, self._ndim = geos_polygon_from_py(shell, holes)
+            ret = geos_polygon_from_py(shell, holes)
+            if ret is not None:
+                self._geom, self._ndim = ret
+            else:
+                self.empty()
 
     @property
     def exterior(self):
@@ -333,6 +339,15 @@ class Polygon(BaseGeometry):
             'stroke-width="{0}" opacity="0.6" d="{1}" />'
             ).format(2. * scale_factor, path, fill_color)
 
+    @classmethod
+    def from_bounds(cls, xmin, ymin, xmax, ymax):
+        """Construct a `Polygon()` from spatial bounds."""
+        return cls([
+            (xmin, ymin),
+            (xmin, ymax),
+            (xmax, ymax),
+            (xmax, ymin)])
+
 
 class PolygonAdapter(PolygonProxy, Polygon):
 
@@ -447,6 +462,9 @@ def geos_linearring_from_py(ob, update_geom=None, update_ndim=0):
             ob = list(ob)
             m = len(ob)
 
+        if m == 0:
+            return None
+
         n = len(ob[0])
         if m < 3:
             raise ValueError(
@@ -505,7 +523,10 @@ def geos_polygon_from_py(shell, holes=None):
         return geos_geom_from_py(shell)
 
     if shell is not None:
-        geos_shell, ndim = geos_linearring_from_py(shell)
+        ret = geos_linearring_from_py(shell)
+        if ret is None:
+            return None
+        geos_shell, ndim = ret
         if holes is not None and len(holes) > 0:
             ob = holes
             L = len(ob)
diff --git a/shapely/geos.py b/shapely/geos.py
index f4b7cf9..190f909 100644
--- a/shapely/geos.py
+++ b/shapely/geos.py
@@ -2,15 +2,16 @@
 Proxies for libgeos, GEOS-specific exceptions, and utilities
 """
 
+import atexit
+from ctypes import (
+    CDLL, cdll, pointer, string_at, DEFAULT_MODE, c_void_p, c_size_t, c_char_p)
+from ctypes.util import find_library
+import glob
+import logging
 import os
 import re
 import sys
-import atexit
-import logging
 import threading
-from ctypes import CDLL, cdll, pointer, string_at, cast, POINTER, DEFAULT_MODE
-from ctypes import c_void_p, c_size_t, c_char_p, c_int, c_float
-from ctypes.util import find_library
 
 from .ctypes_declarations import prototype, EXCEPTION_HANDLER_FUNCTYPE
 from . import ftools
@@ -63,7 +64,16 @@ def load_dll(libname, fallbacks=None, mode=DEFAULT_MODE):
 _lgeos = None
 
 if sys.platform.startswith('linux'):
-    _lgeos = load_dll('geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so'])
+    # Test to see if we have a wheel repaired by 'auditwheel' containing its
+    # own libgeos_c
+    geos_whl_so = glob.glob(os.path.abspath(os.path.join(os.path.dirname(
+        __file__), '.libs/libgeos_c-*.so.*')))
+    if len(geos_whl_so) == 1:
+        _lgeos = CDLL(geos_whl_so[0])
+        LOG.debug("Found GEOS DLL: %r, using it.", _lgeos)
+    else:
+        _lgeos = load_dll(
+            'geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so'])
     free = load_dll('c').free
     free.argtypes = [c_void_p]
     free.restype = None
@@ -80,12 +90,16 @@ elif sys.platform == 'darwin':
         if hasattr(sys, 'frozen'):
             try:
                 # .app file from py2app
-                alt_paths = [os.path.join(os.environ['RESOURCEPATH'],
-                            '..', 'Frameworks', 'libgeos_c.dylib')]
+                alt_paths = [os.path.join(
+                    os.environ['RESOURCEPATH'], '..', 'Frameworks',
+                    'libgeos_c.dylib')]
             except KeyError:
                 # binary from pyinstaller
                 alt_paths = [
                     os.path.join(sys.executable, 'libgeos_c.dylib')]
+                if hasattr(sys, '_MEIPASS'):
+                    alt_paths.append(
+                        os.path.join(sys._MEIPASS, 'libgeos_c.1.dylib'))
         else:
             alt_paths = [
                 # The Framework build from Kyng Chaos
@@ -111,7 +125,7 @@ elif sys.platform == 'win32':
         original_path = os.environ['PATH']
         os.environ['PATH'] = "%s;%s;%s" % \
             (egg_dlls, wininst_dlls, original_path)
-        _lgeos = CDLL("geos.dll")
+        _lgeos = CDLL("geos_c.dll")
     except (ImportError, WindowsError, OSError):
         raise
 
@@ -135,11 +149,9 @@ else:  # other *nix systems
 
 
 def _geos_version():
-    # extern const char GEOS_DLL *GEOSversion();
     GEOSversion = _lgeos.GEOSversion
     GEOSversion.restype = c_char_p
     GEOSversion.argtypes = []
-    #define GEOS_CAPI_VERSION "@VERSION at -CAPI-@CAPI_VERSION@"
     geos_version_string = GEOSversion()
     if sys.version_info[0] >= 3:
         geos_version_string = geos_version_string.decode('ascii')
@@ -205,17 +217,20 @@ class TopologicalError(Exception):
 class PredicateError(Exception):
     pass
 
-# While this function can take any number of positional arguments when
-# called from Python and GEOS expects its error handler to accept any
-# number of arguments (like printf), I'm unable to get ctypes to make
-# a callback object from this function that will accept any number of
-# arguments.
-#
-# At the moment, functions in the GEOS C API only pass 0 or 1 arguments
-# to the error handler. We can deal with this, but when if that changes,
-# Shapely may break.
 
 def handler(level):
+    """Error handler
+
+    While this function can take any number of positional arguments when
+    called from Python and GEOS expects its error handler to accept any
+    number of arguments (like printf), I'm unable to get ctypes to make
+    a callback object from this function that will accept any number of
+    arguments.
+
+    At the moment, functions in the GEOS C API only pass 0 or
+    1 arguments to the error handler. We can deal with this, but if
+    that changes, Shapely may break.
+    """
     def callback(fmt, *args):
         fmt = fmt.decode('ascii')
         conversions = re.findall(r'%.', fmt)
@@ -535,13 +550,20 @@ def errcheck_just_free(result, func, argtuple):
     else:
         return retval
 
+
 def errcheck_null_exception(result, func, argtuple):
-    """Wraps errcheck_just_free, raising a TopologicalError if result is NULL"""
+    """Wraps errcheck_just_free
+
+    Raises TopologicalError if result is NULL.
+    """
     if not result:
-        raise TopologicalError("The operation '{0}' could not be performed."
-            "Likely cause is invalidity of the geometry.".format(func.__name__))
+        raise TopologicalError(
+            "The operation '{0}' could not be performed."
+            "Likely cause is invalidity of the geometry.".format(
+                func.__name__))
     return errcheck_just_free(result, func, argtuple)
 
+
 def errcheck_predicate(result, func, argtuple):
     """Result is 2 on exception, 1 on True, 0 on False"""
     if result == 2:
@@ -758,12 +780,16 @@ class LGEOS320(LGEOS311):
         super(LGEOS320, self).__init__(dll)
 
         if geos_version >= (3, 2, 0):
-            def parallel_offset(geom, distance, resolution=16, join_style=1, mitre_limit=5.0, side='right'):
+
+            def parallel_offset(geom, distance, resolution=16, join_style=1,
+                                mitre_limit=5.0, side='right'):
                 side = side == 'left'
                 if distance < 0:
                     distance = abs(distance)
                     side = not side
-                return self.GEOSSingleSidedBuffer(geom, distance, resolution, join_style, mitre_limit, side)
+                return self.GEOSSingleSidedBuffer(
+                    geom, distance, resolution, join_style, mitre_limit, side)
+
             self.methods['parallel_offset'] = parallel_offset
 
         self.methods['project'] = self.GEOSProject
@@ -772,6 +798,7 @@ class LGEOS320(LGEOS311):
         self.methods['interpolate_normalized'] = \
             self.GEOSInterpolateNormalized
         self.methods['buffer_with_style'] = self.GEOSBufferWithStyle
+        self.methods['hausdorff_distance'] = self.GEOSHausdorffDistance
 
 
 class LGEOS330(LGEOS320):
@@ -794,16 +821,19 @@ class LGEOS330(LGEOS320):
         for pred in (self.GEOSisClosed,):
             pred.func.errcheck = errcheck_predicate
 
-        def parallel_offset(geom, distance, resolution=16, join_style=1, mitre_limit=5.0, side='right'):
+        def parallel_offset(geom, distance, resolution=16, join_style=1,
+                            mitre_limit=5.0, side='right'):
             if side == 'right':
                 distance *= -1
-            return self.GEOSOffsetCurve(geom, distance, resolution, join_style, mitre_limit)
-        self.methods['parallel_offset'] = parallel_offset
+            return self.GEOSOffsetCurve(
+                geom, distance, resolution, join_style, mitre_limit)
 
+        self.methods['parallel_offset'] = parallel_offset
         self.methods['unary_union'] = self.GEOSUnaryUnion
         self.methods['is_closed'] = self.GEOSisClosed
         self.methods['cascaded_union'] = self.methods['unary_union']
         self.methods['snap'] = self.GEOSSnap
+        self.methods['shared_paths'] = self.GEOSSharedPaths
 
 
 class LGEOS340(LGEOS330):
diff --git a/shapely/impl.py b/shapely/impl.py
index 828e0ae..99a2d49 100644
--- a/shapely/impl.py
+++ b/shapely/impl.py
@@ -140,6 +140,7 @@ IMPL320 = {
     'interpolate_normalized': (InterpolateOp, 'interpolate_normalized'),
     'interpolate': (InterpolateOp, 'interpolate'),
     'buffer_with_style': (UnaryTopologicalOp, 'buffer_with_style'),
+    'hausdorff_distance': (BinaryRealProperty, 'hausdorff_distance'),
     }
 
 IMPL330 = {
diff --git a/shapely/ops.py b/shapely/ops.py
index b1aff95..ebefeb2 100644
--- a/shapely/ops.py
+++ b/shapely/ops.py
@@ -12,10 +12,12 @@ from ctypes import byref, c_void_p, c_double
 
 from shapely.geos import lgeos
 from shapely.geometry.base import geom_factory, BaseGeometry
-from shapely.geometry import asShape, asLineString, asMultiLineString, Point
+from shapely.geometry import asShape, asLineString, asMultiLineString, Point, MultiPoint,\
+                             LineString, MultiLineString, Polygon, MultiPolygon, GeometryCollection
+from shapely.algorithms.polylabel import polylabel
 
 __all__ = ['cascaded_union', 'linemerge', 'operator', 'polygonize',
-           'polygonize_full', 'transform', 'unary_union', 'triangulate']
+           'polygonize_full', 'transform', 'unary_union', 'triangulate', 'split']
 
 
 class CollectionOperator(object):
@@ -301,3 +303,173 @@ def snap(g1, g2, tolerance):
     'LINESTRING (0 0, 1 1, 2 1, 2.6 0.5)'
     """
     return(geom_factory(lgeos.methods['snap'](g1._geom, g2._geom, tolerance)))
+
+def shared_paths(g1, g2):
+    """Find paths shared between the two given lineal geometries
+
+    Returns a GeometryCollection with two elements:
+     - First element is a MultiLineString containing shared paths with the
+       same direction for both inputs.
+     - Second element is a MultiLineString containing shared paths with the
+       opposite direction for the two inputs.
+
+    Parameters
+    ----------
+    g1 : geometry
+        The first geometry
+    g2 : geometry
+        The second geometry
+    """
+    if not isinstance(g1, LineString):
+        raise TypeError("First geometry must be a LineString")
+    if not isinstance(g2, LineString):
+        raise TypeError("Second geometry must be a LineString")
+    return(geom_factory(lgeos.methods['shared_paths'](g1._geom, g2._geom)))
+
+
+class SplitOp(object):
+
+    @staticmethod
+    def _split_polygon_with_line(poly, splitter):
+        """Split a Polygon with a LineString"""
+
+        assert(isinstance(poly, Polygon))
+        assert(isinstance(splitter, LineString))
+
+        union = poly.boundary.union(splitter)
+
+        # some polygonized geometries may be holes, we do not want them
+        # that's why we test if the original polygon (poly) contains 
+        # an inner point of polygonized geometry (pg)
+        return [pg for pg in polygonize(union) if poly.contains(pg.representative_point())]
+
+    @staticmethod
+    def _split_line_with_line(line, splitter):
+        """Split a LineString with another (Multi)LineString or (Multi)Polygon"""
+        
+        # if splitter is a polygon, pick its boundary
+        if splitter.type in ('Polygon', 'MultiPolygon'):
+            splitter = splitter.boundary
+
+        assert(isinstance(line, LineString))
+        assert(isinstance(splitter, LineString) or isinstance(splitter, MultiLineString))
+        
+        if splitter.crosses(line):
+            # The lines cross --> return multilinestring from the split
+            return line.difference(splitter)
+        elif splitter.relate_pattern(line, '1********'):
+            # The lines overlap at some segment (linear intersection of interiors)
+            raise ValueError('Input geometry segment overlaps with the splitter.')
+        else:
+            # The lines do not cross --> return collection with identity line
+            return [line]
+
+    @staticmethod  
+    def _split_line_with_point(line, splitter):
+        """Split a LineString with a Point"""
+
+        assert(isinstance(line, LineString))
+        assert(isinstance(splitter, Point))
+
+        # check if point is in the interior of the line
+        if not line.relate_pattern(splitter, '0********'):
+            # point not on line interior --> return collection with single identity line
+            # (REASONING: Returning a list with the input line reference and creating a 
+            # GeometryCollection at the general split function prevents unnecessary copying 
+            # of linestrings in multipoint splitting function)
+            return [line]
+
+        # point is on line, get the distance from the first point on line
+        distance_on_line = line.project(splitter)
+        coords = list(line.coords)
+        # split the line at the point and create two new lines
+        # TODO: can optimize this by accumulating the computed point-to-point distances
+        for i, p in enumerate(coords):
+            pd = line.project(Point(p))
+            if pd == distance_on_line:
+                return [
+                    LineString(coords[:i+1]), 
+                    LineString(coords[i:])
+                ]
+            elif distance_on_line < pd:
+                # we must interpolate here because the line might use 3D points
+                cp = line.interpolate(distance_on_line)
+                ls1_coords = coords[:i]
+                ls1_coords.append(cp.coords[0])
+                ls2_coords = [cp.coords[0]]
+                ls2_coords.extend(coords[i:])
+                return [LineString(ls1_coords), LineString(ls2_coords)]
+
+    @staticmethod
+    def _split_line_with_multipoint(line, splitter):
+        """Split a LineString with a MultiPoint"""
+
+        assert(isinstance(line, LineString))
+        assert(isinstance(splitter, MultiPoint))
+        
+        chunks = [line]
+        for pt in splitter.geoms:
+            new_chunks = []
+            for chunk in chunks:
+                # add the newly split 2 lines or the same line if not split
+                new_chunks.extend(SplitOp._split_line_with_point(chunk, pt))
+            chunks = new_chunks
+        
+        return chunks
+    
+    @staticmethod
+    def split(geom, splitter):
+        """
+        Splits a geometry by another geometry and returns a collection of geometries. This function is the theoretical
+        opposite of the union of the split geometry parts. If the splitter does not split the geometry, a collection
+        with a single geometry equal to the input geometry is returned.
+        The function supports:
+          - Splitting a (Multi)LineString by a (Multi)Point or (Multi)LineString or (Multi)Polygon
+          - Splitting a (Multi)Polygon by a LineString
+
+        It may be convenient to snap the splitter with low tolerance to the geometry. For example in the case
+        of splitting a line by a point, the point must be exactly on the line, for the line to be correctly split.
+        When splitting a line by a polygon, the boundary of the polygon is used for the operation.
+        When splitting a line by another line, a ValueError is raised if the two overlap at some segment.
+
+        Parameters
+        ----------
+        geom : geometry
+            The geometry to be split
+        splitter : geometry
+            The geometry that will split the input geom
+
+        Example
+        -------
+        >>> pt = Point((1, 1))
+        >>> line = LineString([(0,0), (2,2)])
+        >>> result = split(line, pt)
+        >>> result.wkt
+        'GEOMETRYCOLLECTION (LINESTRING (0 0, 1 1), LINESTRING (1 1, 2 2))'
+        """
+
+        if geom.type in ('MultiLineString', 'MultiPolygon'):
+             return GeometryCollection([i for part in geom.geoms for i in SplitOp.split(part, splitter).geoms])
+
+        elif geom.type == 'LineString':
+            if splitter.type in ('LineString', 'MultiLineString', 'Polygon', 'MultiPolygon'):
+                split_func = SplitOp._split_line_with_line
+            elif splitter.type in ('Point'):
+                split_func = SplitOp._split_line_with_point
+            elif splitter.type in ('MultiPoint'):
+                split_func =  SplitOp._split_line_with_multipoint
+            else:
+                raise ValueError("Splitting a LineString with a %s is not supported" % splitter.type)
+
+        elif geom.type == 'Polygon':
+            if splitter.type == 'LineString':
+                split_func = SplitOp._split_polygon_with_line
+            else:
+                raise ValueError("Splitting a Polygon with a %s is not supported" % splitter.type)
+
+        else:
+            raise ValueError("Splitting %s geometry is not supported" % geom.type)
+
+        return GeometryCollection(split_func(geom, splitter))
+
+split = SplitOp.split
diff --git a/shapely/speedups/__init__.py b/shapely/speedups/__init__.py
index 2e84a1f..cc30606 100644
--- a/shapely/speedups/__init__.py
+++ b/shapely/speedups/__init__.py
@@ -20,10 +20,29 @@ def method_wrapper(f):
         return f(*args, **kwargs)
     return wraps(f)(wrapper)
 
-__all__ = ['available', 'enable', 'disable']
+__all__ = ['available', 'enable', 'disable', 'enabled']
 _orig = {}
 
+# keep track of whether speedups are enabled
+enabled = False
+
 def enable():
+    """Enable Cython speedups
+
+    The shapely.speedups module contains performance enhancements written in C.
+    They are automaticaly installed when Python has access to a compiler and
+    GEOS development headers during installation, and are enabled by default.
+
+    You can check if speedups are installed with the `available` attribute, and
+    check if they have been enabled with the `enabled` attribute.
+
+    >>> from shapely import speedups
+    >>> speedups.available
+    True
+    >>> speedups.enable()
+    >>> speedups.enabled
+    True
+    """
     if not available:
         warnings.warn("shapely.speedups not available", RuntimeWarning)
         return
@@ -50,7 +69,12 @@ def enable():
     affine_transform.__doc__ = shapely.affinity.affine_transform.__doc__
     shapely.affinity.affine_transform = affine_transform
 
+    global enabled
+    enabled = True
+
 def disable():
+    """Disable Cython speedups
+    """
     if not _orig:
         return
 
@@ -60,3 +84,10 @@ def disable():
     polygon.geos_linearring_from_py = _orig['geos_linearring_from_py']
     shapely.affinity.affine_transform = _orig['affine_transform']
     _orig.clear()
+
+    global enabled
+    enabled = False
+
+# if cython speedups are available, use them by default
+if available:
+    enable()
diff --git a/shapely/speedups/_speedups.pyx b/shapely/speedups/_speedups.pyx
index b5e9a6d..458b135 100644
--- a/shapely/speedups/_speedups.pyx
+++ b/shapely/speedups/_speedups.pyx
@@ -123,9 +123,9 @@ def geos_linestring_from_py(ob, update_geom=None, update_ndim=0):
         except TypeError:  # Iterators, e.g. Python 3 zip
             ob = list(ob)
             m = len(ob)
-        if m < 2:
-            raise ValueError(
-                "LineStrings must have at least 2 coordinate tuples")
+
+        if m == 0:
+            return None
 
         def _coords(o):
             if isinstance(o, Point):
@@ -287,6 +287,10 @@ def geos_linearring_from_py(ob, update_geom=None, update_ndim=0):
         except TypeError:  # Iterators, e.g. Python 3 zip
             ob = list(ob)
             m = len(ob)
+
+        if m == 0:
+            return None
+
         n = len(ob[0])
         if m < 3:
             raise ValueError(
diff --git a/shapely/vectorized/_vectorized.pyx b/shapely/vectorized/_vectorized.pyx
index 4586823..f9991ca 100644
--- a/shapely/vectorized/_vectorized.pyx
+++ b/shapely/vectorized/_vectorized.pyx
@@ -75,10 +75,12 @@ cdef _predicated_elementwise(geometry, x, y, predicate fn):
     if x.shape != y.shape:
         raise ValueError('X and Y shapes must be equivalent.')
 
-    x_1d = x.astype(np.float64, copy=False).ravel()
-    y_1d = y.astype(np.float64, copy=False).ravel()
+    if x.dtype != np.float64:
+        x = x.astype(np.float64)
+    if y.dtype != np.float64:
+        y = y.astype(np.float64)
 
-    result = _predicated_1d(geometry, x_1d, y_1d, fn)
+    result = _predicated_1d(geometry, x.ravel(), y.ravel(), fn)
     return result.reshape(x.shape)
 
 
@@ -89,7 +91,7 @@ cdef _predicated_1d(geometry, np.double_t[:] x, np.double_t[:] y, predicate fn):
     
     cdef Py_ssize_t idx
     cdef unsigned int n = x.size
-    cdef np.ndarray[np.uint8_t, ndim=1, cast=True] result = np.empty(n, dtype=np.bool)
+    cdef np.ndarray[np.uint8_t, ndim=1, cast=True] result = np.empty(n, dtype=np.uint8)
     cdef GEOSContextHandle_t geos_handle
     cdef GEOSPreparedGeometry *geos_prepared_geom
     cdef GEOSCoordSequence *cs
@@ -117,4 +119,4 @@ cdef _predicated_1d(geometry, np.double_t[:] x, np.double_t[:] y, predicate fn):
             result[idx] = <np.uint8_t> fn(geos_h, geos_geom, p)
             GEOSGeom_destroy_r(geos_h, p)
 
-    return result
+    return result.view(dtype=np.bool)
diff --git a/tests/conftest.py b/tests/conftest.py
index 3fe9d4e..133c2fd 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -5,6 +5,8 @@ import pytest
 def pytest_addoption(parser):
     parser.addoption("--with-speedups", action="store_true", default=False,
         help="Run tests with speedups.")
+    parser.addoption("--without-speedups", action="store_true", default=False,
+        help="Run tests without speedups.")
 
 def pytest_runtest_setup(item):
     if item.config.getoption("--with-speedups"):
@@ -13,4 +15,10 @@ def pytest_runtest_setup(item):
             print("Speedups have been demanded but are unavailable")
             sys.exit(1)
         shapely.speedups.enable()
+        assert(shapely.speedups.enabled is True)
         print("Speedups enabled for %s." % item.name)
+    elif item.config.getoption("--without-speedups"):
+        import shapely.speedups
+        shapely.speedups.disable()
+        assert(shapely.speedups.enabled is False)
+        print("Speedups disabled for %s." % item.name)
diff --git a/tests/test_emptiness.py b/tests/test_emptiness.py
index 301bad1..541224e 100644
--- a/tests/test_emptiness.py
+++ b/tests/test_emptiness.py
@@ -3,6 +3,7 @@ from shapely.geometry.base import BaseGeometry
 import shapely.geometry as sgeom
 from shapely.geometry.polygon import LinearRing
 
+empty_generator = lambda: iter([])
 
 class EmptinessTestCase(unittest.TestCase):
 
@@ -32,18 +33,27 @@ class EmptinessTestCase(unittest.TestCase):
 
     def test_empty_linestring(self):
         self.assertTrue(sgeom.LineString().is_empty)
+        self.assertTrue(sgeom.LineString(None).is_empty)
+        self.assertTrue(sgeom.LineString([]).is_empty)
+        self.assertTrue(sgeom.LineString(empty_generator()).is_empty)
 
     def test_empty_multilinestring(self):
         self.assertTrue(sgeom.MultiLineString([]).is_empty)
 
     def test_empty_polygon(self):
         self.assertTrue(sgeom.Polygon().is_empty)
+        self.assertTrue(sgeom.Polygon(None).is_empty)
+        self.assertTrue(sgeom.Polygon([]).is_empty)
+        self.assertTrue(sgeom.Polygon(empty_generator()).is_empty)
 
     def test_empty_multipolygon(self):
         self.assertTrue(sgeom.MultiPolygon([]).is_empty)
 
     def test_empty_linear_ring(self):
         self.assertTrue(LinearRing().is_empty)
+        self.assertTrue(LinearRing(None).is_empty)
+        self.assertTrue(LinearRing([]).is_empty)
+        self.assertTrue(LinearRing(empty_generator()).is_empty)
 
 
 def test_suite():
diff --git a/tests/test_minimum_rotated_rectangle.py b/tests/test_minimum_rotated_rectangle.py
new file mode 100644
index 0000000..137effe
--- /dev/null
+++ b/tests/test_minimum_rotated_rectangle.py
@@ -0,0 +1,33 @@
+from . import unittest
+from shapely import geometry
+
+class MinimumRotatedRectangleTestCase(unittest.TestCase):
+    
+    def test_minimum_rectangle(self):
+        poly = geometry.Polygon([(0,1), (1, 2), (2, 1), (1, 0), (0, 1)])
+        rect = poly.minimum_rotated_rectangle
+        self.assertIsInstance(rect, geometry.Polygon)
+        self.assertEqual(rect.area - poly.area < 0.1, True)
+        self.assertEqual(len(rect.exterior.coords), 5)
+        
+        ls = geometry.LineString([(0,1), (1, 2), (2, 1), (1, 0)])
+        rect = ls.minimum_rotated_rectangle
+        self.assertIsInstance(rect, geometry.Polygon)
+        self.assertIsInstance(rect, geometry.Polygon)
+        self.assertEqual(rect.area - ls.convex_hull.area < 0.1, True)
+        self.assertEqual(len(rect.exterior.coords), 5)
+
+    def test_digenerate(self):
+        rect = geometry.Point((0,1)).minimum_rotated_rectangle
+        self.assertIsInstance(rect, geometry.Point)
+        self.assertEqual(len(rect.coords), 1)
+        self.assertEqual(rect.coords[0], (0,1))
+
+        rect = geometry.LineString([(0,0),(2,2)]).minimum_rotated_rectangle
+        self.assertIsInstance(rect, geometry.LineString)
+        self.assertEqual(len(rect.coords), 2)
+        self.assertEqual(rect.coords[0], (0,0))
+        self.assertEqual(rect.coords[1], (2,2))
+
+def test_suite():
+    return unittest.TestLoader().loadTestsFromTestCase(MinimumRotatedRectangleTestCase)
\ No newline at end of file
diff --git a/tests/test_operations.py b/tests/test_operations.py
index 3377646..f25f06a 100644
--- a/tests/test_operations.py
+++ b/tests/test_operations.py
@@ -1,8 +1,9 @@
 from . import unittest
 import pytest
-from shapely.geometry import Point, Polygon, MultiPoint, GeometryCollection
+from shapely.geometry import Point, LineString, Polygon, MultiPoint, \
+                             GeometryCollection
 from shapely.wkt import loads
-from shapely.geos import TopologicalError
+from shapely.geos import TopologicalError, geos_version
 
 class OperationsTestCase(unittest.TestCase):
 
@@ -73,5 +74,13 @@ class OperationsTestCase(unittest.TestCase):
         with pytest.raises(TopologicalError):
             invalid_polygon.relate(invalid_polygon)
 
+    @unittest.skipIf(geos_version < (3, 2, 0), 'GEOS 3.2.0 required')
+    def test_hausdorff_distance(self):
+        point = Point(1, 1)
+        line = LineString([(2, 0), (2, 4), (3, 4)])
+
+        distance = point.hausdorff_distance(line)
+        self.assertEqual(distance, point.distance(Point(3, 4)))
+
 def test_suite():
     return unittest.TestLoader().loadTestsFromTestCase(OperationsTestCase)
diff --git a/tests/test_polygon.py b/tests/test_polygon.py
index 84532b1..cecb616 100644
--- a/tests/test_polygon.py
+++ b/tests/test_polygon.py
@@ -226,5 +226,17 @@ class PolygonTestCase(unittest.TestCase):
         self.assertEqual(polygon1, polygon2)
         self.assertNotEqual(None, polygon_empty1)
 
+    def test_from_bounds(self):
+        xmin, ymin, xmax, ymax = -180, -90, 180, 90
+        coords = [
+            (xmin, ymin),
+            (xmin, ymax),
+            (xmax, ymax),
+            (xmax, ymin)]
+        self.assertEqual(
+            Polygon(coords),
+            Polygon.from_bounds(xmin, ymin, xmax, ymax))
+
+
 def test_suite():
     return unittest.TestLoader().loadTestsFromTestCase(PolygonTestCase)
diff --git a/tests/test_polylabel.py b/tests/test_polylabel.py
new file mode 100644
index 0000000..c644970
--- /dev/null
+++ b/tests/test_polylabel.py
@@ -0,0 +1,63 @@
+from . import unittest
+from shapely.algorithms.polylabel import polylabel, Cell
+from shapely.geometry import LineString, Point, Polygon
+from shapely.geos import TopologicalError
+
+
+class PolylabelTestCase(unittest.TestCase):
+    def test_polylabel(self):
+        """
+        Finds pole of inaccessibility for a polygon with a tolerance of 10
+
+        """
+        polygon = LineString([(0, 0), (50, 200), (100, 100), (20, 50),
+                              (-100, -20), (-150, -200)]).buffer(100)
+        label = polylabel(polygon, tolerance=10)
+        expected = Point(59.35615556364569, 121.8391962974644)
+        self.assertTrue(expected.almost_equals(label))
+
+    def test_invalid_polygon(self):
+        """
+        Makes sure that the polylabel function throws an exception when provided
+        an invalid polygon.
+
+        """
+        bowtie_polygon = Polygon([(0, 0), (0, 20), (10, 10), (20, 20),
+                                  (20, 0), (10, 10), (0, 0)])
+        self.assertRaises(TopologicalError, polylabel, bowtie_polygon)
+
+    def test_cell_sorting(self):
+        """
+        Tests rich comparison operators of Cells for use in the polylabel
+        minimum priority queue.
+
+        """
+        polygon = Point(0, 0).buffer(100)
+        cell1 = Cell(0, 0, 50, polygon)  # closest
+        cell2 = Cell(50, 50, 50, polygon)  # furthest
+        self.assertLess(cell1, cell2)
+        self.assertLessEqual(cell1, cell2)
+        self.assertFalse(cell2 <= cell1)
+        self.assertEqual(cell1, cell1)
+        self.assertFalse(cell1 == cell2)
+        self.assertNotEqual(cell1, cell2)
+        self.assertFalse(cell1 != cell1)
+        self.assertGreater(cell2, cell1)
+        self.assertFalse(cell1 > cell2)
+        self.assertGreaterEqual(cell2, cell1)
+        self.assertFalse(cell1 >= cell2)
+
+    def test_concave_polygon(self):
+        """
+        Finds pole of inaccessibility for a concave polygon and ensures that
+        the point is inside.
+
+        """
+        concave_polygon = LineString([(500, 0), (0, 0), (0, 500),
+                                      (500, 500)]).buffer(100)
+        label = polylabel(concave_polygon)
+        self.assertTrue(concave_polygon.contains(label))
+
+
+def test_suite():
+    return unittest.TestLoader().loadTestsFromTestCase(PolylabelTestCase)
diff --git a/tests/test_shared_paths.py b/tests/test_shared_paths.py
new file mode 100644
index 0000000..7ab2d54
--- /dev/null
+++ b/tests/test_shared_paths.py
@@ -0,0 +1,50 @@
+from . import unittest
+
+from shapely.geometry import Point, LineString, Polygon, MultiLineString, \
+                             GeometryCollection
+from shapely.geos import geos_version
+from shapely.ops import shared_paths
+
+ at unittest.skipIf(geos_version < (3, 3, 0), 'GEOS 3.3.0 required')
+class SharedPaths(unittest.TestCase):
+    def test_shared_paths_forward(self):
+        g1 = LineString([(0, 0), (10, 0), (10, 5), (20, 5)])
+        g2 = LineString([(5, 0), (15, 0)])
+        result = shared_paths(g1, g2)
+        
+        self.assertTrue(isinstance(result, GeometryCollection))
+        self.assertTrue(len(result) == 2)
+        a, b = result
+        self.assertTrue(isinstance(a, MultiLineString))
+        self.assertTrue(len(a) == 1)
+        self.assertEqual(a[0].coords[:], [(5, 0), (10, 0)])
+        self.assertTrue(b.is_empty)
+
+    def test_shared_paths_forward(self):
+        g1 = LineString([(0, 0), (10, 0), (10, 5), (20, 5)])
+        g2 = LineString([(15, 0), (5, 0)])
+        result = shared_paths(g1, g2)
+        
+        self.assertTrue(isinstance(result, GeometryCollection))
+        self.assertTrue(len(result) == 2)
+        a, b = result
+        self.assertTrue(isinstance(b, MultiLineString))
+        self.assertTrue(len(b) == 1)
+        self.assertEqual(b[0].coords[:], [(5, 0), (10, 0)])
+        self.assertTrue(a.is_empty)
+    
+    def test_wrong_type(self):
+        g1 = Point(0, 0)
+        g2 = LineString([(5, 0), (15, 0)])
+        
+        with self.assertRaises(TypeError):
+            result = shared_paths(g1, g2)
+            
+        with self.assertRaises(TypeError):
+            result = shared_paths(g2, g1)
+
+def test_suite():
+    return unittest.TestLoader().loadTestsFromTestCase(SharedPaths)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tests/test_split.py b/tests/test_split.py
new file mode 100644
index 0000000..dcaae0b
--- /dev/null
+++ b/tests/test_split.py
@@ -0,0 +1,192 @@
+from shapely.ops import split
+
+from . import unittest
+
+from shapely.geometry import Point, MultiPoint, LineString, MultiLineString, Polygon, MultiPolygon, GeometryCollection
+from shapely.ops import cascaded_union, linemerge
+
+class TestSplitGeometry(unittest.TestCase):
+	# helper class for testing below
+	def helper(self, geom, splitter, expected_chunks):
+		# Split `geom` by `splitter`, assert the result is a
+		# GeometryCollection of `expected_chunks` parts, and that
+		# re-merging the parts reproduces the original geometry.
+		s = split(geom, splitter)
+		self.assertEqual(s.type, "GeometryCollection")
+		self.assertEqual(len(s), expected_chunks)
+		if expected_chunks > 1:
+			# split --> expected collection that when merged is again equal to original geometry
+			if s.geoms[0].type == 'LineString':
+				self.assertTrue(linemerge(s).simplify(0.000001).equals(geom))
+			elif s.geoms[0].type == 'Polygon':
+				union = cascaded_union(s).simplify(0.000001)
+				self.assertTrue(union.equals(geom))
+				self.assertEqual(union.area, geom.area)
+			else:
+				# split() only ever yields LineString or Polygon pieces
+				raise ValueError
+		elif expected_chunks == 1:
+			# not split --> expected equal to line
+			self.assertTrue(s[0].equals(geom))
+
+class TestSplitPolygon(TestSplitGeometry):
+	# Split a 2x2 square polygon, with and without a centered square hole.
+	poly_simple = Polygon([(0, 0), (2, 0), (2, 2), (0, 2), (0, 0)])
+	poly_hole = Polygon([(0, 0), (2, 0), (2, 2), (0, 2), (0, 0)], [[(0.5, 0.5), (0.5, 1.5), (1.5, 1.5), (1.5, 0.5), (0.5, 0.5)]])
+
+	def test_split_poly_with_line(self):
+		# crossing at 2 points --> return 2 segments
+		splitter = LineString([(1, 3), (1, -3)])
+		self.helper(self.poly_simple, splitter, 2)
+		self.helper(self.poly_hole, splitter, 2)
+
+		# touching the boundary--> return equal
+		splitter = LineString([(0, 2), (5, 2)])
+		self.helper(self.poly_simple, splitter, 1)
+		self.helper(self.poly_hole, splitter, 1)
+
+		# inside the polygon --> return equal
+		splitter = LineString([(0.2, 0.2), (1.7, 1.7), (3, 2)])
+		self.helper(self.poly_simple, splitter, 1)
+		self.helper(self.poly_hole, splitter, 1)
+
+		# outside the polygon --> return equal
+		splitter = LineString([(0, 3), (3, 3) , (3, 0)])
+		self.helper(self.poly_simple, splitter, 1)
+		self.helper(self.poly_hole, splitter, 1)
+
+	def test_split_poly_with_other(self):
+		# polygons can only be split by lines; other splitter types raise
+		with self.assertRaises(ValueError):
+			split(self.poly_simple, Point(1, 1))
+		with self.assertRaises(ValueError):
+			split(self.poly_simple, MultiPoint([(1, 1), (3, 4)]))
+		with self.assertRaises(ValueError):
+			split(self.poly_simple, self.poly_hole)
+
+class TestSplitLine(TestSplitGeometry):
+	# Split a 3-vertex LineString by points, lines, and polygon boundaries.
+	ls = LineString([(0, 0), (1.5, 1.5), (3.0, 4.0)])
+
+	def test_split_line_with_point(self):
+		# point on line interior --> return 2 segments
+		splitter = Point(1, 1)
+		self.helper(self.ls, splitter, 2)
+
+		# point on line point --> return 2 segments
+		splitter = Point(1.5, 1.5)
+		self.helper(self.ls, splitter, 2)
+
+		# point on boundary --> return equal
+		splitter = Point(3, 4)
+		self.helper(self.ls, splitter, 1)
+
+		# point on exterior of line --> return equal
+		splitter = Point(2, 2)
+		self.helper(self.ls, splitter, 1)
+
+	def test_split_line_with_multipoint(self):
+		# points on line interior --> return 4 segments
+		splitter = MultiPoint([(1,1), (1.5, 1.5), (0.5, 0.5)])
+		self.helper(self.ls, splitter, 4)
+
+		# points on line interior and boundary -> return 2 segments
+		splitter = MultiPoint([(1, 1), (3, 4)])
+		self.helper(self.ls, splitter, 2)
+
+		# duplicated point on line interior --> duplicates collapse,
+		# two distinct split points --> return 3 segments
+		splitter = MultiPoint([(1, 1), (1.5, 1.5), (1, 1)])
+		self.helper(self.ls, splitter, 3)
+
+	def test_split_line_with_line(self):
+		# crosses at one point --> return 2 segments
+		splitter = LineString([(0, 1), (1, 0)])
+		self.helper(self.ls, splitter, 2)
+
+		# crosses at two points --> return 3 segments
+		splitter = LineString([(0, 1), (1, 0), (1, 2)])
+		self.helper(self.ls, splitter, 3)
+
+		# overlaps --> raise
+		splitter = LineString([(0, 0), (15, 15)])
+		with self.assertRaises(ValueError):
+			self.helper(self.ls, splitter, 1)
+
+		# does not cross --> return equal
+		splitter = LineString([(0, 1), (0, 2)])
+		self.helper(self.ls, splitter, 1)
+
+		# is touching the boundary --> return equal
+		splitter = LineString([(-1, 1), (1, -1)])
+		self.assertTrue(splitter.touches(self.ls))
+		self.helper(self.ls, splitter, 1)
+
+	def test_split_line_with_multiline(self):
+		# crosses at one point --> return 2 segments
+		splitter = MultiLineString([[(0, 1), (1, 0)], [(0, 0), (2, -2)]])
+		self.helper(self.ls, splitter, 2)
+
+		# crosses at two points --> return 3 segments
+		splitter = MultiLineString([[(0, 1), (1, 0)], [(0, 2), (2, 0)]])
+		self.helper(self.ls, splitter, 3)
+
+		# crosses at three points --> return 4 segments
+		splitter = MultiLineString([[(0, 1), (1, 0)], [(0, 2), (2, 0), (2.2, 3.2)]])
+		self.helper(self.ls, splitter, 4)
+
+		# overlaps --> raise
+		splitter = MultiLineString([[(0, 0), (1.5, 1.5)], [(1.5, 1.5), (3, 4)]])
+		with self.assertRaises(ValueError):
+			self.helper(self.ls, splitter, 1)
+
+		# does not cross --> return equal
+		splitter = MultiLineString([[(0, 1), (0, 2)], [(1, 0), (2, 0)]])
+		self.helper(self.ls, splitter, 1)
+
+	def test_split_line_with_polygon(self):
+		# only the polygon's boundary splits; crosses at two points
+		# --> return 3 segments
+		splitter = Polygon([(1, 0), (1, 2), (2, 2), (2, 0), (1, 0)])
+		self.helper(self.ls, splitter, 3)
+
+		# crosses at one point and touches boundary --> return 2 segments
+		splitter = Polygon([(0, 0), (1, 2), (2, 2), (1, 0), (0, 0)])
+		self.helper(self.ls, splitter, 2)
+
+		# exterior crosses at one point and touches at (0, 0)
+		# interior crosses at two points
+		splitter = Polygon([(0, 0), (2, 0), (2, 2), (0, 2), (0, 0)], [[(0.5, 0.5), (0.5, 1.5), (1.5, 1.5), (1.5, 0.5), (0.5, 0.5)]])
+		self.helper(self.ls, splitter, 4)
+
+	def test_split_line_with_multipolygon(self):
+		poly1 = Polygon([(0, 0), (2, 0), (2, 2), (0, 2), (0, 0)]) # crosses at one point and touches at (0, 0)
+		poly2 = Polygon([(0.5, 0.5), (0.5, 1.5), (1.5, 1.5), (1.5, 0.5), (0.5, 0.5)]) # crosses at two points
+		poly3 = Polygon([(0, 0), (0, -2), (-2, -2), (-2, 0), (0, 0)]) # not crossing
+		splitter = MultiPolygon([poly1, poly2, poly3])
+		self.helper(self.ls, splitter, 4)
+
+class TestSplitMulti(TestSplitGeometry):
+	# Split multi-part geometries: each part is split independently and
+	# all resulting pieces end up in one flat GeometryCollection.
+
+	def test_split_multiline_with_point(self):
+		# a cross-like multilinestring with a point in the middle --> return 4 line segments
+		l1 = LineString([(0, 1), (2, 1)])
+		l2 = LineString([(1, 0), (1, 2)])
+		ml = MultiLineString([l1, l2])
+		splitter = Point((1, 1))
+		self.helper(ml, splitter, 4)
+
+	def test_split_multiline_with_multipoint(self):
+		# a cross-like multilinestring with a point in middle, a point on one of the lines and a point in the exterior
+		# --> return 4+1 line segments
+		l1 = LineString([(0, 1), (3, 1)])
+		l2 = LineString([(1, 0), (1, 2)])
+		ml = MultiLineString([l1, l2])
+		splitter = MultiPoint([(1, 1), (2, 1), (4, 2)])
+		self.helper(ml, splitter, 5)
+
+	def test_split_multipolygon_with_line(self):
+		# two polygons with a crossing line --> return 4 triangles
+		poly1 = Polygon([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)])
+		poly2 = Polygon([(1, 1), (1, 2), (2, 2), (2, 1), (1, 1)])
+		mpoly = MultiPolygon([poly1, poly2])
+		ls = LineString([(-1, -1), (3, 3)])
+		self.helper(mpoly, ls, 4)
+
+		# two polygons away from the crossing line --> return identity
+		poly1 = Polygon([(10, 10), (10, 11), (11, 11), (11, 10), (10, 10)])
+		poly2 = Polygon([(-10, -10), (-10, -11), (-11, -11), (-11, -10), (-10, -10)])
+		mpoly = MultiPolygon([poly1, poly2])
+		ls = LineString([(-1, -1), (3, 3)])
+		self.helper(mpoly, ls, 2)
\ No newline at end of file
diff --git a/tests/test_vectorized.py b/tests/test_vectorized.py
index 00673ad..ea1d956 100644
--- a/tests/test_vectorized.py
+++ b/tests/test_vectorized.py
@@ -54,18 +54,18 @@ class VectorizedContainsTestCase(unittest.TestCase):
 
     def test_y_array_order(self):
         y, x = np.mgrid[-10:10:5j, -5:15:5j]
-        y = y.copy(order='f')
+        y = y.copy('f')
         self.assertContainsResults(self.construct_torus(), x, y)
     
     def test_x_array_order(self):
         y, x = np.mgrid[-10:10:5j, -5:15:5j]
-        x = x.copy(order='f')
+        x = x.copy('f')
         self.assertContainsResults(self.construct_torus(), x, y)
     
     def test_xy_array_order(self):
         y, x = np.mgrid[-10:10:5j, -5:15:5j]
-        x = x.copy(order='f')
-        y = y.copy(order='f')
+        x = x.copy('f')
+        y = y.copy('f')
         result = self.assertContainsResults(self.construct_torus(), x, y)
         # We always return a C_CONTIGUOUS array.
         self.assertTrue(result.flags['C_CONTIGUOUS'])

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/python-shapely.git



More information about the Pkg-grass-devel mailing list