[fiona] 02/10: Imported Upstream version 1.7~a1

Sebastiaan Couwenberg sebastic at moszumanska.debian.org
Sat Jun 11 12:40:41 UTC 2016


This is an automated email from the git hooks/post-receive script.

sebastic pushed a commit to branch master
in repository fiona.

commit 2714a9f131feacdfaa6fe2f92013210d17d38d64
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date:   Sat Jun 11 12:39:53 2016 +0200

    Imported Upstream version 1.7~a1
---
 .gitignore                            |  75 +++++
 .travis.yml                           |  50 +++-
 CHANGES.txt                           |  13 +
 CODE_OF_CONDUCT.md                    |  22 ++
 CREDITS.txt                           |  18 +-
 README.rst                            |  13 +-
 docs/cli.rst                          |  48 +++-
 fiona/__init__.py                     |  92 +++---
 fiona/_cpl.pxd                        |  26 ++
 fiona/_crs.pxd                        |  18 ++
 fiona/_crs.pyx                        |  68 +++++
 fiona/_drivers.pyx                    | 130 ---------
 fiona/collection.py                   |  74 +++--
 fiona/crs.py                          |  41 +--
 fiona/{_drivers.pyx => drvsupport.py} | 172 ++---------
 fiona/errors.py                       |   8 -
 fiona/fio/bounds.py                   |  16 +-
 fiona/fio/calc.py                     |  63 ++++
 fiona/fio/cat.py                      | 528 +---------------------------------
 fiona/fio/collect.py                  | 215 ++++++++++++++
 fiona/fio/distrib.py                  |  41 +++
 fiona/fio/dump.py                     | 176 ++++++++++++
 fiona/fio/env.py                      |  29 ++
 fiona/fio/filter.py                   |  55 ++++
 fiona/fio/helpers.py                  |  84 +++++-
 fiona/fio/info.py                     |  94 ++----
 fiona/fio/insp.py                     |  46 +++
 fiona/fio/load.py                     | 107 +++++++
 fiona/fio/ls.py                       |  26 ++
 fiona/fio/options.py                  |   9 +
 fiona/{ograpi.pxd => ograpi1.pxd}     |   2 +
 fiona/{ograpi.pxd => ograpi2.pxd}     |  62 +++-
 fiona/{ogrext.pyx => ogrext1.pyx}     |  69 +++--
 fiona/{ogrext.pyx => ogrext2.pyx}     | 234 ++++++++++-----
 fiona/transform.py                    |  81 +++++-
 pep-518-install                       |  34 +++
 pyproject.toml                        |   3 +
 requirements-dev.txt                  |   3 +
 requirements.txt                      |   1 +
 scripts/travis_gdal_install.sh        |  89 ++++++
 setup.py                              | 137 +++++----
 tests/test_bigint.py                  |  69 +++++
 tests/test_bytescollection.py         | 102 +++----
 tests/test_cli.py                     |  38 ---
 tests/test_collection.py              | 339 +++++++++++-----------
 tests/test_collection_crs.py          |   4 +-
 tests/test_crs.py                     |  70 +++--
 tests/test_feature.py                 |  14 +-
 tests/test_fio_calc.py                |  71 +++++
 tests/test_fio_cat.py                 |  67 +----
 tests/test_fio_collect.py             |  98 +++++++
 tests/test_fio_distrib.py             |  23 ++
 tests/test_fio_dump.py                |  16 ++
 tests/test_fio_filter.py              |  29 ++
 tests/test_fio_info.py                |  73 +++++
 tests/test_fio_load.py                |  37 +++
 tests/test_fio_ls.py                  |  58 ++++
 tests/test_geometry.py                |  62 ++--
 tests/test_layer.py                   |  14 +-
 tests/test_props.py                   |   4 +-
 tests/test_remove.py                  |  77 +++++
 tests/test_rfc3339.py                 |  14 +-
 tests/test_unicode.py                 |  67 ++++-
 tests/test_vfs.py                     |  24 +-
 64 files changed, 2870 insertions(+), 1572 deletions(-)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..60d9629
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,75 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# IDE's etc.
+.idea/
+venv/
+venv2/
+
+# fiona
+VERSION.txt
+fiona/ogrext.c
+fiona/_drivers.c
+fiona/_err.c
+fiona/_geometry.c
+fiona/_transform.cpp
+fiona/ograpi.pxd
+fiona/ogrext.pyx
+tests/data/coutwildrnp.json
+tests/data/coutwildrnp.tar
+tests/data/coutwildrnp.zip
diff --git a/.travis.yml b/.travis.yml
index a126e9d..d9da6f6 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,19 +1,51 @@
 language: python
+sudo: false
+cache:
+  directories:
+    - $GDALINST
+    - ~/.cache/pip
+env:
+  global:
+    - PIP_WHEEL_DIR=$HOME/.cache/pip/wheels
+    - PIP_FIND_LINKS=file://$HOME/.cache/pip/wheels
+    - GDALINST=$HOME/gdalinstall
+    - GDALBUILD=$HOME/gdalbuild
+  matrix:
+    - GDALVERSION="1.9.2"
+    - GDALVERSION="1.11.4"
+    - GDALVERSION="2.0.2"
+addons:
+  apt:
+    packages:
+    - libgdal1h
+    - gdal-bin
+    - libproj-dev
+    - libhdf5-serial-dev
+    - libpng-dev
+    - libgdal-dev
+    - libatlas-dev
+    - libatlas-base-dev
+    - gfortran
 python:
   - "2.7"
   - "3.3"
   - "3.4"
+  - "3.5"
 before_install:
-  - sudo add-apt-repository -y ppa:ubuntugis/ppa
-  - sudo apt-get update -qq
-  - sudo apt-get install -y libgdal1h gdal-bin libgdal-dev
+  - pip install -U pip
+  - pip install wheel
+  - . ./scripts/travis_gdal_install.sh
+  - export PATH=$GDALINST/gdal-$GDALVERSION/bin:$PATH
+  - export LD_LIBRARY_PATH=$GDALINST/gdal-$GDALVERSION/lib:$LD_LIBRARY_PATH
+  - gdal-config --version
 install:
+  - "if [ $(gdal-config --version) == \"$GDALVERSION\" ]; then echo \"Using gdal $GDALVERSION\"; else echo \"NOT using gdal $GDALVERSION as expected; aborting\"; exit 1; fi"
+  - "pip wheel -r requirements-dev.txt"
   - "pip install -r requirements-dev.txt"
-  - "pip install pytest"
-  - "pip install coveralls"
-  - "pip install -e ."
+  - "pip install --upgrade --force-reinstall --global-option=build_ext --global-option='-I$GDALINST/gdal-$GDALVERSION/include' --global-option='-L$GDALINST/gdal-$GDALVERSION/lib' --global-option='-R$GDALINST/gdal-$GDALVERSION/lib' -e .[test]"
+  - "fio --version"
 script: 
-  - nosetests --exclude test_filter_vsi --exclude test_geopackage
-  - coverage run --source=fiona --omit='*.pxd,*.pyx,*/tests/*,*/docs/*,*/examples/*,*/benchmarks/*' -m nose --exclude test_filter_vsi --exclude test_geopackage
+  - nosetests --exclude test_filter_vsi --exclude test_geopackage --exclude test_write_mismatch
+  - coverage run --source=fiona --omit='*.pxd,*.pyx,*/tests/*,*/docs/*,*/examples/*,*/benchmarks/*' -m nose --exclude test_filter_vsi --exclude test_geopackage --exclude test_write_mismatch
 after_success:
-  - coveralls
+  - coveralls || echo "!! intermittent coveralls failure"
diff --git a/CHANGES.txt b/CHANGES.txt
index e475141..45d2234 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -3,6 +3,19 @@ Changes
 
 All issue numbers are relative to https://github.com/Toblerity/Fiona/issues.
 
+1.7.0 (2016-06-13)
+------------------
+- New feature: support for GDAL version 2+ (#259).
+- New feature: a new fio-calc CLI command (#273).
+- New feature: `--layer` options for fio-info (#316) and fio-load (#299).
+- New feature: a `--no-parse` option for fio-collect that lets a careful user
+  avoid extra JSON serialization and deserialization (#306).
+- Bug fix: `+wktext` is now preserved when serializing CRS from WKT to PROJ.4
+  dicts (#352).
+- Bug fix: a small memory leak when opening a collection has been fixed (#337).
+- Bug fix: internal unicode errors now result in a log message and a 
+  `UnicodeError` exception, not a `TypeError` (#356).
+
 1.6.4 (2016-05-06)
 ------------------
 - Raise ImportError if the active GDAL library version is >= 2.0 instead of
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..01b8644
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,22 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery
+* Personal attacks
+* Trolling or insulting/derogatory comments
+* Public or private harassment
+* Publishing other's private information, such as physical or electronic addresses, without explicit permission
+* Other unethical or unprofessional conduct.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team.
+
+This code of conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers.
+
+This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
diff --git a/CREDITS.txt b/CREDITS.txt
index d4d28b7..00c00a4 100644
--- a/CREDITS.txt
+++ b/CREDITS.txt
@@ -4,21 +4,24 @@ Credits
 Fiona is written by:
 
 - Sean Gillies <sean.gillies at gmail.com>
+- Rene Buffat <buffat at gmail.com>
 - Kevin Wurster <wursterk at gmail.com>
-- René Buffat <buffat at gmail.com>
+- Matthew Perry <perrygeo at gmail.com>
+- Joshua Arnott <josh at snorfalorpagus.net>
 - Kelsey Jordahl <kjordahl at enthought.com>
-- Patrick Young <patrick.young at digitalglobe.com>
+- Micah Cochran <micah at micahcochran.net>
+- Patrick Young <patrick.mckendree.young at gmail.com>
 - Hannes Gräuler <graeuler at geoplex.de>
 - Johan Van de Wauw <johan.vandewauw at gmail.com>
-- Jacob Wasserman <jwasserman at gmail.com>
-- Joshua Arnott <josh at snorfalorpagus.net>
-- Ryan Grout <rgrout at continuum.io>
 - Michael Weisman <mweisman at gmail.com>
-- Brendan Ward <bcward at consbio.org>
-- Michele Citterio <michele at citterio.net>
+- Ryan Grout <rgrout at continuum.io>
+- Jacob Wasserman <jwasserman at gmail.com>
 - Miro Hrončok <miro at hroncok.cz>
+- Michele Citterio <michele at citterio.net>
+- Brendan Ward <bcward at consbio.org>
 - fredj <frederic.junod at camptocamp.com>
 - wilsaj <wilson.andrew.j+github at gmail.com>
+- Bas Couwenberg <sebastic at xs4all.nl>
 - Brandon Liu <bdon at bdon.org>
 - Hannes Gräuler <hgraeule at uos.de>
 - Ludovic Delauné <ludotux at gmail.com>
@@ -35,4 +38,3 @@ Some portions of this work were supported by a grant (for Pleiades_) from the
 U.S. National Endowment for the Humanities (http://www.neh.gov).
 
 .. _Pleiades: http://pleiades.stoa.org
-
diff --git a/README.rst b/README.rst
index 54cce6d..c4cd169 100644
--- a/README.rst
+++ b/README.rst
@@ -242,7 +242,7 @@ gdal``).
 Python Requirements
 -------------------
 
-Fiona depends on the modules ``six``, ``cligj``, ``argparse``, and
+Fiona depends on the modules ``six``, ``cligj``,  ``munch``, ``argparse``, and
 ``ordereddict`` (the two latter modules are standard in Python 2.7+). Pip will
 fetch these requirements for you, but users installing Fiona from a Windows
 installer must get them separately.
@@ -314,13 +314,20 @@ locations on your system (via your system's package manager), you can do this::
 
   (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git
   (fiona_env)$ cd Fiona
-  (fiona_env)$ pip install -e .
+  (fiona_env)$ pip install cython
+  (fiona_env)$ pip install -e .[test]
   (fiona_env)$ nosetests
 
+Or you can use the ``pep-518-install`` script::
+
+  (fiona_env)$ git clone git://github.com/Toblerity/Fiona.git
+  (fiona_env)$ cd Fiona
+  (fiona_env)$ ./pep-518-install
+
 If you have a non-standard environment, you'll need to specify the include and
 lib dirs and GDAL library on the command line::
 
-  (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal develop
+  (fiona_env)$ python setup.py build_ext -I/path/to/gdal/include -L/path/to/gdal/lib -lgdal --gdalversion 2.0.1 develop
   (fiona_env)$ nosetests
 
 .. _OGR: http://www.gdal.org/ogr
diff --git a/docs/cli.rst b/docs/cli.rst
index be4d25d..ac688b9 100644
--- a/docs/cli.rst
+++ b/docs/cli.rst
@@ -12,14 +12,18 @@ Fiona's new command line interface is a program named "fio".
     Options:
       -v, --verbose  Increase verbosity.
       -q, --quiet    Decrease verbosity.
+      --version      Show the version and exit.
       --help         Show this message and exit.
 
     Commands:
       bounds   Print the extent of GeoJSON objects
+      buffer   Buffer geometries on all sides by a fixed distance.
       cat      Concatenate and print the features of datasets
       collect  Collect a sequence of features.
+      distrib  Distribute features from a collection
       dump     Dump a dataset to GeoJSON.
-      env      Print information about the rio environment.
+      env      Print information about the fio environment.
+      filter   Filter GeoJSON features by python expression
       info     Print information about a dataset.
       insp     Open a dataset and start an interpreter.
       load     Load GeoJSON to a dataset in another format.
@@ -57,9 +61,8 @@ cat
 The cat command concatenates the features of one or more datasets and prints
 them as a `JSON text sequence
 <http://tools.ietf.org/html/draft-ietf-json-text-sequence-07>`__ of features.
-In other words: GeoJSON feature objects, possibly pretty printed, separated by
-ASCII RS (\x1e) chars. LF-separated sequences with no pretty printing are
-optionally available using ``--x-json-seq-no-rs``.
+In other words: GeoJSON feature objects, possibly pretty printed, optionally
+separated by ASCII RS (\x1e) chars using `--rs`.
 
 The output of ``fio cat`` can be piped to ``fio load`` to create new
 concatenated datasets.
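
A minimal sketch of how a downstream consumer might parse such an RS-delimited
sequence (this helper is illustrative and not part of Fiona; the file name is
an assumption):

    import json

    def read_rs_sequence(stream):
        """Yield GeoJSON objects from an ASCII RS (0x1e) delimited sequence."""
        buffer = ""
        for line in stream:
            if line.startswith(u'\x1e'):  # RS marks the start of a new object
                if buffer.strip():
                    yield json.loads(buffer)
                buffer = line.lstrip(u'\x1e')
            else:
                buffer += line  # pretty-printed objects span multiple lines
        if buffer.strip():
            yield json.loads(buffer)

    # e.g. after: fio cat --rs tests/data/coutwildrnp.shp > features.txt
    with open('features.txt') as f:
        for feature in read_rs_sequence(f):
            print(feature['geometry']['type'])
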
@@ -90,6 +93,21 @@ as the output of ``fio cat`` and writes a GeoJSON feature collection.
 
 New in 1.4.0.
 
+distrib
+-------
+
+The inverse of fio-collect, fio-distrib takes a GeoJSON feature collection
+and writes a JSON text sequence of GeoJSON feature objects.
+
+.. code-block:: console
+
+    $ fio info --count tests/data/coutwildrnp.shp
+    67
+    $ fio cat tests/data/coutwildrnp.shp | fio collect | fio distrib | wc -l
+    67
+
+New in 1.7.0.
+
 dump
 ----
 
@@ -220,6 +238,28 @@ collection into a feature sequence.
     > | fio load /tmp/test-seq.shp --x-json-seq --driver Shapefile
 
 
+filter
+------
+The filter command reads GeoJSON features from stdin and writes a feature to
+stdout *if* the provided expression evaluates to `True` for that feature.
+
+The Python expression is evaluated in a restricted namespace containing three
+functions (`sum`, `min`, `max`), the `math` module, the Shapely `shape`
+function, and an object `f` representing the feature to be evaluated. The `f`
+object allows attribute access in JavaScript-style dot notation for convenience.
+
+If the expression evaluates to a "truthy" value, the feature is printed verbatim.
+Otherwise, the feature is excluded from the output.
+
+For example::
+
+    fio cat data.shp \
+    | fio filter "f.properties.area > 1000.0" \
+    | fio collect > large_polygons.geojson
+
+This would create a GeoJSON file containing only those features from
+`data.shp` whose area exceeds the given threshold.
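
A sketch of the evaluation idea (``munch`` is the new dependency noted in the
README changes below; the real logic lives in fiona/fio/helpers.py, so the
helper here is a simplified stand-in):

    import math

    from munch import munchify          # JavaScript-style dot access
    from shapely.geometry import shape

    def eval_feature_expression(feature, expression):
        # Restricted namespace: a few builtins, math, shape, and ``f``.
        namespace = {
            '__builtins__': None,
            'sum': sum, 'min': min, 'max': max,
            'math': math, 'shape': shape,
            'f': munchify(feature)}
        return eval(expression, namespace)

    feature = {'type': 'Feature',
               'properties': {'area': 2500.0},
               'geometry': None}
    print(eval_feature_expression(feature, "f.properties.area > 1000.0"))
    # True
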
+
 Coordinate Reference System Transformations
 -------------------------------------------
 
diff --git a/fiona/__init__.py b/fiona/__init__.py
index 50c68a8..e17a2d6 100644
--- a/fiona/__init__.py
+++ b/fiona/__init__.py
@@ -46,8 +46,8 @@ feature writing to a "points.shp" file.
   ...     output_schema['geometry'] = 'Point'
   ...     with collection(
   ...             "points.shp", "w",
-  ...             crs=inp.crs, 
-  ...             driver="ESRI Shapefile", 
+  ...             crs=inp.crs,
+  ...             driver="ESRI Shapefile",
   ...             schema=output_schema
   ...             ) as out:
   ...         for f in inp.filter(
@@ -67,12 +67,12 @@ import os
 from six import string_types
 
 from fiona.collection import Collection, BytesCollection, vsi_path
-from fiona._drivers import driver_count, GDALEnv, supported_drivers
+from fiona._drivers import driver_count, GDALEnv
+from fiona.drvsupport import supported_drivers
 from fiona.odict import OrderedDict
-from fiona.ogrext import _bounds, _listlayers, FIELD_TYPES_MAP
+from fiona.ogrext import _bounds, _listlayers, FIELD_TYPES_MAP, _remove
 from fiona.ogrext import (
     calc_gdal_version_num, get_gdal_version_num, get_gdal_release_name)
-import warnings
 
 # These modules are imported by fiona.ogrext, but are also import here to
 # help tools like cx_Freeze find them automatically
@@ -81,15 +81,9 @@ import uuid
 
 
 __all__ = ['bounds', 'listlayers', 'open', 'prop_type', 'prop_width']
-__version__ = "1.6.4"
+__version__ = "1.7.0"
 __gdal_version__ = get_gdal_release_name().decode('utf-8')
 
-# Warn user that they use fiona 1.x with gdal 2.0
-if get_gdal_version_num() >= calc_gdal_version_num(2, 0, 0):
-    raise ImportError(
-        "Fiona {0} is only compatible with GDAL 1.x (installed: {1})".format(
-            __version__, __gdal_version__))
-
 log = logging.getLogger('Fiona')
 class NullHandler(logging.Handler):
     def emit(self, record):
@@ -109,24 +103,24 @@ def open(
         crs_wkt=None):
     """Open file at ``path`` in ``mode`` "r" (read), "a" (append), or
     "w" (write) and return a ``Collection`` object.
-    
+
     In write mode, a driver name such as "ESRI Shapefile" or "GPX" (see
     OGR docs or ``ogr2ogr --help`` on the command line) and a schema
     mapping such as:
-    
+
       {'geometry': 'Point',
-       'properties': [('class', 'int'), ('label', 'str'), 
+       'properties': [('class', 'int'), ('label', 'str'),
                       ('value', 'float')]}
-    
+
     must be provided. If a particular ordering of properties ("fields"
     in GIS parlance) in the written file is desired, a list of (key,
     value) pairs as above or an ordered dict is required. If no ordering
     is needed, a standard dict will suffice.
-    
+
     A coordinate reference system for collections in write mode can be
     defined by the ``crs`` parameter. It takes Proj4 style mappings like
-    
-      {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', 
+
+      {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84',
        'no_defs': True}
 
     short hand strings like
@@ -138,10 +132,10 @@ def open(
     The drivers used by Fiona will try to detect the encoding of data
     files. If they fail, you may provide the proper ``encoding``, such
     as 'Windows-1252' for the Natural Earth datasets.
-    
+
     When the provided path is to a file containing multiple named layers
     of data, a layer can be singled out by ``layer``.
-    
+
     A virtual filesystem can be specified. The ``vfs`` parameter may be
     an Apache Commons VFS style string beginning with "zip://" or
     "tar://"". In this case, the ``path`` must be an absolute path
@@ -169,9 +163,9 @@ def open(
                 raise IOError("no such archive file: %r" % archive)
         elif path != '-' and not os.path.exists(path):
             raise IOError("no such file or directory: %r" % path)
-        c = Collection(path, mode, driver=driver,
-                encoding=encoding, layer=layer, vsi=vsi, archive=archive,
-                enabled_drivers=enabled_drivers)
+        c = Collection(path, mode, driver=driver, encoding=encoding,
+                       layer=layer, vsi=vsi, archive=archive,
+                       enabled_drivers=enabled_drivers)
     elif mode == 'w':
         if schema:
             # Make an ordered dict of schema properties.
@@ -179,10 +173,9 @@ def open(
             this_schema['properties'] = OrderedDict(schema['properties'])
         else:
             this_schema = None
-        c = Collection(path, mode,
-                crs=crs, driver=driver, schema=this_schema,
-                encoding=encoding, layer=layer, vsi=vsi, archive=archive,
-                enabled_drivers=enabled_drivers, crs_wkt=crs_wkt)
+        c = Collection(path, mode, crs=crs, driver=driver, schema=this_schema,
+                       encoding=encoding, layer=layer, vsi=vsi, archive=archive,
+                       enabled_drivers=enabled_drivers, crs_wkt=crs_wkt)
     else:
         raise ValueError(
             "mode string must be one of 'r', 'w', or 'a', not %s" % mode)
@@ -191,12 +184,39 @@ def open(
 collection = open
 
 
+def remove(path_or_collection, driver=None):
+    """Deletes an OGR data source
+
+    The required ``path`` argument may be an absolute or relative file path.
+    Alternatively, a Collection can be passed instead in which case the path
+    and driver are automatically determined. Otherwise the ``driver`` argument
+    must be specified.
+
+    Raises a ``RuntimeError`` if the data source cannot be deleted.
+
+    Example usage:
+
+      fiona.remove('test.shp', 'ESRI Shapefile')
+
+    """
+    if isinstance(path_or_collection, Collection):
+        collection = path_or_collection
+        path = collection.path
+        driver = collection.driver
+        collection.close()
+    else:
+        path = path_or_collection
+        if driver is None:
+            raise ValueError("The driver argument is required when removing a path")
+    _remove(path, driver)
+
+
 def listlayers(path, vfs=None):
     """Returns a list of layer names in their index order.
-    
+
     The required ``path`` argument may be an absolute or relative file or
     directory path.
-    
+
     A virtual filesystem can be specified. The ``vfs`` parameter may be
     an Apache Commons VFS style string beginning with "zip://" or
     "tar://"". In this case, the ``path`` must be an absolute path within
@@ -206,15 +226,15 @@ def listlayers(path, vfs=None):
         raise TypeError("invalid path: %r" % path)
     if vfs and not isinstance(vfs, string_types):
         raise TypeError("invalid vfs: %r" % vfs)
-    
+
     path, vsi, archive = parse_paths(path, vfs)
-    
+
     if archive:
         if not os.path.exists(archive):
             raise IOError("no such archive file: %r" % archive)
     elif not os.path.exists(path):
         raise IOError("no such file or directory: %r" % path)
-    
+
     with drivers():
         return _listlayers(vsi_path(path, vsi, archive))
 
@@ -246,9 +266,9 @@ def prop_width(val):
 
 def prop_type(text):
     """Returns a schema property's proper Python type.
-    
+
     Example:
-    
+
       >>> prop_type('int')
       <class 'int'>
       >>> prop_type('str:25')
@@ -270,7 +290,7 @@ def drivers(*args, **kwargs):
 
 def bounds(ob):
     """Returns a (minx, miny, maxx, maxy) bounding box.
-    
+
     The ``ob`` may be a feature record or geometry."""
     geom = ob.get('geometry') or ob
     return _bounds(geom)
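
Taken together, a short sketch of the API documented above: a write-mode open
with a schema and a PROJ.4-style CRS mapping, followed by the ``remove``
function added in this changeset (the file name is illustrative):

    import fiona

    schema = {'geometry': 'Point',
              'properties': [('class', 'int'), ('label', 'str')]}
    crs = {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84',
           'no_defs': True}

    with fiona.open('points.shp', 'w', driver='ESRI Shapefile',
                    schema=schema, crs=crs) as dst:
        dst.write({
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)},
            'properties': {'class': 1, 'label': 'origin'}})

    # The driver is required when a path is given; it is inferred when a
    # Collection is passed instead.
    fiona.remove('points.shp', driver='ESRI Shapefile')
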
diff --git a/fiona/_cpl.pxd b/fiona/_cpl.pxd
new file mode 100644
index 0000000..6e43893
--- /dev/null
+++ b/fiona/_cpl.pxd
@@ -0,0 +1,26 @@
+cdef extern from "cpl_conv.h":
+    void *  CPLMalloc (size_t)
+    void    CPLFree (void *ptr)
+    void    CPLSetThreadLocalConfigOption (char *key, char *val)
+    const char *CPLGetConfigOption (char *, char *)
+
+cdef extern from "cpl_string.h":
+    char ** CSLSetNameValue (char **list, char *name, char *value)
+    void    CSLDestroy (char **list)
+
+cdef extern from "cpl_vsi.h":
+    ctypedef struct VSILFILE:
+        pass
+    int VSIFCloseL (VSILFILE *)
+    VSILFILE * VSIFileFromMemBuffer (const char * filename,
+                                     unsigned char * data,
+                                     int data_len,
+                                     int take_ownership)
+    int VSIUnlink (const char * pathname)
+
+ctypedef int OGRErr
+ctypedef struct OGREnvelope:
+    double MinX
+    double MaxX
+    double MinY
+    double MaxY
diff --git a/fiona/_crs.pxd b/fiona/_crs.pxd
new file mode 100644
index 0000000..999d08d
--- /dev/null
+++ b/fiona/_crs.pxd
@@ -0,0 +1,18 @@
+cdef extern from "ogr_srs_api.h":
+    void    OSRCleanup ()
+    void *  OSRClone (void *srs)
+    void    OSRDestroySpatialReference (void *srs)
+    int     OSRExportToProj4 (void *srs, char **params)
+    int     OSRExportToWkt (void *srs, char **params)
+    int     OSRImportFromEPSG (void *srs, int code)
+    int     OSRImportFromProj4 (void *srs, char *proj)
+    int     OSRSetFromUserInput (void *srs, char *input)
+    int     OSRAutoIdentifyEPSG (void *srs)
+    int     OSRFixup(void *srs)
+    const char * OSRGetAuthorityName (void *srs, const char *key)
+    const char * OSRGetAuthorityCode (void *srs, const char *key)
+    void *  OSRNewSpatialReference (char *wkt)
+    void    OSRRelease (void *srs)
+    void *  OCTNewCoordinateTransformation (void *source, void *dest)
+    void    OCTDestroyCoordinateTransformation (void *source)
+    int     OCTTransform (void *ct, int nCount, double *x, double *y, double *z)
diff --git a/fiona/_crs.pyx b/fiona/_crs.pyx
new file mode 100644
index 0000000..b4da1cb
--- /dev/null
+++ b/fiona/_crs.pyx
@@ -0,0 +1,68 @@
+"""Extension module supporting crs.py.
+
+Calls methods from GDAL's OSR module.
+"""
+
+import logging
+
+from six import string_types
+
+from fiona cimport _cpl, _crs
+from fiona.errors import CRSError
+
+
+logger = logging.getLogger(__name__)
+
+
+# Export a WKT string from input crs.
+def crs_to_wkt(crs):
+    """Convert a Fiona CRS object to WKT format"""
+    cdef void *cogr_srs = NULL
+    cdef char *proj_c = NULL
+
+    cogr_srs = _crs.OSRNewSpatialReference(NULL)
+    if cogr_srs == NULL:
+        raise CRSError("NULL spatial reference")
+
+    # First, check for CRS strings like "EPSG:3857".
+    if isinstance(crs, string_types):
+        proj_b = crs.encode('utf-8')
+        proj_c = proj_b
+        _crs.OSRSetFromUserInput(cogr_srs, proj_c)
+    elif isinstance(crs, dict):
+        # EPSG is a special case.
+        init = crs.get('init')
+        if init:
+            logger.debug("Init: %s", init)
+            auth, val = init.split(':')
+            if auth.upper() == 'EPSG':
+                logger.debug("Setting EPSG: %s", val)
+                _crs.OSRImportFromEPSG(cogr_srs, int(val))
+        else:
+            params = []
+            crs['wktext'] = True
+            for k, v in crs.items():
+                if v is True or (k in ('no_defs', 'wktext') and v):
+                    params.append("+%s" % k)
+                else:
+                    params.append("+%s=%s" % (k, v))
+            proj = " ".join(params)
+            logger.debug("PROJ.4 to be imported: %r", proj)
+            proj_b = proj.encode('utf-8')
+            proj_c = proj_b
+            _crs.OSRImportFromProj4(cogr_srs, proj_c)
+    else:
+        raise ValueError("Invalid CRS")
+
+    # Fixup, export to WKT, and set the GDAL dataset's projection.
+    _crs.OSRFixup(cogr_srs)
+
+    _crs.OSRExportToWkt(cogr_srs, &proj_c)
+
+    if proj_c == NULL:
+        raise CRSError("Null projection")
+
+    proj_b = proj_c
+    _cpl.CPLFree(proj_c)
+
+    return proj_b.decode('utf-8')
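
A hedged usage sketch for the new helper (an internal extension module; the
WKT in the comments is abbreviated and indicative only):

    from fiona._crs import crs_to_wkt

    print(crs_to_wkt('EPSG:4326'))            # GEOGCS["WGS 84",...]
    print(crs_to_wkt({'init': 'epsg:3857'}))  # PROJCS["WGS 84 / Pseudo-Mercator",...]
    print(crs_to_wkt({'proj': 'longlat', 'datum': 'WGS84', 'no_defs': True}))
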
diff --git a/fiona/_drivers.pyx b/fiona/_drivers.pyx
index c320455..4f6df2e 100644
--- a/fiona/_drivers.pyx
+++ b/fiona/_drivers.pyx
@@ -148,133 +148,3 @@ cdef class GDALEnv(object):
             val_b = val
             result[key_b.decode('utf-8')] = val_b.decode('utf-8')
         return result
-
-
-# Here is the list of available drivers as (name, modes) tuples. Currently,
-# we only expose the defaults (excepting FileGDB). We also don't expose
-# the CSV or GeoJSON drivers. Use Python's csv and json modules instead.
-# Might still exclude a few more of these after making a pass through the
-# entries for each at http://www.gdal.org/ogr/ogr_formats.html to screen
-# out the multi-layer formats.
-
-supported_drivers = dict([
-#OGR Vector Formats
-#Format Name 	Code 	Creation 	Georeferencing 	Compiled by default
-#Aeronav FAA files 	AeronavFAA 	No 	Yes 	Yes
-    ("AeronavFAA", "r"),
-#ESRI ArcObjects 	ArcObjects 	No 	Yes 	No, needs ESRI ArcObjects
-#Arc/Info Binary Coverage 	AVCBin 	No 	Yes 	Yes
-# multi-layer
-#   ("AVCBin", "r"),
-#Arc/Info .E00 (ASCII) Coverage 	AVCE00 	No 	Yes 	Yes
-# multi-layer
-#    ("AVCE00", "r"),
-#Arc/Info Generate 	ARCGEN 	No 	No 	Yes
-    ("ARCGEN", "r"),
-#Atlas BNA 	BNA 	Yes 	No 	Yes
-    ("BNA", "raw"),
-#AutoCAD DWG 	DWG 	No 	No 	No
-#AutoCAD DXF 	DXF 	Yes 	No 	Yes
-    ("DXF", "raw"),
-#Comma Separated Value (.csv) 	CSV 	Yes 	No 	Yes
-#CouchDB / GeoCouch 	CouchDB 	Yes 	Yes 	No, needs libcurl
-#DODS/OPeNDAP 	DODS 	No 	Yes 	No, needs libdap
-#EDIGEO 	EDIGEO 	No 	Yes 	Yes
-# multi-layer? Hard to tell from the OGR docs
-#   ("EDIGEO", "r"),
-#ElasticSearch 	ElasticSearch 	Yes (write-only) 	- 	No, needs libcurl
-#ESRI FileGDB 	FileGDB 	Yes 	Yes 	No, needs FileGDB API library
-# multi-layer
-    ("FileGDB", "raw"),
-    ("OpenFileGDB", "r"),
-#ESRI Personal GeoDatabase 	PGeo 	No 	Yes 	No, needs ODBC library
-#ESRI ArcSDE 	SDE 	No 	Yes 	No, needs ESRI SDE
-#ESRI Shapefile 	ESRI Shapefile 	Yes 	Yes 	Yes
-    ("ESRI Shapefile", "raw"),
-#FMEObjects Gateway 	FMEObjects Gateway 	No 	Yes 	No, needs FME
-#GeoJSON 	GeoJSON 	Yes 	Yes 	Yes
-    ("GeoJSON", "rw"),
-#Géoconcept Export 	Geoconcept 	Yes 	Yes 	Yes
-# multi-layers
-#   ("Geoconcept", "raw"),
-#Geomedia .mdb 	Geomedia 	No 	No 	No, needs ODBC library
-#GeoPackage	GPKG	Yes	Yes	No, needs libsqlite3
-    ("GPKG", "rw"),
-#GeoRSS 	GeoRSS 	Yes 	Yes 	Yes (read support needs libexpat)
-#Google Fusion Tables 	GFT 	Yes 	Yes 	No, needs libcurl
-#GML 	GML 	Yes 	Yes 	Yes (read support needs Xerces or libexpat)
-#GMT 	GMT 	Yes 	Yes 	Yes
-    ("GMT", "raw"),
-#GPSBabel 	GPSBabel 	Yes 	Yes 	Yes (needs GPSBabel and GPX driver)
-#GPX 	GPX 	Yes 	Yes 	Yes (read support needs libexpat)
-    ("GPX", "raw"),
-#GRASS 	GRASS 	No 	Yes 	No, needs libgrass
-#GPSTrackMaker (.gtm, .gtz) 	GPSTrackMaker 	Yes 	Yes 	Yes
-    ("GPSTrackMaker", "raw"),
-#Hydrographic Transfer Format 	HTF 	No 	Yes 	Yes
-# TODO: Fiona is not ready for multi-layer formats: ("HTF", "r"),
-#Idrisi Vector (.VCT) 	Idrisi 	No 	Yes 	Yes
-    ("Idrisi", "r"),
-#Informix DataBlade 	IDB 	Yes 	Yes 	No, needs Informix DataBlade
-#INTERLIS 	"Interlis 1" and "Interlis 2" 	Yes 	Yes 	No, needs Xerces (INTERLIS model reading needs ili2c.jar)
-#INGRES 	INGRES 	Yes 	No 	No, needs INGRESS
-#KML 	KML 	Yes 	Yes 	Yes (read support needs libexpat)
-#LIBKML 	LIBKML 	Yes 	Yes 	No, needs libkml
-#Mapinfo File 	MapInfo File 	Yes 	Yes 	Yes
-    ("MapInfo File", "raw"),
-#Microstation DGN 	DGN 	Yes 	No 	Yes
-    ("DGN", "raw"),
-#Access MDB (PGeo and Geomedia capable) 	MDB 	No 	Yes 	No, needs JDK/JRE
-#Memory 	Memory 	Yes 	Yes 	Yes
-#MySQL 	MySQL 	No 	Yes 	No, needs MySQL library
-#NAS - ALKIS 	NAS 	No 	Yes 	No, needs Xerces
-#Oracle Spatial 	OCI 	Yes 	Yes 	No, needs OCI library
-#ODBC 	ODBC 	No 	Yes 	No, needs ODBC library
-#MS SQL Spatial 	MSSQLSpatial 	Yes 	Yes 	No, needs ODBC library
-#Open Document Spreadsheet 	ODS 	Yes 	No 	No, needs libexpat
-#OGDI Vectors (VPF, VMAP, DCW) 	OGDI 	No 	Yes 	No, needs OGDI library
-#OpenAir 	OpenAir 	No 	Yes 	Yes
-# multi-layer
-#   ("OpenAir", "r"),
-#PCI Geomatics Database File 	PCIDSK 	No 	No 	Yes, using internal PCIDSK SDK (from GDAL 1.7.0)
-    ("PCIDSK", "r"),
-#PDS 	PDS 	No 	Yes 	Yes
-    ("PDS", "r"),
-#PGDump 	PostgreSQL SQL dump 	Yes 	Yes 	Yes
-#PostgreSQL/PostGIS 	PostgreSQL/PostGIS 	Yes 	Yes 	No, needs PostgreSQL client library (libpq)
-#EPIInfo .REC 	REC 	No 	No 	Yes
-#S-57 (ENC) 	S57 	No 	Yes 	Yes
-# multi-layer
-#   ("S57", "r"),
-#SDTS 	SDTS 	No 	Yes 	Yes
-# multi-layer
-#   ("SDTS", "r"),
-#SEG-P1 / UKOOA P1/90 	SEGUKOOA 	No 	Yes 	Yes
-# multi-layers
-#   ("SEGUKOOA", "r"),
-#SEG-Y 	SEGY 	No 	No 	Yes
-    ("SEGY", "r"),
-#Norwegian SOSI Standard 	SOSI 	No 	Yes 	No, needs FYBA library
-#SQLite/SpatiaLite 	SQLite 	Yes 	Yes 	No, needs libsqlite3 or libspatialite
-#SUA 	SUA 	No 	Yes 	Yes
-    ("SUA", "r"),
-#SVG 	SVG 	No 	Yes 	No, needs libexpat
-#UK .NTF 	UK. NTF 	No 	Yes 	Yes
-# multi-layer
-#   ("UK. NTF", "r"),
-#U.S. Census TIGER/Line 	TIGER 	No 	Yes 	Yes
-# multi-layer
-#   ("TIGER", "r"),
-#VFK data 	VFK 	No 	Yes 	Yes
-# multi-layer
-#   ("VFK", "r"),
-#VRT - Virtual Datasource 	VRT 	No 	Yes 	Yes
-# multi-layer
-#   ("VRT", "r"),
-#OGC WFS (Web Feature Service) 	WFS 	Yes 	Yes 	No, needs libcurl
-#MS Excel format 	XLS 	No 	No 	No, needs libfreexl
-#Office Open XML spreadsheet 	XLSX 	Yes 	No 	No, needs libexpat
-#X-Plane/Flighgear aeronautical data 	XPLANE 	No 	Yes 	Yes
-# multi-layer
-#   ("XPLANE", "r") 
-])
diff --git a/fiona/collection.py b/fiona/collection.py
index d553582..5713753 100644
--- a/fiona/collection.py
+++ b/fiona/collection.py
@@ -10,40 +10,34 @@ from fiona.ogrext import (
     calc_gdal_version_num, get_gdal_version_num, get_gdal_release_name)
 from fiona.ogrext import buffer_to_virtual_file, remove_virtual_file
 from fiona.errors import DriverError, SchemaError, CRSError
-from fiona._drivers import driver_count, GDALEnv, supported_drivers
+from fiona._drivers import driver_count, GDALEnv
+from fiona.drvsupport import supported_drivers
 from six import string_types, binary_type
 
 class Collection(object):
 
     """A file-like interface to features of a vector dataset
-    
+
     Python text file objects are iterators over lines of a file. Fiona
     Collections are similar iterators (not lists!) over features
     represented as GeoJSON-like mappings.
     """
 
-    def __init__(
-            self, path, mode='r', 
-            driver=None, schema=None, crs=None, 
-            encoding=None,
-            layer=None,
-            vsi=None,
-            archive=None,
-            enabled_drivers=None,
-            crs_wkt=None,
-            **kwargs):
-        
+    def __init__(self, path, mode='r', driver=None, schema=None, crs=None,
+                 encoding=None, layer=None, vsi=None, archive=None,
+                 enabled_drivers=None, crs_wkt=None, **kwargs):
+
         """The required ``path`` is the absolute or relative path to
         a file, such as '/data/test_uk.shp'. In ``mode`` 'r', data can
         be read only. In ``mode`` 'a', data can be appended to a file.
         In ``mode`` 'w', data overwrites the existing contents of
         a file.
-        
+
         In ``mode`` 'w', an OGR ``driver`` name and a ``schema`` are
         required. A Proj4 ``crs`` string is recommended. If both ``crs``
-        and ``crs_wkt`` keyword arguments are passed, the latter will 
+        and ``crs_wkt`` keyword arguments are passed, the latter will
         trump the former.
-        
+
         In 'w' mode, kwargs will be mapped to OGR layer creation
         options.
         """
@@ -74,8 +68,8 @@ class Collection(object):
         if (driver == "GPKG" and
                 get_gdal_version_num() < calc_gdal_version_num(1, 11, 0)):
             raise DriverError(
-                    "GPKG driver requires GDAL 1.11.0, "
-                    "fiona was compiled against: {}".format(get_gdal_release_name()))
+                "GPKG driver requires GDAL 1.11.0, fiona was compiled "
+                "against: {}".format(get_gdal_release_name()))
 
         self.session = None
         self.iterator = None
@@ -87,9 +81,9 @@ class Collection(object):
         self._crs_wkt = None
         self.env = None
         self.enabled_drivers = enabled_drivers
-        
+
         self.path = vsi_path(path, vsi, archive)
-        
+
         if mode == 'w':
             if layer and not isinstance(layer, string_types):
                 raise ValueError("in 'r' mode, layer names must be strings")
@@ -105,9 +99,9 @@ class Collection(object):
                 self.name = 0
             else:
                 self.name = layer or os.path.basename(os.path.splitext(path)[0])
-        
+
         self.mode = mode
-        
+
         if self.mode == 'w':
             if driver == 'Shapefile':
                 driver = 'ESRI Shapefile'
@@ -120,7 +114,7 @@ class Collection(object):
                 raise DriverError(
                     "unsupported mode: %r" % self.mode)
             self._driver = driver
-            
+
             if not schema:
                 raise SchemaError("no schema")
             elif 'properties' not in schema:
@@ -149,7 +143,7 @@ class Collection(object):
             self.encoding = encoding
             self.session = Session()
             self.session.start(self)
-            
+
             # If encoding param is None, we'll use what the session
             # suggests.
             self.encoding = encoding or self.session.get_fileencoding().lower()
@@ -185,10 +179,10 @@ class Collection(object):
             self._driver = self.session.get_driver()
         return self._driver
 
-    @property 
+    @property
     def schema(self):
         """Returns a mapping describing the data schema.
-        
+
         The mapping has 'geometry' and 'properties' items. The former is a
         string such as 'Point' and the latter is an ordered mapping that
         follows the order of fields in the data file.
@@ -219,11 +213,13 @@ class Collection(object):
             'driver': self.driver, 'schema': self.schema, 'crs': self.crs,
             'crs_wkt': self.crs_wkt}
 
+    profile = meta
+
     def filter(self, *args, **kwds):
         """Returns an iterator over records, but filtered by a test for
         spatial intersection with the provided ``bbox``, a (minx, miny,
         maxx, maxy) tuple or a geometry ``mask``.
-        
+
         Positional arguments ``stop`` or ``start, stop[, step]`` allows
         iteration to skip over items or stop at a specific item.
         """
@@ -247,11 +243,11 @@ class Collection(object):
         return self.iterator
 
     def items(self, *args, **kwds):
-        """Returns an iterator over FID, record pairs, optionally 
+        """Returns an iterator over FID, record pairs, optionally
         filtered by a test for spatial intersection with the provided
         ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry
         ``mask``.
-        
+
         Positional arguments ``stop`` or ``start, stop[, step]`` allows
         iteration to skip over items or stop at a specific item.
         """
@@ -275,9 +271,9 @@ class Collection(object):
         return self.iterator
 
     def keys(self, *args, **kwds):
-        """Returns an iterator over FIDs, optionally 
+        """Returns an iterator over FIDs, optionally
         filtered by a test for spatial intersection with the provided
-        ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry 
+        ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry
         ``mask``.
 
         Positional arguments ``stop`` or ``start, stop[, step]`` allows
@@ -343,9 +339,10 @@ class Collection(object):
         """
         # Currently we only compare keys of properties, not the types of
         # values.
-        return set(record['properties'].keys()
-            ) == set(self.schema['properties'].keys()
-            ) and self.validate_record_geometry(record)
+        return (
+            set(record['properties'].keys()) ==
+            set(self.schema['properties'].keys()) and
+            self.validate_record_geometry(record))
 
     def validate_record_geometry(self, record):
         """Compares the record's geometry to the collection's schema.
@@ -354,15 +351,16 @@ class Collection(object):
         """
         # Shapefiles welcome mixes of line/multis and polygon/multis.
         # OGR reports these mixed files as type "Polygon" or "LineString"
-        # but will return either these or their multi counterparts when 
+        # but will return either these or their multi counterparts when
         # reading features.
-        if (self.driver == "ESRI Shapefile" and 
+        if (self.driver == "ESRI Shapefile" and
                 "Point" not in record['geometry']['type']):
             return record['geometry']['type'].lstrip(
                 "Multi") == self.schema['geometry'].lstrip("3D ").lstrip(
                     "Multi")
         else:
-            return (record['geometry']['type'] ==
+            return (
+                record['geometry']['type'] ==
                 self.schema['geometry'].lstrip("3D "))
 
     def __len__(self):
@@ -392,7 +390,7 @@ class Collection(object):
     def close(self):
         """In append or write mode, flushes data to disk, then ends
         access."""
-        if self.session is not None: 
+        if self.session is not None:
             if self.mode in ('a', 'w'):
                 self.flush()
             self.session.stop()
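
A read-side sketch of the Collection API touched above, using the test data
set referenced elsewhere in this changeset (the bbox values and the NAME
property are assumptions about that file):

    import fiona

    with fiona.open('tests/data/coutwildrnp.shp') as src:
        print(src.driver, src.crs, len(src))
        # ``filter`` yields features intersecting the bounding box.
        for feat in src.filter(bbox=(-107.0, 37.0, -105.0, 39.0)):
            print(feat['id'], feat['properties']['NAME'])
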
diff --git a/fiona/crs.py b/fiona/crs.py
index 1def3bc..ac80f6a 100644
--- a/fiona/crs.py
+++ b/fiona/crs.py
@@ -1,17 +1,18 @@
-# Coordinate reference systems and functions.
-#
-# PROJ.4 is the law of this land: http://proj.osgeo.org/. But whereas PROJ.4
-# coordinate reference systems are described by strings of parameters such as
-#
-#   +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs
-#
-# here we use mappings:
-#
-#   {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', 'no_defs': True}
-#
+"""Coordinate reference systems and functions
+
+PROJ.4 is the law of this land: http://proj.osgeo.org/. But whereas PROJ.4
+coordinate reference systems are described by strings of parameters such as
+
+    +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs
+
+here we use mappings:
+
+    {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', 'no_defs': True}
+"""
 
 from six import string_types
 
+
 def to_string(crs):
     """Turn a parameter mapping into a more conventional PROJ.4 string.
 
@@ -23,15 +24,16 @@ def to_string(crs):
     items = []
     for k, v in sorted(filter(
             lambda x: x[0] in all_proj_keys and x[1] is not False and (
-                isinstance(x[1], (bool, int, float)) or 
+                isinstance(x[1], (bool, int, float)) or
                 isinstance(x[1], string_types)),
-            crs.items() )):
+            crs.items())):
         items.append(
             "+" + "=".join(
                 map(str, filter(
-                    lambda y: (y or y == 0) and y is not True, (k, v)))) )
+                    lambda y: (y or y == 0) and y is not True, (k, v)))))
     return " ".join(items)
 
+
 def from_string(prjs):
     """Turn a PROJ.4 string into a mapping of parameters.
 
@@ -39,6 +41,7 @@ def from_string(prjs):
     are checked against the ``all_proj_keys`` list.
     """
     parts = [o.lstrip('+') for o in prjs.strip().split()]
+
     def parse(v):
         try:
             return int(v)
@@ -50,8 +53,9 @@ def from_string(prjs):
             return v
     items = map(
         lambda kv: len(kv) == 2 and (kv[0], parse(kv[1])) or (kv[0], True),
-        (p.split('=') for p in parts) )
-    return dict((k,v) for k, v in items if k in all_proj_keys)
+        (p.split('=') for p in parts))
+    return dict((k, v) for k, v in items if k in all_proj_keys)
+
 
 def from_epsg(code):
     """Given an integer code, returns an EPSG-like mapping.
@@ -173,10 +177,9 @@ _param_data = """
 +x_0       False easting
 +y_0       False northing
 +zone      UTM zone
++wktext    Marker
 """
 
 _lines = filter(lambda x: len(x) > 1, _param_data.split("\n"))
 all_proj_keys = list(
-    set(line.split()[0].lstrip("+").strip() for line in _lines) 
-    ) + ['no_mayo']
-
+    set(line.split()[0].lstrip("+").strip() for line in _lines)) + ['no_mayo']
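
A round-trip sketch for the two functions above, exercising the newly
registered ``+wktext`` parameter:

    from fiona.crs import from_epsg, from_string, to_string

    crs = from_string(
        "+proj=longlat +ellps=WGS84 +datum=WGS84 +wktext +no_defs")
    # {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84',
    #  'wktext': True, 'no_defs': True}
    print(to_string(crs))
    print(from_epsg(4326))  # {'init': 'epsg:4326', 'no_defs': True}
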
diff --git a/fiona/_drivers.pyx b/fiona/drvsupport.py
similarity index 54%
copy from fiona/_drivers.pyx
copy to fiona/drvsupport.py
index c320455..e50b122 100644
--- a/fiona/_drivers.pyx
+++ b/fiona/drvsupport.py
@@ -1,153 +1,6 @@
-# The GDAL and OGR driver registry.
-# GDAL driver management.
+# -*- coding: utf-8 -*-
 
-import os
-import os.path
-import logging
-import sys
-
-from six import string_types
-
-
-cdef extern from "cpl_conv.h":
-    void    CPLFree (void *ptr)
-    void    CPLSetThreadLocalConfigOption (char *key, char *val)
-    const char * CPLGetConfigOption ( const char *key, const char *default)
-
-
-cdef extern from "cpl_error.h":
-    void CPLSetErrorHandler (void *handler)
-
-
-cdef extern from "gdal.h":
-    void GDALAllRegister()
-    void GDALDestroyDriverManager()
-    int GDALGetDriverCount()
-    void * GDALGetDriver(int i)
-    const char * GDALGetDriverShortName(void *driver)
-    const char * GDALGetDriverLongName(void *driver)
-
-
-cdef extern from "ogr_api.h":
-    void OGRRegisterDriver(void *driver)
-    void OGRDeregisterDriver(void *driver)
-    void OGRRegisterAll()
-    void OGRCleanupAll()
-    int OGRGetDriverCount()
-    void * OGRGetDriver(int i)
-    void * OGRGetDriverByName(const char *name)
-    const char * OGR_Dr_GetName(void *driver)
-
-
-log = logging.getLogger('Fiona')
-class NullHandler(logging.Handler):
-    def emit(self, record):
-        pass
-log.addHandler(NullHandler())
-
-
-level_map = {
-    0: 0,
-    1: logging.DEBUG,
-    2: logging.WARNING,
-    3: logging.ERROR,
-    4: logging.CRITICAL }
-
-code_map = {
-    0: 'CPLE_None',
-    1: 'CPLE_AppDefined',
-    2: 'CPLE_OutOfMemory',
-    3: 'CPLE_FileIO',
-    4: 'CPLE_OpenFailed',
-    5: 'CPLE_IllegalArg',
-    6: 'CPLE_NotSupported',
-    7: 'CPLE_AssertionFailed',
-    8: 'CPLE_NoWriteAccess',
-    9: 'CPLE_UserInterrupt',
-    10: 'CPLE_ObjectNull'
-}
-
-cdef void * errorHandler(int eErrClass, int err_no, char *msg):
-    log.log(level_map[eErrClass], "%s in %s", code_map[err_no], msg)
-
-
-def driver_count():
-    return OGRGetDriverCount()
-
-
-cdef class GDALEnv(object):
-
-    cdef public object options
-
-    def __init__(self, **options):
-        self.options = options.copy()
-
-    def __enter__(self):
-        self.start()
-        return self
-
-    def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
-        self.stop()
-
-    def start(self):
-        cdef const char *key_c = NULL
-        cdef const char *val_c = NULL
-
-        if GDALGetDriverCount() == 0:
-            GDALAllRegister()
-        if OGRGetDriverCount() == 0:
-            OGRRegisterAll()
-        CPLSetErrorHandler(<void *>errorHandler)
-        if OGRGetDriverCount() == 0:
-            raise ValueError("Drivers not registered")
-
-        if 'GDAL_DATA' not in os.environ:
-            whl_datadir = os.path.abspath(
-                os.path.join(os.path.dirname(__file__), "gdal_data"))
-            share_datadir = os.path.join(sys.prefix, 'share/gdal')
-            if os.path.exists(os.path.join(whl_datadir, 'pcs.csv')):
-                os.environ['GDAL_DATA'] = whl_datadir
-            elif os.path.exists(os.path.join(share_datadir, 'pcs.csv')):
-                os.environ['GDAL_DATA'] = share_datadir
-        if 'PROJ_LIB' not in os.environ:
-            whl_datadir = os.path.abspath(
-                os.path.join(os.path.dirname(__file__), "proj_data"))
-            os.environ['PROJ_LIB'] = whl_datadir
-
-        for key, val in self.options.items():
-            key_b = key.upper().encode('utf-8')
-            key_c = key_b
-            if isinstance(val, string_types):
-                val_b = val.encode('utf-8')
-            else:
-                val_b = ('ON' if val else 'OFF').encode('utf-8')
-            val_c = val_b
-            CPLSetThreadLocalConfigOption(key_c, val_c)
-            log.debug("Option %s=%s", key, CPLGetConfigOption(key_c, NULL))
-        return self
-
-    def stop(self):
-        cdef const char *key_c = NULL
-        for key in self.options:
-            key_b = key.upper().encode('utf-8')
-            key_c = key_b
-            CPLSetThreadLocalConfigOption(key_c, NULL)
-        CPLSetErrorHandler(NULL)
-
-    def drivers(self):
-        cdef void *drv = NULL
-        cdef char *key = NULL
-        cdef char *val = NULL
-        cdef int i
-        result = {}
-        for i in range(OGRGetDriverCount()):
-            drv = OGRGetDriver(i)
-            key = OGR_Dr_GetName(drv)
-            key_b = key
-            val = OGR_Dr_GetName(drv)
-            val_b = val
-            result[key_b.decode('utf-8')] = val_b.decode('utf-8')
-        return result
+from fiona._drivers import GDALEnv
 
 
 # Here is the list of available drivers as (name, modes) tuples. Currently,
@@ -278,3 +131,24 @@ supported_drivers = dict([
 # multi-layer
 #   ("XPLANE", "r") 
 ])
+
+
+# Remove drivers from the supported_drivers dictionary that the machine's
+# installation of OGR does not provide, e.g. because OGR was compiled
+# without the optional libraries they require.
+def _filter_supported_drivers():
+    global supported_drivers
+
+    gdalenv = GDALEnv()
+    ogrdrv_names = gdalenv.start().drivers().keys()
+    supported_drivers_copy = supported_drivers.copy()
+
+    for drv in supported_drivers.keys():
+        if drv not in ogrdrv_names:
+            del supported_drivers_copy[drv]
+
+    gdalenv.stop()
+
+    supported_drivers = supported_drivers_copy
+
+_filter_supported_drivers()
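
After import, ``supported_drivers`` thus reflects only what the linked
GDAL/OGR build actually provides. A quick check (results vary by
installation):

    from fiona.drvsupport import supported_drivers

    print(supported_drivers.get('ESRI Shapefile'))  # 'raw' on any stock build
    print('FileGDB' in supported_drivers)  # False unless the FileGDB SDK is present
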
diff --git a/fiona/errors.py b/fiona/errors.py
index ed520d0..4ab5f2f 100644
--- a/fiona/errors.py
+++ b/fiona/errors.py
@@ -23,11 +23,3 @@ class DataIOError(IOError):
 
 class FieldNameEncodeError(UnicodeEncodeError):
     """Failure to encode a field name."""
-
-
-class StringFieldEncodeError(UnicodeEncodeError):
-    """Failure to encode a string field value."""
-
-
-class StringFieldDecodeError(UnicodeDecodeError):
-    """Failure to decode a string field value."""
diff --git a/fiona/fio/bounds.py b/fiona/fio/bounds.py
index 2ba1074..109dd65 100644
--- a/fiona/fio/bounds.py
+++ b/fiona/fio/bounds.py
@@ -1,15 +1,16 @@
+"""$ fio bounds"""
+
+
 import json
 import logging
-import sys
 
 import click
 from cligj import precision_opt, use_rs_opt
 
 import fiona
-from .helpers import obj_gen
+from fiona.fio.helpers import obj_gen
 
 
-# Bounds command
 @click.command(short_help="Print the extent of GeoJSON objects")
 @precision_opt
 @click.option('--explode/--no-explode', default=False,
@@ -24,7 +25,7 @@ from .helpers import obj_gen
 @click.pass_context
 def bounds(ctx, precision, explode, with_id, with_obj, use_rs):
     """Print the bounding boxes of GeoJSON objects read from stdin.
-    
+
     Optionally explode collections and print the bounds of their
     features.
 
@@ -50,11 +51,14 @@ def bounds(ctx, precision, explode, with_id, with_obj, use_rs):
                 feat_id = feat.get('id', 'feature:' + str(i))
                 w, s, e, n = fiona.bounds(feat)
                 if precision > 0:
-                    w, s, e, n = (round(v, precision) 
+                    w, s, e, n = (round(v, precision)
                                   for v in (w, s, e, n))
                 if explode:
                     if with_id:
-                        rec = {'parent': obj_id, 'id': feat_id, 'bbox': (w, s, e, n)}
+                        rec = {
+                            'parent': obj_id,
+                            'id': feat_id,
+                            'bbox': (w, s, e, n)}
                     elif with_obj:
                         feat.update(parent=obj_id, bbox=(w, s, e, n))
                         rec = feat
diff --git a/fiona/fio/calc.py b/fiona/fio/calc.py
new file mode 100644
index 0000000..716ab35
--- /dev/null
+++ b/fiona/fio/calc.py
@@ -0,0 +1,63 @@
+from __future__ import division
+import json
+import logging
+
+import click
+from cligj import use_rs_opt
+
+from .helpers import obj_gen, eval_feature_expression
+
+
+@click.command(short_help="Calculate GeoJSON property by Python expression")
+@click.argument('property_name')
+@click.argument('expression')
+@click.option('--overwrite', is_flag=True, default=False,
+              help="Overwrite properties, default: False")
+@use_rs_opt
+@click.pass_context
+def calc(ctx, property_name, expression, overwrite, use_rs):
+    """
+    Create a new property on GeoJSON features using the specified expression.
+
+    \b
+    The expression is evaluated in a restricted namespace containing:
+        - sum, pow, min, max and the imported math module
+        - shape (optional, imported from shapely.geometry if available)
+        - bool, int, str, len, float type conversions
+        - f (the feature to be evaluated,
+             allows item access via javascript-style dot notation using munch)
+
+    The expression will be evaluated for each feature and its
+    return value will be added to the properties
+    as the specified property_name. Existing properties will not
+    be overwritten by default (an Exception is raised).
+
+    Example
+
+    \b
+    $ fio cat data.shp | fio calc sumAB  "f.properties.A + f.properties.B"
+    """
+    logger = logging.getLogger('fio')
+    stdin = click.get_text_stream('stdin')
+
+    try:
+        source = obj_gen(stdin)
+        for i, obj in enumerate(source):
+            features = obj.get('features') or [obj]
+            for j, feat in enumerate(features):
+
+                if not overwrite and property_name in feat['properties']:
+                    raise click.UsageError(
+                        '{0} already exists in properties; '
+                        'rename or use --overwrite'.format(property_name))
+
+                feat['properties'][property_name] = eval_feature_expression(
+                    feat, expression)
+
+                if use_rs:
+                    click.echo(u'\u001e', nl=False)
+                click.echo(json.dumps(feat))
+
+    except Exception:
+        logger.exception("Exception caught during processing")
+        raise click.Abort()
diff --git a/fiona/fio/cat.py b/fiona/fio/cat.py
index a2ed042..9b97199 100644
--- a/fiona/fio/cat.py
+++ b/fiona/fio/cat.py
@@ -1,85 +1,31 @@
-from functools import partial
-import itertools
+"""$ fio cat"""
+
+
 import json
 import logging
-import sys
 import warnings
 
 import click
-from cligj import (
-    compact_opt, files_in_arg, indent_opt,
-    sequence_opt, precision_opt, use_rs_opt)
+import cligj
 
 import fiona
 from fiona.transform import transform_geom
-from .helpers import obj_gen
-from . import options
+from fiona.fio import options
 
 
-FIELD_TYPES_MAP_REV = dict([(v, k) for k, v in fiona.FIELD_TYPES_MAP.items()])
-
 warnings.simplefilter('default')
 
 
-def make_ld_context(context_items):
-    """Returns a JSON-LD Context object.
-
-    See http://json-ld.org/spec/latest/json-ld."""
-    ctx = {
-      "@context": {
-        "geojson": "http://ld.geojson.org/vocab#",
-        "Feature": "geojson:Feature",
-        "FeatureCollection": "geojson:FeatureCollection",
-        "GeometryCollection": "geojson:GeometryCollection",
-        "LineString": "geojson:LineString",
-        "MultiLineString": "geojson:MultiLineString",
-        "MultiPoint": "geojson:MultiPoint",
-        "MultiPolygon": "geojson:MultiPolygon",
-        "Point": "geojson:Point",
-        "Polygon": "geojson:Polygon",
-        "bbox": {
-          "@container": "@list",
-          "@id": "geojson:bbox"
-        },
-        "coordinates": "geojson:coordinates",
-        "datetime": "http://www.w3.org/2006/time#inXSDDateTime",
-        "description": "http://purl.org/dc/terms/description",
-        "features": {
-          "@container": "@set",
-          "@id": "geojson:features"
-        },
-        "geometry": "geojson:geometry",
-        "id": "@id",
-        "properties": "geojson:properties",
-        "start": "http://www.w3.org/2006/time#hasBeginning",
-        "stop": "http://www.w3.org/2006/time#hasEnding",
-        "title": "http://purl.org/dc/terms/title",
-        "type": "@type",
-        "when": "geojson:when"
-      }
-    }
-    for item in context_items or []:
-        t, uri = item.split("=")
-        ctx[t.strip()] = uri.strip()
-    return ctx
-
-
-def id_record(rec):
-    """Converts a record's id to a blank node id and returns the record."""
-    rec['id'] = '_:f%s' % rec['id']
-    return rec
-
-
 # Cat command
 @click.command(short_help="Concatenate and print the features of datasets")
-@files_in_arg
-@precision_opt
-@indent_opt
-@compact_opt
+@cligj.files_in_arg
+@cligj.precision_opt
+@cligj.indent_opt
+@cligj.compact_opt
 @click.option('--ignore-errors/--no-ignore-errors', default=False,
               help="log errors but do not stop serialization.")
 @options.dst_crs_opt
-@use_rs_opt
+@cligj.use_rs_opt
 @click.option('--bbox', default=None, metavar="w,s,e,n",
               help="filter for features intersecting a bounding box")
 @click.pass_context
@@ -98,7 +44,7 @@ def cat(ctx, files, precision, indent, compact, ignore_errors, dst_crs,
     item_sep = compact and ',' or ', '
 
     try:
-        with fiona.drivers(CPL_DEBUG=verbosity>2):
+        with fiona.drivers(CPL_DEBUG=verbosity > 2):
             for path in files:
                 with fiona.open(path) as src:
                     if bbox:
@@ -121,455 +67,3 @@ def cat(ctx, files, precision, indent, compact, ignore_errors, dst_crs,
     except Exception:
         logger.exception("Exception caught during processing")
         raise click.Abort()
-
-
-# Collect command
- at click.command(short_help="Collect a sequence of features.")
- at precision_opt
- at indent_opt
- at compact_opt
- at click.option('--record-buffered/--no-record-buffered', default=False,
-              help="Economical buffering of writes at record, not collection "
-              "(default), level.")
- at click.option('--ignore-errors/--no-ignore-errors', default=False,
-              help="log errors but do not stop serialization.")
- at options.src_crs_opt
- at click.option('--with-ld-context/--without-ld-context', default=False,
-              help="add a JSON-LD context to JSON output.")
- at click.option('--add-ld-context-item', multiple=True,
-              help="map a term to a URI and add it to the output's JSON LD context.")
- at click.pass_context
-def collect(ctx, precision, indent, compact, record_buffered, ignore_errors,
-            src_crs, with_ld_context, add_ld_context_item):
-    """Make a GeoJSON feature collection from a sequence of GeoJSON
-    features and print it."""
-    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
-    logger = logging.getLogger('fio')
-    stdin = click.get_text_stream('stdin')
-    sink = click.get_text_stream('stdout')
-
-    dump_kwds = {'sort_keys': True}
-    if indent:
-        dump_kwds['indent'] = indent
-    if compact:
-        dump_kwds['separators'] = (',', ':')
-    item_sep = compact and ',' or ', '
-
-    if src_crs:
-        transformer = partial(transform_geom, src_crs, 'EPSG:4326',
-                              antimeridian_cutting=True, precision=precision)
-    else:
-        transformer = lambda x: x
-
-    first_line = next(stdin)
-
-    # If input is RS-delimited JSON sequence.
-    if first_line.startswith(u'\x1e'):
-        def feature_gen():
-            buffer = first_line.strip(u'\x1e')
-            for line in stdin:
-                if line.startswith(u'\x1e'):
-                    if buffer:
-                        feat = json.loads(buffer)
-                        feat['geometry'] = transformer(feat['geometry'])
-                        yield feat
-                    buffer = line.strip(u'\x1e')
-                else:
-                    buffer += line
-            else:
-                feat = json.loads(buffer)
-                feat['geometry'] = transformer(feat['geometry'])
-                yield feat
-    else:
-        def feature_gen():
-            feat = json.loads(first_line)
-            feat['geometry'] = transformer(feat['geometry'])
-            yield feat
-            for line in stdin:
-                feat = json.loads(line)
-                feat['geometry'] = transformer(feat['geometry'])
-                yield feat
-
-    try:
-        source = feature_gen()
-
-        if record_buffered:
-            # Buffer GeoJSON data at the feature level for smaller
-            # memory footprint.
-            indented = bool(indent)
-            rec_indent = "\n" + " " * (2 * (indent or 0))
-
-            collection = {
-                'type': 'FeatureCollection',
-                'features': [] }
-            if with_ld_context:
-                collection['@context'] = make_ld_context(
-                    add_ld_context_item)
-
-            head, tail = json.dumps(collection, **dump_kwds).split('[]')
-
-            sink.write(head)
-            sink.write("[")
-
-            # Try the first record.
-            try:
-                i, first = 0, next(source)
-                if with_ld_context:
-                    first = id_record(first)
-                if indented:
-                    sink.write(rec_indent)
-                sink.write(
-                    json.dumps(first, **dump_kwds
-                        ).replace("\n", rec_indent))
-            except StopIteration:
-                pass
-            except Exception as exc:
-                # Ignoring errors is *not* the default.
-                if ignore_errors:
-                    logger.error(
-                        "failed to serialize file record %d (%s), "
-                        "continuing",
-                        i, exc)
-                else:
-                    # Log error and close up the GeoJSON, leaving it
-                    # more or less valid no matter what happens above.
-                    logger.critical(
-                        "failed to serialize file record %d (%s), "
-                        "quiting",
-                        i, exc)
-                    sink.write("]")
-                    sink.write(tail)
-                    if indented:
-                        sink.write("\n")
-                    raise
-
-            # Because trailing commas aren't valid in JSON arrays
-            # we'll write the item separator before each of the
-            # remaining features.
-            for i, rec in enumerate(source, 1):
-                try:
-                    if with_ld_context:
-                        rec = id_record(rec)
-                    if indented:
-                        sink.write(rec_indent)
-                    sink.write(item_sep)
-                    sink.write(
-                        json.dumps(rec, **dump_kwds
-                            ).replace("\n", rec_indent))
-                except Exception as exc:
-                    if ignore_errors:
-                        logger.error(
-                            "failed to serialize file record %d (%s), "
-                            "continuing",
-                            i, exc)
-                    else:
-                        logger.critical(
-                            "failed to serialize file record %d (%s), "
-                            "quiting",
-                            i, exc)
-                        sink.write("]")
-                        sink.write(tail)
-                        if indented:
-                            sink.write("\n")
-                        raise
-
-            # Close up the GeoJSON after writing all features.
-            sink.write("]")
-            sink.write(tail)
-            if indented:
-                sink.write("\n")
-
-        else:
-            # Buffer GeoJSON data at the collection level. The default.
-            collection = {'type': 'FeatureCollection'}
-            if with_ld_context:
-                collection['@context'] = make_ld_context(
-                    add_ld_context_item)
-                collection['features'] = [
-                    id_record(rec) for rec in source]
-            else:
-                collection['features'] = list(source)
-            json.dump(collection, sink, **dump_kwds)
-            sink.write("\n")
-
-    except Exception:
-        logger.exception("Exception caught during processing")
-        raise click.Abort()
-
-
-# Distribute command
- at click.command(short_help="Distribute features from a collection")
- at use_rs_opt
- at click.pass_context
-def distrib(ctx, use_rs):
-    """Print the features of GeoJSON objects read from stdin.
-    """
-    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
-    logger = logging.getLogger('fio')
-    stdin = click.get_text_stream('stdin')
-    try:
-        source = obj_gen(stdin)
-        for i, obj in enumerate(source):
-            obj_id = obj.get('id', 'collection:' + str(i))
-            features = obj.get('features') or [obj]
-            for j, feat in enumerate(features):
-                if obj.get('type') == 'FeatureCollection':
-                    feat['parent'] = obj_id
-                feat_id = feat.get('id', 'feature:' + str(i))
-                feat['id'] = feat_id
-                if use_rs:
-                    click.echo(u'\u001e', nl=False)
-                click.echo(json.dumps(feat))
-    except Exception:
-        logger.exception("Exception caught during processing")
-        raise click.Abort()
-
-
-# Dump command
- at click.command(short_help="Dump a dataset to GeoJSON.")
- at click.argument('input', type=click.Path(), required=True)
- at click.option('--encoding', help="Specify encoding of the input file.")
- at precision_opt
- at indent_opt
- at compact_opt
- at click.option('--record-buffered/--no-record-buffered', default=False,
-    help="Economical buffering of writes at record, not collection "
-         "(default), level.")
- at click.option('--ignore-errors/--no-ignore-errors', default=False,
-              help="log errors but do not stop serialization.")
- at click.option('--with-ld-context/--without-ld-context', default=False,
-        help="add a JSON-LD context to JSON output.")
-
- at click.option('--add-ld-context-item', multiple=True,
-        help="map a term to a URI and add it to the output's JSON LD context.")
- at click.pass_context
-def dump(ctx, input, encoding, precision, indent, compact, record_buffered,
-         ignore_errors, with_ld_context, add_ld_context_item):
-    """Dump a dataset either as a GeoJSON feature collection (the default)
-    or a sequence of GeoJSON features."""
-    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
-    logger = logging.getLogger('fio')
-    sink = click.get_text_stream('stdout')
-
-    dump_kwds = {'sort_keys': True}
-    if indent:
-        dump_kwds['indent'] = indent
-    if compact:
-        dump_kwds['separators'] = (',', ':')
-    item_sep = compact and ',' or ', '
-
-    open_kwds = {}
-    if encoding:
-        open_kwds['encoding'] = encoding
-
-    def transformer(crs, feat):
-        tg = partial(transform_geom, crs, 'EPSG:4326',
-                     antimeridian_cutting=True, precision=precision)
-        feat['geometry'] = tg(feat['geometry'])
-        return feat
-
-    try:
-        with fiona.drivers(CPL_DEBUG=verbosity>2):
-            with fiona.open(input, **open_kwds) as source:
-                meta = source.meta
-                meta['fields'] = dict(source.schema['properties'].items())
-
-                if record_buffered:
-                    # Buffer GeoJSON data at the feature level for smaller
-                    # memory footprint.
-                    indented = bool(indent)
-                    rec_indent = "\n" + " " * (2 * (indent or 0))
-
-                    collection = {
-                        'type': 'FeatureCollection',
-                        'fiona:schema': meta['schema'],
-                        'fiona:crs': meta['crs'],
-                        'features': [] }
-                    if with_ld_context:
-                        collection['@context'] = make_ld_context(
-                            add_ld_context_item)
-
-                    head, tail = json.dumps(collection, **dump_kwds).split('[]')
-
-                    sink.write(head)
-                    sink.write("[")
-
-                    itr = iter(source)
-
-                    # Try the first record.
-                    try:
-                        i, first = 0, next(itr)
-                        first = transformer(first)
-                        if with_ld_context:
-                            first = id_record(first)
-                        if indented:
-                            sink.write(rec_indent)
-                        sink.write(
-                            json.dumps(first, **dump_kwds
-                                ).replace("\n", rec_indent))
-                    except StopIteration:
-                        pass
-                    except Exception as exc:
-                        # Ignoring errors is *not* the default.
-                        if ignore_errors:
-                            logger.error(
-                                "failed to serialize file record %d (%s), "
-                                "continuing",
-                                i, exc)
-                        else:
-                            # Log error and close up the GeoJSON, leaving it
-                            # more or less valid no matter what happens above.
-                            logger.critical(
-                                "failed to serialize file record %d (%s), "
-                                "quiting",
-                                i, exc)
-                            sink.write("]")
-                            sink.write(tail)
-                            if indented:
-                                sink.write("\n")
-                            raise
-
-                    # Because trailing commas aren't valid in JSON arrays
-                    # we'll write the item separator before each of the
-                    # remaining features.
-                    for i, rec in enumerate(itr, 1):
-                        rec = transformer(rec)
-                        try:
-                            if with_ld_context:
-                                rec = id_record(rec)
-                            if indented:
-                                sink.write(rec_indent)
-                            sink.write(item_sep)
-                            sink.write(
-                                json.dumps(rec, **dump_kwds
-                                    ).replace("\n", rec_indent))
-                        except Exception as exc:
-                            if ignore_errors:
-                                logger.error(
-                                    "failed to serialize file record %d (%s), "
-                                    "continuing",
-                                    i, exc)
-                            else:
-                                logger.critical(
-                                    "failed to serialize file record %d (%s), "
-                                    "quiting",
-                                    i, exc)
-                                sink.write("]")
-                                sink.write(tail)
-                                if indented:
-                                    sink.write("\n")
-                                raise
-
-                    # Close up the GeoJSON after writing all features.
-                    sink.write("]")
-                    sink.write(tail)
-                    if indented:
-                        sink.write("\n")
-
-                else:
-                    # Buffer GeoJSON data at the collection level. The default.
-                    collection = {
-                        'type': 'FeatureCollection',
-                        'fiona:schema': meta['schema'],
-                        'fiona:crs': meta['crs']}
-                    if with_ld_context:
-                        collection['@context'] = make_ld_context(
-                            add_ld_context_item)
-                        collection['features'] = [
-                            id_record(transformer(rec)) for rec in source]
-                    else:
-                        collection['features'] = [transformer(source.crs, rec) for rec in source]
-                    json.dump(collection, sink, **dump_kwds)
-
-    except Exception:
-        logger.exception("Exception caught during processing")
-        raise click.Abort()
-
-
-# Load command.
- at click.command(short_help="Load GeoJSON to a dataset in another format.")
- at click.argument('output', type=click.Path(), required=True)
- at click.option('-f', '--format', '--driver', required=True,
-              help="Output format driver name.")
- at options.src_crs_opt
- at click.option(
-    '--dst-crs', '--dst_crs',
-    help="Destination CRS.  Defaults to --src-crs when not given.")
- at click.option(
-    '--sequence / --no-sequence', default=False,
-    help="Specify whether the input stream is a LF-delimited sequence of GeoJSON "
-         "features (the default) or a single GeoJSON feature collection.")
- at click.pass_context
-def load(ctx, output, driver, src_crs, dst_crs, sequence):
-    """Load features from JSON to a file in another format.
-
-    The input is a GeoJSON feature collection or optionally a sequence of
-    GeoJSON feature objects."""
-    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
-    logger = logging.getLogger('fio')
-    stdin = click.get_text_stream('stdin')
-
-    dst_crs = dst_crs or src_crs
-
-    if src_crs and dst_crs and src_crs != dst_crs:
-        transformer = partial(transform_geom, src_crs, dst_crs,
-                              antimeridian_cutting=True, precision=-1)
-    else:
-        transformer = lambda x: x
-
-    first_line = next(stdin)
-
-    # If input is RS-delimited JSON sequence.
-    if first_line.startswith(u'\x1e'):
-        def feature_gen():
-            buffer = first_line.strip(u'\x1e')
-            for line in stdin:
-                if line.startswith(u'\x1e'):
-                    if buffer:
-                        feat = json.loads(buffer)
-                        feat['geometry'] = transformer(feat['geometry'])
-                        yield feat
-                    buffer = line.strip(u'\x1e')
-                else:
-                    buffer += line
-            else:
-                feat = json.loads(buffer)
-                feat['geometry'] = transformer(feat['geometry'])
-                yield feat
-    elif sequence:
-        def feature_gen():
-            yield json.loads(first_line)
-            for line in stdin:
-                feat = json.loads(line)
-                feat['geometry'] = transformer(feat['geometry'])
-                yield feat
-    else:
-        def feature_gen():
-            text = "".join(itertools.chain([first_line], stdin))
-            for feat in json.loads(text)['features']:
-                feat['geometry'] = transformer(feat['geometry'])
-                yield feat
-
-    try:
-        source = feature_gen()
-
-        # Use schema of first feature as a template.
-        # TODO: schema specified on command line?
-        first = next(source)
-        schema = {'geometry': first['geometry']['type']}
-        schema['properties'] = dict([
-            (k, FIELD_TYPES_MAP_REV.get(type(v)) or 'str')
-            for k, v in first['properties'].items()])
-
-        with fiona.drivers(CPL_DEBUG=verbosity>2):
-            with fiona.open(
-                    output, 'w',
-                    driver=driver,
-                    crs=dst_crs,
-                    schema=schema) as dst:
-                dst.write(first)
-                dst.writerecords(source)
-
-    except Exception:
-        logger.exception("Exception caught during processing")
-        raise click.Abort()
diff --git a/fiona/fio/collect.py b/fiona/fio/collect.py
new file mode 100644
index 0000000..83991ea
--- /dev/null
+++ b/fiona/fio/collect.py
@@ -0,0 +1,215 @@
+"""$ fio collect"""
+
+
+from functools import partial
+import json
+import logging
+
+import click
+import cligj
+
+from fiona.fio import helpers
+from fiona.fio import options
+from fiona.transform import transform_geom
+
+
+ at click.command(short_help="Collect a sequence of features.")
+ at cligj.precision_opt
+ at cligj.indent_opt
+ at cligj.compact_opt
+ at click.option('--record-buffered/--no-record-buffered', default=False,
+              help="Economical buffering of writes at record, not collection "
+              "(default), level.")
+ at click.option('--ignore-errors/--no-ignore-errors', default=False,
+              help="log errors but do not stop serialization.")
+ at options.src_crs_opt
+ at click.option('--with-ld-context/--without-ld-context', default=False,
+              help="add a JSON-LD context to JSON output.")
+ at click.option('--add-ld-context-item', multiple=True,
+              help="map a term to a URI and add it to the output's JSON LD "
+                   "context.")
+ at click.option('--parse/--no-parse', default=True,
+              help="load and dump the geojson feature (default is True)")
+@click.pass_context
+def collect(ctx, precision, indent, compact, record_buffered, ignore_errors,
+            src_crs, with_ld_context, add_ld_context_item, parse):
+    """Make a GeoJSON feature collection from a sequence of GeoJSON
+    features and print it."""
+    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
+    logger = logging.getLogger('fio')
+    stdin = click.get_text_stream('stdin')
+    sink = click.get_text_stream('stdout')
+
+    dump_kwds = {'sort_keys': True}
+    if indent:
+        dump_kwds['indent'] = indent
+    if compact:
+        dump_kwds['separators'] = (',', ':')
+    item_sep = compact and ',' or ', '
+
+    if src_crs:
+        if not parse:
+            raise click.UsageError("Can't specify --src-crs with --no-parse")
+        transformer = partial(transform_geom, src_crs, 'EPSG:4326',
+                              antimeridian_cutting=True, precision=precision)
+    else:
+        transformer = lambda x: x
+
+    first_line = next(stdin)
+
+    # If parsing geojson
+    if parse:
+        # If input is RS-delimited JSON sequence.
+        if first_line.startswith(u'\x1e'):
+            def feature_text_gen():
+                buffer = first_line.strip(u'\x1e')
+                for line in stdin:
+                    if line.startswith(u'\x1e'):
+                        if buffer:
+                            feat = json.loads(buffer)
+                            feat['geometry'] = transformer(feat['geometry'])
+                            yield json.dumps(feat, **dump_kwds)
+                        buffer = line.strip(u'\x1e')
+                    else:
+                        buffer += line
+                else:
+                    feat = json.loads(buffer)
+                    feat['geometry'] = transformer(feat['geometry'])
+                    yield json.dumps(feat, **dump_kwds)
+        else:
+            def feature_text_gen():
+                feat = json.loads(first_line)
+                feat['geometry'] = transformer(feat['geometry'])
+                yield json.dumps(feat, **dump_kwds)
+
+                for line in stdin:
+                    feat = json.loads(line)
+                    feat['geometry'] = transformer(feat['geometry'])
+                    yield json.dumps(feat, **dump_kwds)
+
+    # If *not* parsing geojson
+    else:
+        # If input is RS-delimited JSON sequence.
+        if first_line.startswith(u'\x1e'):
+            def feature_text_gen():
+                buffer = first_line.strip(u'\x1e')
+                for line in stdin:
+                    if line.startswith(u'\x1e'):
+                        if buffer:
+                            yield buffer
+                        buffer = line.strip(u'\x1e')
+                    else:
+                        buffer += line
+                else:
+                    yield buffer
+        else:
+            def feature_text_gen():
+                yield first_line
+                for line in stdin:
+                    yield line
+
+    try:
+        source = feature_text_gen()
+
+        if record_buffered:
+            # Buffer GeoJSON data at the feature level for smaller
+            # memory footprint.
+            indented = bool(indent)
+            rec_indent = "\n" + " " * (2 * (indent or 0))
+
+            collection = {
+                'type': 'FeatureCollection',
+                'features': []}
+            if with_ld_context:
+                collection['@context'] = helpers.make_ld_context(
+                    add_ld_context_item)
+
+            head, tail = json.dumps(collection, **dump_kwds).split('[]')
+
+            sink.write(head)
+            sink.write("[")
+
+            # Try the first record.
+            try:
+                i, first = 0, next(source)
+                if with_ld_context:
+                    first = helpers.id_record(first)
+                if indented:
+                    sink.write(rec_indent)
+                sink.write(first.replace("\n", rec_indent))
+            except StopIteration:
+                pass
+            except Exception as exc:
+                # Ignoring errors is *not* the default.
+                if ignore_errors:
+                    logger.error(
+                        "failed to serialize file record %d (%s), "
+                        "continuing",
+                        i, exc)
+                else:
+                    # Log error and close up the GeoJSON, leaving it
+                    # more or less valid no matter what happens above.
+                    logger.critical(
+                        "failed to serialize file record %d (%s), "
+                        "quiting",
+                        i, exc)
+                    sink.write("]")
+                    sink.write(tail)
+                    if indented:
+                        sink.write("\n")
+                    raise
+
+            # Because trailing commas aren't valid in JSON arrays
+            # we'll write the item separator before each of the
+            # remaining features.
+            for i, rec in enumerate(source, 1):
+                try:
+                    if with_ld_context:
+                        rec = helpers.id_record(rec)
+                    if indented:
+                        sink.write(rec_indent)
+                    sink.write(item_sep)
+                    sink.write(rec.replace("\n", rec_indent))
+                except Exception as exc:
+                    if ignore_errors:
+                        logger.error(
+                            "failed to serialize file record %d (%s), "
+                            "continuing",
+                            i, exc)
+                    else:
+                        logger.critical(
+                            "failed to serialize file record %d (%s), "
+                            "quiting",
+                            i, exc)
+                        sink.write("]")
+                        sink.write(tail)
+                        if indented:
+                            sink.write("\n")
+                        raise
+
+            # Close up the GeoJSON after writing all features.
+            sink.write("]")
+            sink.write(tail)
+            if indented:
+                sink.write("\n")
+
+        else:
+            # Buffer GeoJSON data at the collection level. The default.
+            collection = {
+                'type': 'FeatureCollection',
+                'features': []}
+            if with_ld_context:
+                collection['@context'] = helpers.make_ld_context(
+                    add_ld_context_item)
+
+            head, tail = json.dumps(collection, **dump_kwds).split('[]')
+            sink.write(head)
+            sink.write("[")
+            sink.write(",".join(source))
+            sink.write("]")
+            sink.write(tail)
+            sink.write("\n")
+
+    except Exception:
+        logger.exception("Exception caught during processing")
+        raise click.Abort()
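
collect detects RFC 7464 JSON text sequences by the ASCII record separator
(U+001E) on the first line; records may be pretty-printed across several
lines, hence the buffering. A standalone sketch of the same parsing (input
lines illustrative):

    import json

    def parse_rs_sequence(lines):
        # Mirror of feature_text_gen(): accumulate until the next RS marker.
        buffer = ""
        for line in lines:
            if line.startswith(u"\x1e"):
                if buffer:
                    yield json.loads(buffer)
                buffer = line.strip(u"\x1e")
            else:
                buffer += line
        if buffer:
            yield json.loads(buffer)

    recs = list(parse_rs_sequence([u"\x1e{\"a\":\n", u"1}\n", u"\x1e{\"a\": 2}\n"]))
    assert [r["a"] for r in recs] == [1, 2]
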
diff --git a/fiona/fio/distrib.py b/fiona/fio/distrib.py
new file mode 100644
index 0000000..308425d
--- /dev/null
+++ b/fiona/fio/distrib.py
@@ -0,0 +1,41 @@
+"""$ fio distrib"""
+
+
+import json
+import logging
+
+import click
+import cligj
+
+from fiona.fio import helpers
+
+
+@click.command()
+@cligj.use_rs_opt
+@click.pass_context
+def distrib(ctx, use_rs):
+
+    """Distribute features from a collection.
+
+    Print the features of GeoJSON objects read from stdin.
+    """
+
+    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
+    logger = logging.getLogger('fio')
+    stdin = click.get_text_stream('stdin')
+    try:
+        source = helpers.obj_gen(stdin)
+        for i, obj in enumerate(source):
+            obj_id = obj.get('id', 'collection:' + str(i))
+            features = obj.get('features') or [obj]
+            for j, feat in enumerate(features):
+                if obj.get('type') == 'FeatureCollection':
+                    feat['parent'] = obj_id
+                feat_id = feat.get('id', 'feature:' + str(i))
+                feat['id'] = feat_id
+                if use_rs:
+                    click.echo(u'\u001e', nl=False)
+                click.echo(json.dumps(feat))
+    except Exception:
+        logger.exception("Exception caught during processing")
+        raise click.Abort()
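
distrib is collect's inverse: each feature of an incoming collection is
emitted on its own, tagged with a parent id so the collection can be
reassembled later. In effect (ids illustrative):

    # What distrib adds to each feature of a FeatureCollection.
    collection = {"type": "FeatureCollection", "id": "coll-0",
                  "features": [{"type": "Feature", "properties": {}}]}

    obj_id = collection.get("id", "collection:0")
    for feat in collection["features"]:
        feat["parent"] = obj_id              # link back to the source collection
        feat.setdefault("id", "feature:0")   # ensure every feature has an id
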
diff --git a/fiona/fio/dump.py b/fiona/fio/dump.py
new file mode 100644
index 0000000..6968285
--- /dev/null
+++ b/fiona/fio/dump.py
@@ -0,0 +1,176 @@
+"""$ fio dump"""
+
+
+from functools import partial
+import json
+import logging
+
+import click
+import cligj
+
+import fiona
+from fiona.fio import helpers
+from fiona.transform import transform_geom
+
+
+ at click.command(short_help="Dump a dataset to GeoJSON.")
+ at click.argument('input', type=click.Path(), required=True)
+ at click.option('--encoding', help="Specify encoding of the input file.")
+ at cligj.precision_opt
+ at cligj.indent_opt
+ at cligj.compact_opt
+ at click.option('--record-buffered/--no-record-buffered', default=False,
+              help="Economical buffering of writes at record, not collection "
+                   "(default), level.")
+ at click.option('--ignore-errors/--no-ignore-errors', default=False,
+              help="log errors but do not stop serialization.")
+ at click.option('--with-ld-context/--without-ld-context', default=False,
+              help="add a JSON-LD context to JSON output.")
+ at click.option('--add-ld-context-item', multiple=True,
+              help="map a term to a URI and add it to the output's JSON LD "
+                   "context.")
+ at click.pass_context
+def dump(ctx, input, encoding, precision, indent, compact, record_buffered,
+         ignore_errors, with_ld_context, add_ld_context_item):
+
+    """Dump a dataset either as a GeoJSON feature collection (the default)
+    or a sequence of GeoJSON features."""
+
+    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
+    logger = logging.getLogger('fio')
+    sink = click.get_text_stream('stdout')
+
+    dump_kwds = {'sort_keys': True}
+    if indent:
+        dump_kwds['indent'] = indent
+    if compact:
+        dump_kwds['separators'] = (',', ':')
+    item_sep = compact and ',' or ', '
+
+    open_kwds = {}
+    if encoding:
+        open_kwds['encoding'] = encoding
+
+    def transformer(crs, feat):
+        tg = partial(transform_geom, crs, 'EPSG:4326',
+                     antimeridian_cutting=True, precision=precision)
+        feat['geometry'] = tg(feat['geometry'])
+        return feat
+
+    try:
+        with fiona.drivers(CPL_DEBUG=verbosity > 2):
+            with fiona.open(input, **open_kwds) as source:
+                meta = source.meta
+                meta['fields'] = dict(source.schema['properties'].items())
+
+                if record_buffered:
+                    # Buffer GeoJSON data at the feature level for smaller
+                    # memory footprint.
+                    indented = bool(indent)
+                    rec_indent = "\n" + " " * (2 * (indent or 0))
+
+                    collection = {
+                        'type': 'FeatureCollection',
+                        'fiona:schema': meta['schema'],
+                        'fiona:crs': meta['crs'],
+                        'features': []}
+                    if with_ld_context:
+                        collection['@context'] = helpers.make_ld_context(
+                            add_ld_context_item)
+
+                    head, tail = json.dumps(
+                        collection, **dump_kwds).split('[]')
+
+                    sink.write(head)
+                    sink.write("[")
+
+                    itr = iter(source)
+
+                    # Try the first record.
+                    try:
+                        i, first = 0, next(itr)
+                        first = transformer(source.crs, first)
+                        if with_ld_context:
+                            first = helpers.id_record(first)
+                        if indented:
+                            sink.write(rec_indent)
+                        sink.write(json.dumps(
+                            first, **dump_kwds).replace("\n", rec_indent))
+                    except StopIteration:
+                        pass
+                    except Exception as exc:
+                        # Ignoring errors is *not* the default.
+                        if ignore_errors:
+                            logger.error(
+                                "failed to serialize file record %d (%s), "
+                                "continuing",
+                                i, exc)
+                        else:
+                            # Log error and close up the GeoJSON, leaving it
+                            # more or less valid no matter what happens above.
+                            logger.critical(
+                                "failed to serialize file record %d (%s), "
+                                "quiting",
+                                i, exc)
+                            sink.write("]")
+                            sink.write(tail)
+                            if indented:
+                                sink.write("\n")
+                            raise
+
+                    # Because trailing commas aren't valid in JSON arrays
+                    # we'll write the item separator before each of the
+                    # remaining features.
+                    for i, rec in enumerate(itr, 1):
+                        rec = transformer(source.crs, rec)
+                        try:
+                            if with_ld_context:
+                                rec = helpers.id_record(rec)
+                            if indented:
+                                sink.write(rec_indent)
+                            sink.write(item_sep)
+                            sink.write(json.dumps(
+                                rec, **dump_kwds).replace("\n", rec_indent))
+                        except Exception as exc:
+                            if ignore_errors:
+                                logger.error(
+                                    "failed to serialize file record %d (%s), "
+                                    "continuing",
+                                    i, exc)
+                            else:
+                                logger.critical(
+                                    "failed to serialize file record %d (%s), "
+                                    "quiting",
+                                    i, exc)
+                                sink.write("]")
+                                sink.write(tail)
+                                if indented:
+                                    sink.write("\n")
+                                raise
+
+                    # Close up the GeoJSON after writing all features.
+                    sink.write("]")
+                    sink.write(tail)
+                    if indented:
+                        sink.write("\n")
+
+                else:
+                    # Buffer GeoJSON data at the collection level. The default.
+                    collection = {
+                        'type': 'FeatureCollection',
+                        'fiona:schema': meta['schema'],
+                        'fiona:crs': meta['crs']}
+                    if with_ld_context:
+                        collection['@context'] = helpers.make_ld_context(
+                            add_ld_context_item)
+                        collection['features'] = [
+                            helpers.id_record(transformer(source.crs, rec))
+                            for rec in source]
+                    else:
+                        collection['features'] = [
+                            transformer(source.crs, rec) for rec in source]
+                    json.dump(collection, sink, **dump_kwds)
+
+    except Exception:
+        logger.exception("Exception caught during processing")
+        raise click.Abort()
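
Both collect and dump stream a syntactically valid FeatureCollection by
serializing the envelope with an empty features list, splitting on '[]', and
writing records between the two halves, with the separator placed before each
record rather than after. Self-contained:

    import json

    collection = {"type": "FeatureCollection", "features": []}
    head, tail = json.dumps(collection, sort_keys=True).split("[]")

    out = [head, "["]
    for i, feat in enumerate([{"type": "Feature"}, {"type": "Feature"}]):
        if i:
            out.append(", ")   # separator before each record: no trailing comma
        out.append(json.dumps(feat))
    out.extend(["]", tail])
    print("".join(out))        # one valid FeatureCollection
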
diff --git a/fiona/fio/env.py b/fiona/fio/env.py
new file mode 100644
index 0000000..f5297f1
--- /dev/null
+++ b/fiona/fio/env.py
@@ -0,0 +1,29 @@
+"""$ fio env"""
+
+
+import logging
+
+import click
+
+import fiona
+
+
+ at click.command(short_help="Print information about the fio environment.")
+ at click.option('--formats', 'key', flag_value='formats', default=True,
+              help="Enumerate the available formats.")
+ at click.pass_context
+def env(ctx, key):
+
+    """Print information about the Fiona environment: available
+    formats, etc.
+    """
+
+    verbosity = (ctx.obj and ctx.obj.get('verbosity')) or 1
+    logger = logging.getLogger('fio')
+    stdout = click.get_text_stream('stdout')
+    with fiona.drivers(CPL_DEBUG=(verbosity > 2)) as env:
+        if key == 'formats':
+            for k, v in sorted(fiona.supported_drivers.items()):
+                modes = ', '.join("'" + m + "'" for m in v)
+                stdout.write("%s (modes %s)\n" % (k, modes))
+            stdout.write('\n')
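
fio env --formats walks fiona.supported_drivers, which maps OGR driver names
to the mode characters Fiona permits for them. The same listing from Python:

    import fiona

    for name, modes in sorted(fiona.supported_drivers.items()):
        # modes is a string such as 'raw'; list each character as a mode.
        print("%s (modes %s)" % (name, ", ".join("'%s'" % m for m in modes)))
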
diff --git a/fiona/fio/filter.py b/fiona/fio/filter.py
new file mode 100644
index 0000000..3914ffd
--- /dev/null
+++ b/fiona/fio/filter.py
@@ -0,0 +1,55 @@
+"""$ fio filter"""
+
+
+import json
+import logging
+
+import click
+from cligj import use_rs_opt
+
+from fiona.fio.helpers import obj_gen, eval_feature_expression
+
+
+@click.command()
+@click.argument('filter_expression')
+@use_rs_opt
+@click.pass_context
+def filter(ctx, filter_expression, use_rs):
+    """
+    Filter GeoJSON features by Python expression.
+
+    Features are read from stdin.
+
+    The expression is evaluated in a restricted namespace containing:
+        - sum, pow, min, max and the imported math module
+        - shape (optional, imported from shapely.geometry if available)
+        - bool, int, str, len, float type conversions
+        - f (the feature being evaluated; munch allows item access
+             via JavaScript-style dot notation)
+
+    The expression is evaluated for each feature; if the result is
+    truthy, the feature is written to the output.
+
+    e.g. fio cat data.shp \
+         | fio filter "f.properties.area > 1000.0" \
+         | fio collect > large_polygons.geojson
+    """
+
+    logger = logging.getLogger('fio')
+    stdin = click.get_text_stream('stdin')
+
+    try:
+        source = obj_gen(stdin)
+        for i, obj in enumerate(source):
+            features = obj.get('features') or [obj]
+            for j, feat in enumerate(features):
+                if not eval_feature_expression(feat, filter_expression):
+                    continue
+
+                if use_rs:
+                    click.echo(u'\u001e', nl=False)
+                click.echo(json.dumps(feat))
+
+    except Exception:
+        logger.exception("Exception caught during processing")
+        raise click.Abort()
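
An expression can be checked outside the CLI by calling the shared helper
directly (a sketch; fiona.fio.helpers.eval_feature_expression is introduced
by this patch, and the feature contents are illustrative):

    from fiona.fio.helpers import eval_feature_expression

    feat = {"type": "Feature",
            "properties": {"area": 1500.0},
            "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}}

    # The same predicate the docstring pipes through `fio filter`.
    assert eval_feature_expression(feat, "f.properties.area > 1000.0")
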
diff --git a/fiona/fio/helpers.py b/fiona/fio/helpers.py
index 76f261b..5e9bf39 100644
--- a/fiona/fio/helpers.py
+++ b/fiona/fio/helpers.py
@@ -1,11 +1,13 @@
 """
 Helper objects needed by multiple CLI commands.
 """
-
-
+from functools import partial
 import json
+import math
 import warnings
 
+from munch import munchify
+
 
 warnings.simplefilter('default')
 
@@ -31,3 +33,81 @@ def obj_gen(lines):
             for line in lines:
                 yield json.loads(line)
     return gen()
+
+
+def nullable(val, cast):
+    if val is None:
+        return None
+    else:
+        return cast(val)
+
+
+def eval_feature_expression(feature, expression):
+    safe_dict = {'f': munchify(feature)}
+    safe_dict.update({
+        'sum': sum,
+        'pow': pow,
+        'min': min,
+        'max': max,
+        'math': math,
+        'bool': bool,
+        'int': partial(nullable, int),
+        'str': partial(nullable, str),
+        'float': partial(nullable, float),
+        'len': partial(nullable, len),
+    })
+    try:
+        from shapely.geometry import shape
+        safe_dict['shape'] = shape
+    except ImportError:
+        pass
+    return eval(expression, {"__builtins__": None}, safe_dict)
+
+
+def make_ld_context(context_items):
+    """Returns a JSON-LD Context object.
+
+    See http://json-ld.org/spec/latest/json-ld."""
+    ctx = {
+      "@context": {
+        "geojson": "http://ld.geojson.org/vocab#",
+        "Feature": "geojson:Feature",
+        "FeatureCollection": "geojson:FeatureCollection",
+        "GeometryCollection": "geojson:GeometryCollection",
+        "LineString": "geojson:LineString",
+        "MultiLineString": "geojson:MultiLineString",
+        "MultiPoint": "geojson:MultiPoint",
+        "MultiPolygon": "geojson:MultiPolygon",
+        "Point": "geojson:Point",
+        "Polygon": "geojson:Polygon",
+        "bbox": {
+          "@container": "@list",
+          "@id": "geojson:bbox"
+        },
+        "coordinates": "geojson:coordinates",
+        "datetime": "http://www.w3.org/2006/time#inXSDDateTime",
+        "description": "http://purl.org/dc/terms/description",
+        "features": {
+          "@container": "@set",
+          "@id": "geojson:features"
+        },
+        "geometry": "geojson:geometry",
+        "id": "@id",
+        "properties": "geojson:properties",
+        "start": "http://www.w3.org/2006/time#hasBeginning",
+        "stop": "http://www.w3.org/2006/time#hasEnding",
+        "title": "http://purl.org/dc/terms/title",
+        "type": "@type",
+        "when": "geojson:when"
+      }
+    }
+    for item in context_items or []:
+        t, uri = item.split("=")
+        ctx[t.strip()] = uri.strip()
+    return ctx
+
+
+def id_record(rec):
+    """Converts a record's id to a blank node id and returns the record."""
+    rec['id'] = '_:f%s' % rec['id']
+    return rec
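
make_ld_context merges user-supplied 'term=uri' pairs, as given via
--add-ld-context-item, into the returned object; note the pair lands at the
top level of that object, alongside '@context'. For example:

    from fiona.fio.helpers import make_ld_context

    ctx = make_ld_context(["dcterms=http://purl.org/dc/terms/"])
    assert ctx["dcterms"] == "http://purl.org/dc/terms/"
    assert "@context" in ctx
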
diff --git a/fiona/fio/info.py b/fiona/fio/info.py
index fde48c0..e77bc17 100644
--- a/fiona/fio/info.py
+++ b/fiona/fio/info.py
@@ -1,43 +1,24 @@
-"""
-Commands to get info about datasources and the Fiona environment
-"""
+"""$ fio info"""
 
 
-import code
 import logging
 import json
-import sys
 
 import click
 from cligj import indent_opt
 
 import fiona
 import fiona.crs
+from fiona.fio import options
 
 
- at click.command(short_help="Print information about the fio environment.")
- at click.option('--formats', 'key', flag_value='formats', default=True,
-              help="Enumerate the available formats.")
- at click.pass_context
-def env(ctx, key):
-    """Print information about the Fiona environment: available
-    formats, etc.
-    """
-    verbosity = (ctx.obj and ctx.obj.get('verbosity')) or 1
-    logger = logging.getLogger('fio')
-    stdout = click.get_text_stream('stdout')
-    with fiona.drivers(CPL_DEBUG=(verbosity > 2)) as env:
-        if key == 'formats':
-            for k, v in sorted(fiona.supported_drivers.items()):
-                modes = ', '.join("'" + m + "'" for m in v)
-                stdout.write("%s (modes %s)\n" % (k, modes))
-            stdout.write('\n')
-
-
-# Info command.
- at click.command(short_help="Print information about a dataset.")
+ at click.command()
 # One or more files.
 @click.argument('input', type=click.Path(exists=True))
+@click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer,
+              help="Print information about a specific layer.  The first "
+                   "layer is used by default.  Layers use zero-based "
+                   "numbering when accessed by index.")
 @indent_opt
 # Options to pick out a single metadata item and print it as
 # a string.
@@ -50,15 +31,31 @@ def env(ctx, key):
 @click.option('--bounds', 'meta_member', flag_value='bounds',
               help="Print the boundary coordinates "
                    "(left, bottom, right, top).")
+@click.option('--name', 'meta_member', flag_value='name',
+              help="Print the datasource's name.")
 @click.pass_context
-def info(ctx, input, indent, meta_member):
+def info(ctx, input, indent, meta_member, layer):
+
+    """
+    Print information about a dataset.
+
+    When working with a multi-layer dataset the first layer is used by default.
+    Use the '--layer' option to select a different layer.
+    """
+
     verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
     logger = logging.getLogger('fio')
     try:
-        with fiona.drivers(CPL_DEBUG=verbosity>2):
-            with fiona.open(input) as src:
+        with fiona.drivers(CPL_DEBUG=verbosity > 2):
+            with fiona.open(input, layer=layer) as src:
                 info = src.meta
-                info.update(bounds=src.bounds, count=len(src))
+                info.update(bounds=src.bounds, name=src.name)
+                try:
+                    info.update(count=len(src))
+                except TypeError:
+                    info.update(count=None)
+                    logger.debug("Setting 'count' to None/null - layer does "
+                                 "not support counting")
                 proj4 = fiona.crs.to_string(src.crs)
                 if proj4.startswith('+init=epsg'):
                     proj4 = proj4.split('=')[1].upper()
@@ -74,40 +71,3 @@ def info(ctx, input, indent, meta_member):
     except Exception:
         logger.exception("Exception caught during processing")
         raise click.Abort()
-
-
-# Insp command.
- at click.command(short_help="Open a dataset and start an interpreter.")
- at click.argument('src_path', type=click.Path(exists=True))
- at click.option('--ipython', 'interpreter', flag_value='ipython',
-              help="Use IPython as interpreter.")
- at click.pass_context
-def insp(ctx, src_path, interpreter):
-
-    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
-    logger = logging.getLogger('fio')
-
-    banner = 'Fiona %s Interactive Inspector (Python %s)\n' \
-             'Type "src.schema", "next(src)", or "help(src)" ' \
-             'for more information.' \
-             % (fiona.__version__, '.'.join(map(str, sys.version_info[:3])))
-
-    try:
-        with fiona.drivers(CPL_DEBUG=verbosity > 2):
-            with fiona.open(src_path) as src:
-
-                scope = locals()
-
-                if not interpreter:
-                    code.interact(banner, local=scope)
-                elif interpreter == 'ipython':
-                    import IPython
-                    IPython.InteractiveShell.banner1 = banner
-                    IPython.start_ipython(argv=[], user_ns=scope)
-                else:
-                    raise click.ClickException('Interpreter %s is unsupported or missing '
-                                               'dependencies' % interpreter)
-
-    except Exception:
-        logger.exception("Exception caught during processing")
-        raise click.Abort()
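
The crs reporting above shortens '+init=epsg:nnnn' proj4 strings to a bare
EPSG code. In isolation:

    # How `fio info` normalizes an +init-style CRS string.
    proj4 = "+init=epsg:4326"
    if proj4.startswith("+init=epsg"):
        proj4 = proj4.split("=")[1].upper()
    assert proj4 == "EPSG:4326"
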
diff --git a/fiona/fio/insp.py b/fiona/fio/insp.py
new file mode 100644
index 0000000..c36191b
--- /dev/null
+++ b/fiona/fio/insp.py
@@ -0,0 +1,46 @@
+"""$ fio insp"""
+
+
+import code
+import logging
+import sys
+
+import click
+
+import fiona
+
+
+ at click.command(short_help="Open a dataset and start an interpreter.")
+ at click.argument('src_path', type=click.Path(exists=True))
+ at click.option('--ipython', 'interpreter', flag_value='ipython',
+              help="Use IPython as interpreter.")
+ at click.pass_context
+def insp(ctx, src_path, interpreter):
+
+    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
+    logger = logging.getLogger('fio')
+
+    banner = 'Fiona %s Interactive Inspector (Python %s)\n' \
+             'Type "src.schema", "next(src)", or "help(src)" ' \
+             'for more information.' \
+             % (fiona.__version__, '.'.join(map(str, sys.version_info[:3])))
+
+    try:
+        with fiona.drivers(CPL_DEBUG=verbosity > 2):
+            with fiona.open(src_path) as src:
+
+                scope = locals()
+
+                if not interpreter:
+                    code.interact(banner, local=scope)
+                elif interpreter == 'ipython':
+                    import IPython
+                    IPython.InteractiveShell.banner1 = banner
+                    IPython.start_ipython(argv=[], user_ns=scope)
+                else:
+                    raise click.ClickException(
+                        'Interpreter {} is unsupported or missing '
+                        'dependencies'.format(interpreter))
+    except Exception:
+        logger.exception("Exception caught during processing")
+        raise click.Abort()
diff --git a/fiona/fio/load.py b/fiona/fio/load.py
new file mode 100644
index 0000000..7556df5
--- /dev/null
+++ b/fiona/fio/load.py
@@ -0,0 +1,107 @@
+"""$ fio load"""
+
+
+from functools import partial
+import itertools
+import json
+import logging
+
+import click
+
+import fiona
+from fiona.fio import options
+from fiona.transform import transform_geom
+
+
+FIELD_TYPES_MAP_REV = dict([(v, k) for k, v in fiona.FIELD_TYPES_MAP.items()])
+
+
+ at click.command(short_help="Load GeoJSON to a dataset in another format.")
+ at click.argument('output', type=click.Path(), required=True)
+ at click.option('-f', '--format', '--driver', required=True,
+              help="Output format driver name.")
+ at options.src_crs_opt
+ at click.option('--dst-crs', '--dst_crs',
+              help="Destination CRS.  Defaults to --src-crs when not given.")
+ at click.option('--sequence / --no-sequence', default=False,
+              help="Specify whether the input stream is a LF-delimited "
+                   "sequence of GeoJSON features (the default) or a single "
+                   "GeoJSON feature collection.")
+ at click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer,
+              help="Load features into specified layer.  Layers use "
+                   "zero-based numbering when accessed by index.")
+ at click.pass_context
+def load(ctx, output, driver, src_crs, dst_crs, sequence, layer):
+    """Load features from JSON to a file in another format.
+
+    The input is a GeoJSON feature collection or optionally a sequence of
+    GeoJSON feature objects."""
+    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
+    logger = logging.getLogger('fio')
+    stdin = click.get_text_stream('stdin')
+
+    dst_crs = dst_crs or src_crs
+
+    if src_crs and dst_crs and src_crs != dst_crs:
+        transformer = partial(transform_geom, src_crs, dst_crs,
+                              antimeridian_cutting=True, precision=-1)
+    else:
+        transformer = lambda x: x
+
+    first_line = next(stdin)
+
+    # If input is RS-delimited JSON sequence.
+    if first_line.startswith(u'\x1e'):
+        def feature_gen():
+            buffer = first_line.strip(u'\x1e')
+            for line in stdin:
+                if line.startswith(u'\x1e'):
+                    if buffer:
+                        feat = json.loads(buffer)
+                        feat['geometry'] = transformer(feat['geometry'])
+                        yield feat
+                    buffer = line.strip(u'\x1e')
+                else:
+                    buffer += line
+            else:
+                feat = json.loads(buffer)
+                feat['geometry'] = transformer(feat['geometry'])
+                yield feat
+    elif sequence:
+        def feature_gen():
+            yield json.loads(first_line)
+            for line in stdin:
+                feat = json.loads(line)
+                feat['geometry'] = transformer(feat['geometry'])
+                yield feat
+    else:
+        def feature_gen():
+            text = "".join(itertools.chain([first_line], stdin))
+            for feat in json.loads(text)['features']:
+                feat['geometry'] = transformer(feat['geometry'])
+                yield feat
+
+    try:
+        source = feature_gen()
+
+        # Use schema of first feature as a template.
+        # TODO: schema specified on command line?
+        first = next(source)
+        schema = {'geometry': first['geometry']['type']}
+        schema['properties'] = dict([
+            (k, FIELD_TYPES_MAP_REV.get(type(v)) or 'str')
+            for k, v in first['properties'].items()])
+
+        with fiona.drivers(CPL_DEBUG=verbosity > 2):
+            with fiona.open(
+                    output, 'w',
+                    driver=driver,
+                    crs=dst_crs,
+                    schema=schema,
+                    layer=layer) as dst:
+                dst.write(first)
+                dst.writerecords(source)
+
+    except Exception:
+        logger.exception("Exception caught during processing")
+        raise click.Abort()
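
load derives the output schema from the first feature by reverse-mapping
Python value types. A standalone sketch with a stand-in type map (the real
map is derived from fiona.FIELD_TYPES_MAP; property values illustrative):

    # Stand-in for the reverse of fiona.FIELD_TYPES_MAP.
    FIELD_TYPES_MAP_REV = {int: 'int', float: 'float', str: 'str'}

    first = {"geometry": {"type": "Point", "coordinates": [0, 0]},
             "properties": {"name": "a", "count": 3, "area": 1.5}}

    schema = {"geometry": first["geometry"]["type"]}
    schema["properties"] = dict(
        (k, FIELD_TYPES_MAP_REV.get(type(v)) or "str")
        for k, v in first["properties"].items())

    assert schema["properties"] == {"name": "str", "count": "int", "area": "float"}
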
diff --git a/fiona/fio/ls.py b/fiona/fio/ls.py
new file mode 100644
index 0000000..c3828b0
--- /dev/null
+++ b/fiona/fio/ls.py
@@ -0,0 +1,26 @@
+"""$ fiona ls"""
+
+
+import json
+
+import click
+from cligj import indent_opt
+
+import fiona
+
+
+@click.command()
+@click.argument('input', type=click.Path(exists=True))
+@indent_opt
+@click.pass_context
+def ls(ctx, input, indent):
+
+    """
+    List layers in a datasource.
+    """
+
+    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
+
+    with fiona.drivers(CPL_DEBUG=verbosity > 2):
+        result = fiona.listlayers(input)
+        click.echo(json.dumps(result, indent=indent))
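
fio ls is a thin wrapper over fiona.listlayers(), which returns layer names
as a list of strings (path and output illustrative):

    import fiona

    print(fiona.listlayers("example.gpkg"))  # e.g. ['roads', 'buildings']
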
diff --git a/fiona/fio/options.py b/fiona/fio/options.py
index 47fb1ae..6a7a62b 100644
--- a/fiona/fio/options.py
+++ b/fiona/fio/options.py
@@ -6,3 +6,12 @@ import click
 
 src_crs_opt = click.option('--src-crs', '--src_crs', help="Source CRS.")
 dst_crs_opt = click.option('--dst-crs', '--dst_crs', help="Destination CRS.")
+
+
+def cb_layer(ctx, param, value):
+    """Let --layer be a name or index."""
+
+    if value is None or not value.isdigit():
+        return value
+    else:
+        return int(value)
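
The callback keeps --layer polymorphic: all-digit values become zero-based
indexes, anything else passes through as a layer name. Its behavior,
restated with checks:

    def cb_layer(ctx, param, value):
        """Let --layer be a name or index."""
        if value is None or not value.isdigit():
            return value
        return int(value)

    assert cb_layer(None, None, "2") == 2            # index
    assert cb_layer(None, None, "roads") == "roads"  # name
    assert cb_layer(None, None, None) is None        # option omitted
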
diff --git a/fiona/ograpi.pxd b/fiona/ograpi1.pxd
similarity index 98%
copy from fiona/ograpi.pxd
copy to fiona/ograpi1.pxd
index c84ca7d..752c85e 100644
--- a/fiona/ograpi.pxd
+++ b/fiona/ograpi1.pxd
@@ -12,6 +12,7 @@ cdef extern from "cpl_conv.h":
     void *  CPLMalloc (size_t)
     void    CPLFree (void *ptr)
     void    CPLSetThreadLocalConfigOption (char *key, char *val)
+    void    CPLSetConfigOption (char *key, char *val)
     const char *CPLGetConfigOption (char *, char *)
 
 cdef extern from "cpl_string.h":
@@ -62,6 +63,7 @@ cdef extern from "ogr_api.h":
     void *  OGR_Dr_CreateDataSource (void *driver, const char *path, char **options)
     int     OGR_Dr_DeleteDataSource (void *driver, char *)
     void *  OGR_Dr_Open (void *driver, const char *path, int bupdate)
+    int     OGR_Dr_TestCapability (void *driver, const char *)
     int     OGR_DS_DeleteLayer (void *datasource, int n)
     void *  OGR_DS_CreateLayer (void *datasource, char *name, void *crs, int geomType, char **options)
     void *  OGR_DS_ExecuteSQL (void *datasource, char *name, void *filter, char *dialext)
diff --git a/fiona/ograpi.pxd b/fiona/ograpi2.pxd
similarity index 76%
rename from fiona/ograpi.pxd
rename to fiona/ograpi2.pxd
index c84ca7d..149cb9b 100644
--- a/fiona/ograpi.pxd
+++ b/fiona/ograpi2.pxd
@@ -4,6 +4,55 @@
 
 cdef extern from "gdal.h":
     char * GDALVersionInfo (char *pszRequest)
+    void * GDALGetDriverByName(const char * pszName)
+    void * GDALOpenEx(const char * pszFilename,
+                      unsigned int nOpenFlags,
+                      const char ** papszAllowedDrivers,
+                      const char ** papszOpenOptions,
+                      const char *const *papszSibling1Files
+                      )
+    int GDAL_OF_UPDATE
+    int GDAL_OF_READONLY
+    int GDAL_OF_VECTOR
+    int GDAL_OF_VERBOSE_ERROR
+    int GDALDatasetGetLayerCount(void * hds)
+    void * GDALDatasetGetLayer(void * hDS, int iLayer)
+    void * GDALDatasetGetLayerByName(void * hDS, char * pszName)
+    void GDALClose(void * hDS)
+    void * GDALGetDatasetDriver(void * hDataset)
+    void * GDALCreate(void * hDriver,
+                      const char * pszFilename,
+                      int nXSize,
+                      int     nYSize,
+                      int     nBands,
+                      GDALDataType eBandType,
+                      char ** papszOptions)
+    void * GDALDatasetCreateLayer(void * hDS,
+                                  const char * pszName,
+                                  void * hSpatialRef,
+                                  int eType,
+                                  char ** papszOptions)
+    int GDALDatasetDeleteLayer(void * hDS, int iLayer)
+    void GDALFlushCache(void * hDS)
+    char * GDALGetDriverShortName(void * hDriver)
+    int GDALDeleteDataset(void * hDriver, const char * pszFilename)
+
+
+    ctypedef enum GDALDataType:
+        GDT_Unknown
+        GDT_Byte
+        GDT_UInt16
+        GDT_Int16
+        GDT_UInt32
+        GDT_Int32
+        GDT_Float32
+        GDT_Float64
+        GDT_CInt16
+        GDT_CInt32
+        GDT_CFloat32
+        GDT_CFloat64
+        GDT_TypeCount
 
 cdef extern from "gdal_version.h":
     int    GDAL_COMPUTE_VERSION(int maj, int min, int rev)
@@ -62,16 +111,7 @@ cdef extern from "ogr_api.h":
     void *  OGR_Dr_CreateDataSource (void *driver, const char *path, char **options)
     int     OGR_Dr_DeleteDataSource (void *driver, char *)
     void *  OGR_Dr_Open (void *driver, const char *path, int bupdate)
-    int     OGR_DS_DeleteLayer (void *datasource, int n)
-    void *  OGR_DS_CreateLayer (void *datasource, char *name, void *crs, int geomType, char **options)
-    void *  OGR_DS_ExecuteSQL (void *datasource, char *name, void *filter, char *dialext)
-    void    OGR_DS_Destroy (void *datasource)
-    void *  OGR_DS_GetDriver (void *layer_defn)
-    void *  OGR_DS_GetLayerByName (void *datasource, char *name)
-    int     OGR_DS_GetLayerCount (void *datasource)
-    void *  OGR_DS_GetLayer (void *datasource, int n)
-    void    OGR_DS_ReleaseResultSet (void *datasource, void *results)
-    int     OGR_DS_SyncToDisk (void *datasource)
+    int     OGR_Dr_TestCapability (void *driver, const char *)
     void *  OGR_F_Create (void *featuredefn)
     void    OGR_F_Destroy (void *feature)
     long    OGR_F_GetFID (void *feature)
@@ -143,3 +183,5 @@ cdef extern from "ogr_api.h":
     void *  OGROpenShared (char *path, int mode, void *x)
     int     OGRReleaseDataSource (void *datasource)
     OGRErr  OGR_L_SetNextByIndex (void *layer, long nIndex)
+    long long OGR_F_GetFieldAsInteger64 (void *feature, int n)
+    void    OGR_F_SetFieldInteger64 (void *feature, int n, long long value)
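The GDAL_OF_* symbols declared above are bit flags that GDALOpenEx
combines with bitwise OR. A pure-Python sketch of the combinations used
later in ogrext2.pyx; the numeric values are assumed from GDAL 2.x's
gdal.h to make the example self-contained:

    # Flag values as defined in gdal.h for GDAL 2.x (assumed here).
    GDAL_OF_READONLY = 0x00
    GDAL_OF_UPDATE = 0x01
    GDAL_OF_VECTOR = 0x04
    GDAL_OF_VERBOSE_ERROR = 0x40

    # Read-only vector open, as used by Session.start().
    read_flags = GDAL_OF_VECTOR | GDAL_OF_READONLY   # 0x04
    # Updatable vector open, as used by WritingSession.start().
    write_flags = GDAL_OF_VECTOR | GDAL_OF_UPDATE    # 0x05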
diff --git a/fiona/ogrext.pyx b/fiona/ogrext1.pyx
similarity index 96%
copy from fiona/ogrext.pyx
copy to fiona/ogrext1.pyx
index 6004f4e..56ae391 100644
--- a/fiona/ogrext.pyx
+++ b/fiona/ogrext1.pyx
@@ -17,8 +17,7 @@ from fiona._geometry cimport GeomBuilder, OGRGeomBuilder
 from fiona._err import cpl_errs
 from fiona._geometry import GEOMETRY_TYPES
 from fiona.errors import (
-    DriverError, SchemaError, CRSError, FionaValueError, FieldNameEncodeError,
-    StringFieldEncodeError, StringFieldDecodeError)
+    DriverError, SchemaError, CRSError, FionaValueError, FieldNameEncodeError)
 from fiona.odict import OrderedDict
 from fiona.rfc3339 import parse_date, parse_datetime, parse_time
 from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType
@@ -63,6 +62,10 @@ FIELD_TYPES_MAP = {
     'datetime': FionaDateTimeType
    }
 
+# OGR Driver capability
+ODrCCreateDataSource = b"CreateDataSource"
+ODrCDeleteDataSource = b"DeleteDataSource"
+
 # OGR Layer capability
 OLC_RANDOMREAD = b"RandomRead"
 OLC_SEQUENTIALWRITE = b"SequentialWrite"
@@ -92,10 +95,6 @@ OGRERR_FAILURE = 6
 OGRERR_UNSUPPORTED_SRS = 7
 OGRERR_INVALID_HANDLE = 8
 
-# Recent versions of OGR can sometimes detect file encoding, but don't
-# provide access yet to the detected encoding. Hence this variable.
-OGR_DETECTED_ENCODING = '-ogr-detected-encoding'
-
 
 def _explode(coords):
     """Explode a GeoJSON geometry's coordinates object and yield
@@ -184,10 +183,10 @@ cdef class FeatureBuilder:
                 try:
                     val = ograpi.OGR_F_GetFieldAsString(feature, i)
                     val = val.decode(encoding)
-                except UnicodeError as exc:
-                    raise StringFieldDecodeError(
-                        "Failed to decode {0} using {1} codec: {2}".format(
-                            val, encoding, str(exc)))
+                except UnicodeError:
+                    log.error("Failed to decode property '%s' value '%s'",
+                              key, val)
+                    raise
 
                 # Does the text contain a JSON object? Let's check.
                 # Let's check as cheaply as we can.
@@ -267,10 +266,10 @@ cdef class OGRFeatureBuilder:
             # Catch and re-raise unicode encoding errors.
             try:
                 key_bytes = ogr_key.encode(encoding)
-            except UnicodeError as exc:
-                raise FieldNameEncodeError(
-                    "Failed to encode {0} using {1} codec: {2}".format(
-                        key, encoding, str(exc)))
+            except UnicodeError:
+                log.error("Failed to encode property '%s' value '%s'",
+                          key, value)
+                raise
 
             key_c = key_bytes
             i = ograpi.OGR_F_GetFieldIndex(cogr_feature, key_c)
@@ -313,15 +312,13 @@ cdef class OGRFeatureBuilder:
                 ograpi.OGR_F_SetFieldDateTime(
                     cogr_feature, i, 0, 0, 0, hh, mm, ss, 0)
             elif isinstance(value, string_types):
-                
-                # Catch and re-raise string field value encoding errors.
+                # Catch, log, and re-raise string field value encoding errors.
                 try:
                     value_bytes = value.encode(encoding)
-                except UnicodeError as exc:
-                    raise StringFieldEncodeError(
-                        "Failed to encode {0} using {1} codec: {2}".format(
-                            value, encoding, str(exc)))
-
+                except UnicodeError:
+                    log.error("Failed to encode property '%s' value '%s'",
+                              key, value)
+                    raise
                 string_c = value_bytes
                 ograpi.OGR_F_SetFieldString(cogr_feature, i, string_c)
             elif value is None:
@@ -445,7 +442,7 @@ cdef class Session:
             self._fileencoding = (
                 ograpi.OGR_L_TestCapability(
                     self.cogr_layer, OLC_STRINGSASUTF8) and
-                OGR_DETECTED_ENCODING) or (
+                'utf-8') or (
                 self.get_driver() == "ESRI Shapefile" and
                 'ISO-8859-1') or locale.getpreferredencoding().upper()
 
@@ -740,7 +737,7 @@ cdef class WritingSession(Session):
             userencoding = self.collection.encoding
             self._fileencoding = (userencoding or (
                 ograpi.OGR_L_TestCapability(self.cogr_layer, OLC_STRINGSASUTF8) and
-                OGR_DETECTED_ENCODING) or (
+                'utf-8') or (
                 self.get_driver() == "ESRI Shapefile" and
                 'ISO-8859-1') or locale.getpreferredencoding()).upper()
 
@@ -837,9 +834,10 @@ cdef class WritingSession(Session):
 
             fileencoding = self.get_fileencoding()
             if fileencoding:
-                fileencoding_b = fileencoding.encode()
+                fileencoding_b = fileencoding.encode('utf-8')
                 fileencoding_c = fileencoding_b
                 options = ograpi.CSLSetNameValue(options, "ENCODING", fileencoding_c)
+                log.debug("Output file encoding: %s", fileencoding)
 
             # Does the layer exist already? If so, we delete it.
             layer_count = ograpi.OGR_DS_GetLayerCount(self.cogr_ds)
@@ -865,7 +863,7 @@ cdef class WritingSession(Session):
             name_b = collection.name.encode('utf-8')
             name_c = name_b
             self.cogr_layer = ograpi.OGR_DS_CreateLayer(
-                self.cogr_ds, 
+                self.cogr_ds,
                 name_c,
                 cogr_srs,
                 <unsigned int>[k for k,v in GEOMETRY_TYPES.items() if 
@@ -1187,6 +1185,27 @@ cdef class KeysIterator(Iterator):
         return fid
 
 
+def _remove(path, driver=None):
+    """Deletes an OGR data source
+    """
+    cdef void *cogr_driver
+    cdef int result
+
+    if driver is None:
+        driver = 'ESRI Shapefile'
+
+    cogr_driver = ograpi.OGRGetDriverByName(driver.encode('utf-8'))
+    if cogr_driver == NULL:
+        raise ValueError("Null driver")
+
+    if not ograpi.OGR_Dr_TestCapability(cogr_driver, ODrCDeleteDataSource):
+        raise RuntimeError("Driver does not support dataset removal operation")
+
+    result = ograpi.OGR_Dr_DeleteDataSource(cogr_driver, path.encode('utf-8'))
+    if result != OGRERR_NONE:
+        raise RuntimeError("Failed to remove data source {}".format(path))
+
+
 def _listlayers(path):
 
     """Provides a list of the layers in an OGR data source.
diff --git a/fiona/ogrext.pyx b/fiona/ogrext2.pyx
similarity index 85%
rename from fiona/ogrext.pyx
rename to fiona/ogrext2.pyx
index 6004f4e..9172bb9 100644
--- a/fiona/ogrext.pyx
+++ b/fiona/ogrext2.pyx
@@ -16,13 +16,14 @@ from fiona cimport ograpi
 from fiona._geometry cimport GeomBuilder, OGRGeomBuilder
 from fiona._err import cpl_errs
 from fiona._geometry import GEOMETRY_TYPES
-from fiona.errors import (
-    DriverError, SchemaError, CRSError, FionaValueError, FieldNameEncodeError,
-    StringFieldEncodeError, StringFieldDecodeError)
+from fiona.errors import DriverError, SchemaError, CRSError, FionaValueError
 from fiona.odict import OrderedDict
 from fiona.rfc3339 import parse_date, parse_datetime, parse_time
 from fiona.rfc3339 import FionaDateType, FionaDateTimeType, FionaTimeType
 
+from libc.stdlib cimport malloc, free
+from libc.string cimport strcmp
+
 
 log = logging.getLogger("Fiona")
 class NullHandler(logging.Handler):
@@ -49,8 +50,8 @@ FIELD_TYPES = [
     'date',         # OFTDate, Date
     'time',         # OFTTime, Time
     'datetime',     # OFTDateTime, Date and Time
-    'int',          # OFTInteger64, Single 64bit integer #Not supported
-    None,           # OFTInteger64List, List of 64bit integers #Not supported
+    'int',          # OFTInteger64, Single 64bit integer
+    None,           # OFTInteger64List, List of 64bit integers
     ]
 
 # Mapping of Fiona field type names to Python types.
@@ -63,6 +64,10 @@ FIELD_TYPES_MAP = {
     'datetime': FionaDateTimeType
    }
 
+# OGR Driver capability
+ODrCCreateDataSource = b"CreateDataSource"
+ODrCDeleteDataSource = b"DeleteDataSource"
+
 # OGR Layer capability
 OLC_RANDOMREAD = b"RandomRead"
 OLC_SEQUENTIALWRITE = b"SequentialWrite"
@@ -92,10 +97,19 @@ OGRERR_FAILURE = 6
 OGRERR_UNSUPPORTED_SRS = 7
 OGRERR_INVALID_HANDLE = 8
 
-# Recent versions of OGR can sometimes detect file encoding, but don't
-# provide access yet to the detected encoding. Hence this variable.
-OGR_DETECTED_ENCODING = '-ogr-detected-encoding'
 
+cdef char ** string_list(list_str):
+    """Build a NULL-terminated C array of char* from a list of bytes.
+
+    Based on an answer by Stack Overflow user falsetru:
+    https://stackoverflow.com/questions/17511309/fast-string-array-cython
+    """
+    # Allocate one extra slot for the NULL terminator; writing it at
+    # index len(list_str) would otherwise run one element past the end
+    # of the allocation (and fail outright on an empty list).
+    cdef char **ret = <char **>malloc((len(list_str) + 1) * sizeof(char *))
+    for i in range(len(list_str)):
+        ret[i] = list_str[i]
+    ret[len(list_str)] = NULL
+    return ret
 
 def _explode(coords):
     """Explode a GeoJSON geometry's coordinates object and yield
@@ -176,7 +190,7 @@ cdef class FeatureBuilder:
             if not ograpi.OGR_F_IsFieldSet(feature, i):
                 props[key] = None
             elif fieldtype is int:
-                props[key] = ograpi.OGR_F_GetFieldAsInteger(feature, i)
+                props[key] = ograpi.OGR_F_GetFieldAsInteger64(feature, i)
             elif fieldtype is float:
                 props[key] = ograpi.OGR_F_GetFieldAsDouble(feature, i)
 
@@ -184,10 +198,9 @@ cdef class FeatureBuilder:
                 try:
                     val = ograpi.OGR_F_GetFieldAsString(feature, i)
                     val = val.decode(encoding)
-                except UnicodeError as exc:
-                    raise StringFieldDecodeError(
-                        "Failed to decode {0} using {1} codec: {2}".format(
-                            val, encoding, str(exc)))
+                except UnicodeDecodeError:
+                    log.warning(
+                        "Failed to decode %s using %s codec", val, encoding)
 
                 # Does the text contain a JSON object? Let's check.
                 # Let's check as cheaply as we can.
@@ -263,15 +276,11 @@ cdef class OGRFeatureBuilder:
                 "Looking up %s in %s", key, repr(session._schema_mapping))
             ogr_key = session._schema_mapping[key]
             schema_type = collection.schema['properties'][key]
-
-            # Catch and re-raise unicode encoding errors.
             try:
                 key_bytes = ogr_key.encode(encoding)
-            except UnicodeError as exc:
-                raise FieldNameEncodeError(
-                    "Failed to encode {0} using {1} codec: {2}".format(
-                        key, encoding, str(exc)))
-
+            except UnicodeError:
+                log.warning("Failed to encode %s using %s codec", key, encoding)
+                key_bytes = ogr_key
             key_c = key_bytes
             i = ograpi.OGR_F_GetFieldIndex(cogr_feature, key_c)
             if i < 0:
@@ -283,7 +292,7 @@ cdef class OGRFeatureBuilder:
 
             # Continue over the standard OGR types.
             if isinstance(value, integer_types):
-                ograpi.OGR_F_SetFieldInteger(cogr_feature, i, value)
+                ograpi.OGR_F_SetFieldInteger64(cogr_feature, i, value)
             elif isinstance(value, float):
                 ograpi.OGR_F_SetFieldDouble(cogr_feature, i, value)
             elif (isinstance(value, string_types) 
@@ -313,15 +322,12 @@ cdef class OGRFeatureBuilder:
                 ograpi.OGR_F_SetFieldDateTime(
                     cogr_feature, i, 0, 0, 0, hh, mm, ss, 0)
             elif isinstance(value, string_types):
-                
-                # Catch and re-raise string field value encoding errors.
                 try:
                     value_bytes = value.encode(encoding)
-                except UnicodeError as exc:
-                    raise StringFieldEncodeError(
-                        "Failed to encode {0} using {1} codec: {2}".format(
-                            value, encoding, str(exc)))
-
+                except UnicodeError:
+                    log.warning(
+                        "Failed to encode %s using %s codec", value, encoding)
+                    value_bytes = value
                 string_c = value_bytes
                 ograpi.OGR_F_SetFieldString(cogr_feature, i, string_c)
             elif value is None:
@@ -381,14 +387,14 @@ cdef class Session:
         cdef const char *name_c = NULL
         cdef void *drv = NULL
         cdef void *ds = NULL
-
+        cdef char ** drvs = NULL
         if collection.path == '-':
             path = '/vsistdin/'
         else:
             path = collection.path
         try:
             path_b = path.encode('utf-8')
-        except UnicodeError:
+        except UnicodeDecodeError:
             # Presume already a UTF-8 encoded string
             path_b = path
         path_c = path_b
@@ -404,15 +410,32 @@ cdef class Session:
                     name_b = name.encode()
                     name_c = name_b
                     log.debug("Trying driver: %s", name)
-                    drv = ograpi.OGRGetDriverByName(name_c)
+                    drv = ograpi.GDALGetDriverByName(name_c)
                     if drv != NULL:
-                        ds = ograpi.OGR_Dr_Open(drv, path_c, 0)
+                        drvs = string_list([name_b])
+
+                        flags = ograpi.GDAL_OF_VECTOR | ograpi.GDAL_OF_READONLY
+                        log.debug("GDALOpenEx({}, {}, {})".format(path_c, flags, [name_b]))
+                        ds = ograpi.GDALOpenEx(path_c,
+                                               flags,
+                                               drvs,
+                                               NULL,
+                                               NULL)
+                        free(drvs)
                     if ds != NULL:
                         self.cogr_ds = ds
                         collection._driver = name
+                        _driver = ograpi.GDALGetDatasetDriver(ds)
+                        drv_name = ograpi.GDALGetDriverShortName(_driver)
+                        log.debug("Driver: {} Success".format(drv_name))
+
                         break
             else:
-                self.cogr_ds = ograpi.OGROpen(path_c, 0, NULL)
+                self.cogr_ds = ograpi.GDALOpenEx(path_c,
+                                                 ograpi.GDAL_OF_VECTOR | ograpi.GDAL_OF_READONLY,
+                                                 NULL,
+                                                 NULL,
+                                                 NULL)
 
         if self.cogr_ds == NULL:
             raise FionaValueError(
@@ -423,10 +446,10 @@ cdef class Session:
         if isinstance(collection.name, string_types):
             name_b = collection.name.encode('utf-8')
             name_c = name_b
-            self.cogr_layer = ograpi.OGR_DS_GetLayerByName(
+            self.cogr_layer = ograpi.GDALDatasetGetLayerByName(
                                 self.cogr_ds, name_c)
         elif isinstance(collection.name, int):
-            self.cogr_layer = ograpi.OGR_DS_GetLayer(
+            self.cogr_layer = ograpi.GDALDatasetGetLayer(
                                 self.cogr_ds, collection.name)
             name_c = ograpi.OGR_L_GetName(self.cogr_layer)
             name_b = name_c
@@ -445,14 +468,14 @@ cdef class Session:
             self._fileencoding = (
                 ograpi.OGR_L_TestCapability(
                     self.cogr_layer, OLC_STRINGSASUTF8) and
-                OGR_DETECTED_ENCODING) or (
+                'utf-8') or (
                 self.get_driver() == "ESRI Shapefile" and
                 'ISO-8859-1') or locale.getpreferredencoding().upper()
 
     def stop(self):
         self.cogr_layer = NULL
         if self.cogr_ds is not NULL:
-            ograpi.OGR_DS_Destroy(self.cogr_ds)
+            ograpi.GDALClose(self.cogr_ds)
         self.cogr_ds = NULL
 
     def get_fileencoding(self):
@@ -473,7 +496,7 @@ cdef class Session:
         return ograpi.OGR_L_GetFeatureCount(self.cogr_layer, 0)
 
     def get_driver(self):
-        cdef void *cogr_driver = ograpi.OGR_DS_GetDriver(self.cogr_ds)
+        cdef void *cogr_driver = ograpi.GDALGetDatasetDriver(self.cogr_ds)
         if cogr_driver == NULL:
             raise ValueError("Null driver")
         cdef char *name = ograpi.OGR_Dr_GetName(cogr_driver)
@@ -614,7 +637,7 @@ cdef class Session:
             crs_wkt = proj_b.decode('utf-8')
             ograpi.CPLFree(proj_c)
         else:
-            log.debug("Projection not found (cogr_crs was NULL)")
+            log.debug("Projection not found (cogr_crs was NULL)")        
         return crs_wkt
 
     def get_extent(self):
@@ -711,24 +734,29 @@ cdef class WritingSession(Session):
             if os.path.exists(path):
                 try:
                     path_b = path.encode('utf-8')
-                except UnicodeError:
+                except UnicodeDecodeError:
                     path_b = path
                 path_c = path_b
                 with cpl_errs:
-                    self.cogr_ds = ograpi.OGROpen(path_c, 1, NULL)
+                    self.cogr_ds = ograpi.GDALOpenEx(path_c,
+                                                 ograpi.GDAL_OF_VECTOR | ograpi.GDAL_OF_UPDATE,
+                                                 NULL,
+                                                 NULL,
+                                                 NULL)
                 if self.cogr_ds == NULL:
                     raise RuntimeError("Failed to open %s" % path)
-                cogr_driver = ograpi.OGR_DS_GetDriver(self.cogr_ds)
+                cogr_driver = ograpi.GDALGetDatasetDriver(self.cogr_ds)
                 if cogr_driver == NULL:
                     raise ValueError("Null driver")
 
                 if isinstance(collection.name, string_types):
                     name_b = collection.name.encode()
                     name_c = name_b
-                    self.cogr_layer = ograpi.OGR_DS_GetLayerByName(
+                    self.cogr_layer = ograpi.GDALDatasetGetLayerByName(
                                         self.cogr_ds, name_c)
                 elif isinstance(collection.name, int):
-                    self.cogr_layer = ograpi.OGR_DS_GetLayer(
+                    self.cogr_layer = ograpi.GDALDatasetGetLayer(
                                         self.cogr_ds, collection.name)
 
                 if self.cogr_layer == NULL:
@@ -740,41 +768,70 @@ cdef class WritingSession(Session):
             userencoding = self.collection.encoding
             self._fileencoding = (userencoding or (
                 ograpi.OGR_L_TestCapability(self.cogr_layer, OLC_STRINGSASUTF8) and
-                OGR_DETECTED_ENCODING) or (
+                'utf-8') or (
                 self.get_driver() == "ESRI Shapefile" and
                 'ISO-8859-1') or locale.getpreferredencoding()).upper()
 
         elif collection.mode == 'w':
             try:
                 path_b = path.encode('utf-8')
-            except UnicodeError:
+            except UnicodeDecodeError:
                 path_b = path
             path_c = path_b
             driver_b = collection.driver.encode()
             driver_c = driver_b
 
-            cogr_driver = ograpi.OGRGetDriverByName(driver_c)
+            cogr_driver = ograpi.GDALGetDriverByName(driver_c)
             if cogr_driver == NULL:
                 raise ValueError("Null driver")
 
             if not os.path.exists(path):
-                cogr_ds = ograpi.OGR_Dr_CreateDataSource(
-                    cogr_driver, path_c, NULL)
+                cogr_ds = ograpi.GDALCreate(
+                    cogr_driver,
+                    path_c,
+                    0,
+                    0,
+                    0,
+                    ograpi.GDT_Unknown,
+                    NULL)
 
             else:
                 with cpl_errs:
-                    cogr_ds = ograpi.OGROpen(path_c, 1, NULL)
+                    cogr_ds = ograpi.GDALOpenEx(path_c,
+                                     ograpi.GDAL_OF_VECTOR | ograpi.GDAL_OF_UPDATE,
+                                     NULL,
+                                     NULL,
+                                     NULL)
                 if cogr_ds == NULL:
-                    cogr_ds = ograpi.OGR_Dr_CreateDataSource(
-                        cogr_driver, path_c, NULL)
+                    cogr_ds = ograpi.GDALCreate(
+                        cogr_driver,
+                        path_c,
+                        0,
+                        0,
+                        0,
+                        ograpi.GDT_Unknown,
+                        NULL)
 
                 elif collection.name is None:
-                    ograpi.OGR_DS_Destroy(cogr_ds)
+                    ograpi.GDALClose(cogr_ds)
+                    cogr_ds = NULL
                     log.debug("Deleted pre-existing data at %s", path)
-                    
-                    cogr_ds = ograpi.OGR_Dr_CreateDataSource(
-                        cogr_driver, path_c, NULL)
+                    cogr_ds = ograpi.GDALCreate(
+                        cogr_driver,
+                        path_c,
+                        0,
+                        0,
+                        0,
+                        ograpi.GDT_Unknown,
+                        NULL)
 
                 else:
                     pass
@@ -842,10 +899,10 @@ cdef class WritingSession(Session):
                 options = ograpi.CSLSetNameValue(options, "ENCODING", fileencoding_c)
 
             # Does the layer exist already? If so, we delete it.
-            layer_count = ograpi.OGR_DS_GetLayerCount(self.cogr_ds)
+            layer_count = ograpi.GDALDatasetGetLayerCount(self.cogr_ds)
             layer_names = []
             for i in range(layer_count):
-                cogr_layer = ograpi.OGR_DS_GetLayer(cogr_ds, i)
+                cogr_layer = ograpi.GDALDatasetGetLayer(cogr_ds, i)
                 name_c = ograpi.OGR_L_GetName(cogr_layer)
                 name_b = name_c
                 layer_names.append(name_b.decode('utf-8'))
@@ -859,12 +916,12 @@ cdef class WritingSession(Session):
                     idx = collection.name
             if idx >= 0:
                 log.debug("Deleted pre-existing layer at %s", collection.name)
-                ograpi.OGR_DS_DeleteLayer(self.cogr_ds, idx)
+                ograpi.GDALDatasetDeleteLayer(self.cogr_ds, idx)
             
             # Create the named layer in the datasource.
             name_b = collection.name.encode('utf-8')
             name_c = name_b
-            self.cogr_layer = ograpi.OGR_DS_CreateLayer(
+            self.cogr_layer = ograpi.GDALDatasetCreateLayer(
                 self.cogr_ds, 
                 name_c,
                 cogr_srs,
@@ -891,7 +948,7 @@ cdef class WritingSession(Session):
                 # https://github.com/Toblerity/Fiona/issues/101.
                 if value == 'long':
                     value = 'int'
-                
+
                 # Is there a field width/precision?
                 width = precision = None
                 if ':' in value:
@@ -900,12 +957,18 @@ cdef class WritingSession(Session):
                         width, precision = map(int, fmt.split('.'))
                     else:
                         width = int(fmt)
-                
+
+                field_type = FIELD_TYPES.index(value)
+                # See https://trac.osgeo.org/gdal/wiki/rfc31_ogr_64
+                if value == 'int' and (width is not None and width >= 10):
+                    field_type = 12  # OFTInteger64
+
                 encoding = self.get_internalencoding()
                 key_bytes = key.encode(encoding)
+
                 cogr_fielddefn = ograpi.OGR_Fld_Create(
-                    key_bytes, 
-                    FIELD_TYPES.index(value) )
+                    key_bytes,
+                    field_type)
                 if cogr_fielddefn == NULL:
                     raise ValueError("Null field definition")
                 if width:
@@ -935,7 +998,7 @@ cdef class WritingSession(Session):
             raise ValueError("Null layer")
     
         schema_geom_type = collection.schema['geometry']
-        cogr_driver = ograpi.OGR_DS_GetDriver(self.cogr_ds)
+        cogr_driver = ograpi.GDALGetDatasetDriver(self.cogr_ds)
         if ograpi.OGR_Dr_GetName(cogr_driver) == b"GeoJSON":
             def validate_geometry_type(rec):
                 return True
@@ -982,9 +1045,8 @@ cdef class WritingSession(Session):
         if cogr_ds == NULL:
             raise ValueError("Null data source")
         log.debug("Syncing OGR to disk")
-        retval = ograpi.OGR_DS_SyncToDisk(cogr_ds)
-        if retval != OGRERR_NONE:
-            raise RuntimeError("Failed to sync to disk")
+
+        ograpi.GDALFlushCache(cogr_ds)
 
 
 cdef class Iterator:
@@ -1187,6 +1249,27 @@ cdef class KeysIterator(Iterator):
         return fid
 
 
+def _remove(path, driver=None):
+    """Deletes an OGR data source
+    """
+    cdef void *cogr_driver
+    cdef int result
+
+    if driver is None:
+        driver = 'ESRI Shapefile'
+
+    cogr_driver = ograpi.GDALGetDriverByName(driver.encode('utf-8'))
+    if cogr_driver == NULL:
+        raise ValueError("Null driver")
+
+    if not ograpi.OGR_Dr_TestCapability(cogr_driver, ODrCDeleteDataSource):
+        raise RuntimeError("Driver does not support dataset removal operation")
+
+    result = ograpi.GDALDeleteDataset(cogr_driver, path.encode('utf-8'))
+    if result != OGRERR_NONE:
+        raise RuntimeError("Failed to remove data source {}".format(path))
+
+
 def _listlayers(path):
 
     """Provides a list of the layers in an OGR data source.
@@ -1200,26 +1283,31 @@ def _listlayers(path):
     # Open OGR data source.
     try:
         path_b = path.encode('utf-8')
-    except UnicodeError:
+    except UnicodeDecodeError:
         path_b = path
     path_c = path_b
     with cpl_errs:
-        cogr_ds = ograpi.OGROpen(path_c, 0, NULL)
+        cogr_ds = ograpi.GDALOpenEx(path_c,
+             ograpi.GDAL_OF_VECTOR | ograpi.GDAL_OF_READONLY,
+             NULL,
+             NULL,
+             NULL)
     if cogr_ds == NULL:
         raise ValueError("No data available at path '%s'" % path)
     
     # Loop over the layers to get their names.
-    layer_count = ograpi.OGR_DS_GetLayerCount(cogr_ds)
+    layer_count = ograpi.GDALDatasetGetLayerCount(cogr_ds)
     layer_names = []
     for i in range(layer_count):
-        cogr_layer = ograpi.OGR_DS_GetLayer(cogr_ds, i)
+        cogr_layer = ograpi.GDALDatasetGetLayer(cogr_ds, i)
         name_c = ograpi.OGR_L_GetName(cogr_layer)
         name_b = name_c
         layer_names.append(name_b.decode('utf-8'))
     
     # Close up data source.
     if cogr_ds is not NULL:
-        ograpi.OGR_DS_Destroy(cogr_ds)
+        ograpi.GDALClose(cogr_ds)
     cogr_ds = NULL
 
     return layer_names
diff --git a/fiona/transform.py b/fiona/transform.py
index d80297b..b380b6a 100644
--- a/fiona/transform.py
+++ b/fiona/transform.py
@@ -1,17 +1,92 @@
-"""Raster warping and reprojection"""
+"""Coordinate and geometry warping and reprojection"""
 
 from fiona._transform import _transform, _transform_geom
 
 
 def transform(src_crs, dst_crs, xs, ys):
-    """Return transformed vectors of x and y."""
+    """Transform coordinates from one reference system to another.
+
+    Parameters
+    ----------
+    src_crs: str or dict
+        A string like 'EPSG:4326' or a dict of proj4 parameters like
+        {'proj': 'lcc', 'lat_0': 18.0, 'lat_1': 18.0, 'lon_0': -77.0}
+        representing the coordinate reference system on the "source"
+        or "from" side of the transformation.
+    dst_crs: str or dict
+        A string or dict representing the coordinate reference system
+        on the "destination" or "to" side of the transformation.
+    xs: sequence of float
+        A list or tuple of x coordinate values. Must have the same
+        length as the ``ys`` parameter.
+    ys: sequence of float
+        A list or tuple of y coordinate values. Must have the same
+        length as the ``xs`` parameter.
+
+    Returns
+    -------
+    xp, yp: list of float
+        A pair of transformed coordinate sequences. The elements of
+        ``xp`` and ``yp`` correspond exactly to the elements of the
+        ``xs`` and ``ys`` input parameters.
+
+    Examples
+    --------
+
+    >>> transform('EPSG:4326', 'EPSG:26953', [-105.0], [40.0])
+    ([957097.0952383667], [378940.8419189212])
+
+    """
+    # Function is implemented in the _transform C extension module.
     return _transform(src_crs, dst_crs, xs, ys)
 
 
 def transform_geom(
         src_crs, dst_crs, geom,
         antimeridian_cutting=False, antimeridian_offset=10.0, precision=-1):
-    """Return transformed geometry."""
+    """Transform a geometry obj from one reference system to another.
+
+    Parameters
+    ----------
+    src_crs: str or dict
+        A string like 'EPSG:4326' or a dict of proj4 parameters like
+        {'proj': 'lcc', 'lat_0': 18.0, 'lat_1': 18.0, 'lon_0': -77.0}
+        representing the coordinate reference system on the "source"
+        or "from" side of the transformation.
+    dst_crs: str or dict
+        A string or dict representing the coordinate reference system
+        on the "destination" or "to" side of the transformation.
+    geom: obj
+        A GeoJSON-like geometry object with 'type' and 'coordinates'
+        members.
+    antimeridian_cutting: bool, optional
+        ``True`` to cut output geometries in two at the antimeridian,
+        the default is ``False``.
+    antimeridian_offset: float, optional
+        A distance in decimal degrees from the antimeridian, outside of
+        which geometries will not be cut.
+    precision: int, optional
+        Optional rounding precision of output coordinates, in number
+        of decimal places.
+
+    Returns
+    -------
+    obj
+        A new GeoJSON-like geometry with transformed coordinates. Note
+        that if the output crosses the antimeridian, it may be cut and
+        of a different geometry ``type`` than the input, e.g., a
+        polygon input may result in multi-polygon output.
+
+    Examples
+    --------
+
+    >>> transform_geom(
+    ...     'EPSG:4326', 'EPSG:26953',
+    ...     {'type': 'Point', 'coordinates': [-105.0, 40.0]})
+    {'type': 'Point', 'coordinates': (957097.0952383667, 378940.8419189212)}
+
+    """
+    # Function is implemented in the _transform C extension module.
     return _transform_geom(
         src_crs, dst_crs, geom,
         antimeridian_cutting, antimeridian_offset, precision)
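Building on the docstring above, a sketch of antimeridian cutting; with
cutting enabled, a LineString crossing the 180th meridian may come back
as a MultiLineString (the identity transformation here is just to keep
the example small):

    from fiona.transform import transform_geom

    line = {'type': 'LineString',
            'coordinates': [(179.0, 0.0), (-179.0, 0.0)]}
    result = transform_geom('EPSG:4326', 'EPSG:4326', line,
                            antimeridian_cutting=True)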
diff --git a/pep-508-install b/pep-508-install
new file mode 100755
index 0000000..4bfc6c1
--- /dev/null
+++ b/pep-508-install
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+"""Prototype support for PEP 518:
+
+"Specifying Minimum Build System Requirements for Python Projects".
+
+A future version of pip will do this for us and we'll remove this script.
+
+This script installs Fiona in develop mode (``pip install -e .[test]``).
+"""
+
+import subprocess
+
+
+def main():
+
+    # Parse config file for build system requirements.
+    build_system_requirements = None
+    with open('pyproject.toml') as config:
+        for line in config:
+            if line.startswith('requires'):
+                build_system_requirements = line.split('=')[-1]
+
+    # Install them if found. The requires line is a list literal, so
+    # eval is good enough for this throwaway prototype.
+    if build_system_requirements:
+        reqs = eval(build_system_requirements)
+        subprocess.call(['pip', 'install'] + reqs)
+
+    # Now install our package in editable mode.
+    subprocess.call(['pip', 'install', '-e', '.[test]'])
+
+
+if __name__ == '__main__':
+    main()
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..48286c3
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+# Minimum requirements for the build system to execute.
+requires = ["setuptools", "wheel", "cython"]
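Given this file, the pep-508-install prototype above effectively runs
`pip install setuptools wheel cython` followed by `pip install -e .[test]`.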
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 3012d81..e8fdee9 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,4 +1,7 @@
+-r requirements.txt
+coverage
 cython>=0.21.2
 nose
 pytest
+pytest-cov
 setuptools
diff --git a/requirements.txt b/requirements.txt
index 92e10dc..b5b3e44 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,3 +2,4 @@ argparse
 cligj
 six
 ordereddict
+munch
diff --git a/scripts/travis_gdal_install.sh b/scripts/travis_gdal_install.sh
new file mode 100755
index 0000000..ea94072
--- /dev/null
+++ b/scripts/travis_gdal_install.sh
@@ -0,0 +1,89 @@
+#!/bin/sh
+set -ex
+
+GDALOPTS="  --with-ogr \
+            --with-geos \
+            --with-expat \
+            --without-libtool \
+            --with-libz=internal \
+            --with-libtiff=internal \
+            --with-geotiff=internal \
+            --without-gif \
+            --without-pg \
+            --without-grass \
+            --without-libgrass \
+            --without-cfitsio \
+            --without-pcraster \
+            --without-netcdf \
+            --without-png \
+            --with-jpeg=internal \
+            --without-ogdi \
+            --without-fme \
+            --without-hdf4 \
+            --without-hdf5 \
+            --without-jasper \
+            --without-ecw \
+            --without-kakadu \
+            --without-mrsid \
+            --without-jp2mrsid \
+            --without-bsb \
+            --without-grib \
+            --without-mysql \
+            --without-ingres \
+            --without-xerces \
+            --without-odbc \
+            --without-curl \
+            --without-sqlite3 \
+            --without-dwgdirect \
+            --without-idb \
+            --without-sde \
+            --without-perl \
+            --without-php \
+            --without-ruby \
+            --without-python"
+
+# Create build dir if not exists
+if [ ! -d "$GDALBUILD" ]; then
+  mkdir "$GDALBUILD";
+fi
+
+if [ ! -d "$GDALINST" ]; then
+  mkdir "$GDALINST";
+fi
+
+ls -l $GDALINST
+
+# Download and compile the GDAL versions used in CI.
+if [ ! -d $GDALINST/gdal-1.9.2 ]; then
+  cd $GDALBUILD
+  wget http://download.osgeo.org/gdal/gdal-1.9.2.tar.gz
+  tar -xzf gdal-1.9.2.tar.gz
+  cd gdal-1.9.2
+  ./configure --prefix=$GDALINST/gdal-1.9.2 $GDALOPTS
+  make -s -j 2
+  make install
+fi
+
+if [ ! -d $GDALINST/gdal-1.11.4 ]; then
+  cd $GDALBUILD
+  wget http://download.osgeo.org/gdal/1.11.4/gdal-1.11.4.tar.gz
+  tar -xzf gdal-1.11.4.tar.gz
+  cd gdal-1.11.4
+  ./configure --prefix=$GDALINST/gdal-1.11.4 $GDALOPTS
+  make -s -j 2
+  make install
+fi
+
+if [ ! -d $GDALINST/gdal-2.0.2 ]; then
+  cd $GDALBUILD
+  wget http://download.osgeo.org/gdal/2.0.2/gdal-2.0.2.tar.gz
+  tar -xzf gdal-2.0.2.tar.gz
+  cd gdal-2.0.2
+  ./configure --prefix=$GDALINST/gdal-2.0.2 $GDALOPTS
+  make -s -j 2
+  make install
+fi
+
+# change back to travis build dir
+cd $TRAVIS_BUILD_DIR
diff --git a/setup.py b/setup.py
index 77a1953..53a9380 100644
--- a/setup.py
+++ b/setup.py
@@ -66,63 +66,76 @@ with open('CREDITS.txt', **open_kwds) as f:
 with open('CHANGES.txt', **open_kwds) as f:
     changes = f.read()
 
+
+def copy_gdalapi(gdalversion):
+    # gdal-config --version returns bytes under Python 3.
+    if not isinstance(gdalversion, str):
+        gdalversion = gdalversion.decode('utf-8')
+    if gdalversion.startswith(u'1'):
+        log.info("Building Fiona for gdal 1.x: {0}".format(gdalversion))
+        shutil.copy('fiona/ogrext1.pyx', 'fiona/ogrext.pyx')
+        shutil.copy('fiona/ograpi1.pxd', 'fiona/ograpi.pxd')
+    else:
+        log.info("Building Fiona for gdal 2.x: {0}".format(gdalversion))
+        shutil.copy('fiona/ogrext2.pyx', 'fiona/ogrext.pyx')
+        shutil.copy('fiona/ograpi2.pxd', 'fiona/ograpi.pxd')
+
+if '--gdalversion' in sys.argv and 'clean' not in sys.argv:
+    index = sys.argv.index('--gdalversion')
+    sys.argv.pop(index)
+    gdalversion = sys.argv.pop(index)
+    copy_gdalapi(gdalversion)
+
 # By default we'll try to get options via gdal-config. On systems without,
 # options will need to be set in setup.cfg or on the setup command line.
 include_dirs = []
 library_dirs = []
 libraries = []
 extra_link_args = []
-gdal_output = [None]*3
+gdal_output = [None] * 4
 
-try:
-    gdal_config = os.environ.get('GDAL_CONFIG', 'gdal-config')
-    for i, flag in enumerate(("--cflags", "--libs", "--datadir")):
-        gdal_output[i] = check_output([gdal_config, flag]).strip()
-
-    for item in gdal_output[0].split():
-        if item.startswith("-I"):
-            include_dirs.extend(item[2:].split(":"))
-    for item in gdal_output[1].split():
-        if item.startswith("-L"):
-            library_dirs.extend(item[2:].split(":"))
-        elif item.startswith("-l"):
-            libraries.append(item[2:])
+if 'clean' not in sys.argv:
+    try:
+        gdal_config = os.environ.get('GDAL_CONFIG', 'gdal-config')
+        for i, flag in enumerate(("--cflags", "--libs", "--datadir", "--version")):
+            gdal_output[i] = check_output([gdal_config, flag]).strip()
+
+        for item in gdal_output[0].split():
+            if item.startswith("-I"):
+                include_dirs.extend(item[2:].split(":"))
+        for item in gdal_output[1].split():
+            if item.startswith("-L"):
+                library_dirs.extend(item[2:].split(":"))
+            elif item.startswith("-l"):
+                libraries.append(item[2:])
+            else:
+                # e.g. -framework GDAL
+                extra_link_args.append(item)
+
+        copy_gdalapi(gdal_output[3])
+
+    except Exception as e:
+        if os.name == "nt":
+            log.info("Building on Windows requires extra options to setup.py "
+                     "to locate needed GDAL files.\nMore information is "
+                     "available in the README.")
         else:
-            # e.g. -framework GDAL
-            extra_link_args.append(item)
+            log.warning("Failed to get options via gdal-config: %s", str(e))
 
-except Exception as e:
-    if os.name == "nt":
-        log.info(("Building on Windows requires extra options to setup.py to locate needed GDAL files.\n"
-                   "More information is available in the README."))
-    else:
-        log.warning("Failed to get options via gdal-config: %s", str(e))
-
-    # Conditionally copy the GDAL data. To be used in conjunction with
-    # the bdist_wheel command to make self-contained binary wheels.
     if os.environ.get('PACKAGE_DATA'):
-        try:
-            shutil.rmtree('fiona/gdal_data')
-        except OSError:
-            pass
-        shutil.copytree(datadir, 'fiona/gdal_data')
-if os.environ.get('PACKAGE_DATA'):
-    destdir = 'fiona/gdal_data'
-    if gdal_output[2]:
-        log.info("Copying gdal data from %s" % gdal_output[2])
-        copy_data_tree(gdal_output[2], destdir)
-    else:
-        # check to see if GDAL_DATA is defined
-        gdal_data = os.environ.get('GDAL_DATA', None)
-        if gdal_data:
-            log.info("Copying gdal data from %s" % gdal_data)
-            copy_data_tree(gdal_data, destdir)
-
-    # Conditionally copy PROJ.4 data. 
-    projdatadir = os.environ.get('PROJ_LIB', '/usr/local/share/proj')
-    if os.path.exists(projdatadir):
-        log.info("Copying proj data from %s" % projdatadir)
-        copy_data_tree(projdatadir, 'fiona/proj_data')
+        destdir = 'fiona/gdal_data'
+        if gdal_output[2]:
+            log.info("Copying gdal data from %s" % gdal_output[2])
+            copy_data_tree(gdal_output[2], destdir)
+        else:
+            # check to see if GDAL_DATA is defined
+            gdal_data = os.environ.get('GDAL_DATA', None)
+            if gdal_data:
+                log.info("Copying gdal data from %s" % gdal_data)
+                copy_data_tree(gdal_data, destdir)
+
+        # Conditionally copy PROJ.4 data.
+        projdatadir = os.environ.get('PROJ_LIB', '/usr/local/share/proj')
+        if os.path.exists(projdatadir):
+            log.info("Copying proj data from %s" % projdatadir)
+            copy_data_tree(projdatadir, 'fiona/proj_data')
 
 ext_options = dict(
     include_dirs=include_dirs,
@@ -131,16 +144,18 @@ ext_options = dict(
     extra_link_args=extra_link_args)
 
 # When building from a repo, Cython is required.
-if os.path.exists("MANIFEST.in"):
+if os.path.exists("MANIFEST.in") and "clean" not in sys.argv:
     log.info("MANIFEST.in found, presume a repo, cythonizing...")
     if not cythonize:
         log.critical(
             "Cython.Build.cythonize not found. "
             "Cython is required to build from a repo.")
         sys.exit(1)
+
     ext_modules = cythonize([
         Extension('fiona._geometry', ['fiona/_geometry.pyx'], **ext_options),
         Extension('fiona._transform', ['fiona/_transform.pyx'], **ext_options),
+        Extension('fiona._crs', ['fiona/_crs.pyx'], **ext_options),
         Extension('fiona._drivers', ['fiona/_drivers.pyx'], **ext_options),
         Extension('fiona._err', ['fiona/_err.pyx'], **ext_options),
         Extension('fiona.ogrext', ['fiona/ogrext.pyx'], **ext_options)])
@@ -149,6 +164,7 @@ else:
     ext_modules = [
         Extension('fiona._transform', ['fiona/_transform.cpp'], **ext_options),
         Extension('fiona._geometry', ['fiona/_geometry.c'], **ext_options),
+        Extension('fiona._crs', ['fiona/_crs.c'], **ext_options),
         Extension('fiona._drivers', ['fiona/_drivers.c'], **ext_options),
         Extension('fiona._err', ['fiona/_err.c'], **ext_options),
         Extension('fiona.ogrext', ['fiona/ogrext.c'], **ext_options)]
@@ -156,7 +172,8 @@ else:
 requirements = [
     'cligj',
     'click-plugins',
-    'six'
+    'six',
+    'munch'
 ]
 if sys.version_info < (2, 7):
     requirements.append('argparse')
@@ -166,8 +183,8 @@ setup_args = dict(
     metadata_version='1.2',
     name='Fiona',
     version=version,
-    requires_python = '>=2.6',
-    requires_external = 'GDAL (>=1.8)',
+    requires_python='>=2.6',
+    requires_external='GDAL (>=1.8)',
     description="Fiona reads and writes spatial data files",
     license='BSD',
     keywords='gis vector feature data',
@@ -185,16 +202,22 @@ setup_args = dict(
 
         [fiona.fio_commands]
         bounds=fiona.fio.bounds:bounds
+        calc=fiona.fio.calc:calc
         cat=fiona.fio.cat:cat
-        collect=fiona.fio.cat:collect
-        distrib=fiona.fio.cat:distrib
-        dump=fiona.fio.cat:dump
-        env=fiona.fio.info:env
+        collect=fiona.fio.collect:collect
+        distrib=fiona.fio.distrib:distrib
+        dump=fiona.fio.dump:dump
+        env=fiona.fio.env:env
+        filter=fiona.fio.filter:filter
         info=fiona.fio.info:info
-        insp=fiona.fio.info:insp
-        load=fiona.fio.cat:load
+        insp=fiona.fio.insp:insp
+        load=fiona.fio.load:load
+        ls=fiona.fio.ls:ls
         ''',
     install_requires=requirements,
+    extras_require={
+        'calc': ['shapely'],
+        'test': ['nose']},
     tests_require=['nose'],
     test_suite='nose.collector',
     ext_modules=ext_modules,
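The `fiona.fio_commands` entry points above are how the `fio` program
discovers its subcommands through click-plugins. A sketch of a
third-party plugin command; the package, module, and command names are
hypothetical:

    # mypkg/cli.py, registered in the plugin's own setup.py under
    # [fiona.fio_commands] as: touch=mypkg.cli:touch
    import click

    @click.command(short_help="Hypothetical example subcommand.")
    def touch():
        """Runs as 'fio touch' once the plugin is installed."""
        click.echo("hello from a fio plugin")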
diff --git a/tests/test_bigint.py b/tests/test_bigint.py
new file mode 100644
index 0000000..35f4eaa
--- /dev/null
+++ b/tests/test_bigint.py
@@ -0,0 +1,69 @@
+import fiona
+import os
+import shutil
+import tempfile
+import unittest
+from fiona.ogrext import calc_gdal_version_num, get_gdal_version_num
+
+"""
+
+OGR 54bit handling: https://trac.osgeo.org/gdal/wiki/rfc31_ogr_64
+
+Shapefile: OFTInteger fields are created by default with a width of 9
+characters, so to be unambiguously read as OFTInteger (and if specifying
+integer that require 10 or 11 characters. the field is dynamically extended
+like managed since a few versions). OFTInteger64 fields are created by default
+with a width of 18 digits, so to be unambiguously read as OFTInteger64, and
+extented to 19 or 20 if needed. Integer fields of width between 10 and 18
+will be read as OFTInteger64. Above they will be treated as OFTReal. In
+previous GDAL versions, Integer fields were created with a default with of 10,
+and thus will be now read as OFTInteger64. An open option, DETECT_TYPE=YES, can
+be specified so as OGR does a full scan of the DBF file to see if integer
+fields of size 10 or 11 hold 32 bit or 64 bit values and adjust the type
+accordingly (and same for integer fields of size 19 or 20, in case of overflow
+of 64 bit integer, OFTReal is chosen)
+"""
+class TestBigInt(unittest.TestCase):
+
+    def setUp(self):
+        self.tempdir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        shutil.rmtree(self.tempdir)
+
+    def testCreateBigIntSchema(self):
+        name = os.path.join(self.tempdir, 'output1.shp')
+
+        a_bigint = 10 ** 18 - 1
+        fieldname = 'abigint'
+
+        kwargs = {
+            'driver': 'ESRI Shapefile',
+            'crs': 'EPSG:4326',
+            'schema': {
+                'geometry': 'Point',
+                'properties': [(fieldname, 'int:10')]}}
+        if get_gdal_version_num() < calc_gdal_version_num(2, 0, 0):
+            with self.assertRaises(OverflowError):
+                with fiona.open(name, 'w', **kwargs) as dst:
+                    rec = {}
+                    rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)}
+                    rec['properties'] = {fieldname: a_bigint}
+                    dst.write(rec)
+        else:
+
+            with fiona.open(name, 'w', **kwargs) as dst:
+                rec = {}
+                rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)}
+                rec['properties'] = {fieldname: a_bigint}
+                dst.write(rec)
+
+            with fiona.open(name) as src:
+                if get_gdal_version_num() >= calc_gdal_version_num(2, 0, 0):
+                    first = next(src)
+                    self.assertEqual(first['properties'][fieldname], a_bigint)
+
+
+if __name__ == "__main__":
+    unittest.main()
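The width rule this test exercises comes from the ogrext2 changes above:
with GDAL 2, an 'int' schema type declared with a width of 10 or more is
created as a 64-bit OFTInteger64 field. A schema sketch:

    schema = {
        'geometry': 'Point',
        'properties': [
            ('small', 'int:9'),   # width <= 9, stays OFTInteger
            ('big', 'int:18'),    # width >= 10, becomes OFTInteger64
        ],
    }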
diff --git a/tests/test_bytescollection.py b/tests/test_bytescollection.py
index 3aecd30..cdee1f0 100644
--- a/tests/test_bytescollection.py
+++ b/tests/test_bytescollection.py
@@ -23,159 +23,159 @@ class ReadingTest(unittest.TestCase):
 
     def test_open_repr(self):
         # I'm skipping checking the name of the virtual file as it is produced by uuid.
-        self.failUnless(
+        self.assertTrue(
             repr(self.c).startswith("<open BytesCollection '/vsimem/") and
             repr(self.c).endswith(":OGRGeoJSON', mode 'r' at %s>" % hex(id(self.c))))
 
     def test_closed_repr(self):
         # I'm skipping checking the name of the virtual file as it is produced by uuid.
         self.c.close()
-        self.failUnless(
+        self.assertTrue(
             repr(self.c).startswith("<closed BytesCollection '/vsimem/") and
             repr(self.c).endswith(":OGRGeoJSON', mode 'r' at %s>" % hex(id(self.c))))
 
     def test_path(self):
-        self.failUnlessEqual(self.c.path, self.c.virtual_file)
+        self.assertEqual(self.c.path, self.c.virtual_file)
 
     def test_closed_virtual_file(self):
         self.c.close()
-        self.failUnless(self.c.virtual_file is None)
+        self.assertTrue(self.c.virtual_file is None)
 
     def test_closed_buf(self):
         self.c.close()
-        self.failUnless(self.c.bytesbuf is None)
+        self.assertTrue(self.c.bytesbuf is None)
 
     def test_name(self):
-        self.failUnlessEqual(self.c.name, 'OGRGeoJSON')
+        self.assertEqual(self.c.name, 'OGRGeoJSON')
 
     def test_mode(self):
-        self.failUnlessEqual(self.c.mode, 'r')
+        self.assertEqual(self.c.mode, 'r')
 
     def test_collection(self):
-        self.failUnlessEqual(self.c.encoding, 'utf-8')
+        self.assertEqual(self.c.encoding, 'utf-8')
 
     def test_iter(self):
-        self.failUnless(iter(self.c))
+        self.assertTrue(iter(self.c))
 
     def test_closed_no_iter(self):
         self.c.close()
         self.assertRaises(ValueError, iter, self.c)
 
     def test_len(self):
-        self.failUnlessEqual(len(self.c), 67)
+        self.assertEqual(len(self.c), 67)
 
     def test_closed_len(self):
         # Len is lazy, it's never computed in this case. TODO?
         self.c.close()
-        self.failUnlessEqual(len(self.c), 0)
+        self.assertEqual(len(self.c), 0)
 
     def test_len_closed_len(self):
         # Lazy len is computed in this case and sticks.
         len(self.c)
         self.c.close()
-        self.failUnlessEqual(len(self.c), 67)
+        self.assertEqual(len(self.c), 67)
 
     def test_driver(self):
-        self.failUnlessEqual(self.c.driver, "GeoJSON")
+        self.assertEqual(self.c.driver, "GeoJSON")
 
     def test_closed_driver(self):
         self.c.close()
-        self.failUnlessEqual(self.c.driver, None)
+        self.assertEqual(self.c.driver, None)
 
     def test_driver_closed_driver(self):
         self.c.driver
         self.c.close()
-        self.failUnlessEqual(self.c.driver, "GeoJSON")
+        self.assertEqual(self.c.driver, "GeoJSON")
 
     def test_schema(self):
         s = self.c.schema['properties']
-        self.failUnlessEqual(s['PERIMETER'], "float")
-        self.failUnlessEqual(s['NAME'], "str")
-        self.failUnlessEqual(s['URL'], "str")
-        self.failUnlessEqual(s['STATE_FIPS'], "str")
-        self.failUnlessEqual(s['WILDRNP020'], "int")
+        self.assertEqual(s['PERIMETER'], "float")
+        self.assertEqual(s['NAME'], "str")
+        self.assertEqual(s['URL'], "str")
+        self.assertEqual(s['STATE_FIPS'], "str")
+        self.assertEqual(s['WILDRNP020'], "int")
 
     def test_closed_schema(self):
         # Schema is lazy too, never computed in this case. TODO?
         self.c.close()
-        self.failUnlessEqual(self.c.schema, None)
+        self.assertEqual(self.c.schema, None)
 
     def test_schema_closed_schema(self):
         self.c.schema
         self.c.close()
-        self.failUnlessEqual(
+        self.assertEqual(
             sorted(self.c.schema.keys()),
             ['geometry', 'properties'])
 
     def test_crs(self):
         crs = self.c.crs
-        self.failUnlessEqual(crs['init'], 'epsg:4326')
+        self.assertEqual(crs['init'], 'epsg:4326')
 
     def test_crs_wkt(self):
         crs = self.c.crs_wkt
-        self.failUnless(crs.startswith('GEOGCS["WGS 84"'))
+        self.assertTrue(crs.startswith('GEOGCS["WGS 84"'))
 
     def test_closed_crs(self):
         # Crs is lazy too, never computed in this case. TODO?
         self.c.close()
-        self.failUnlessEqual(self.c.crs, None)
+        self.assertEqual(self.c.crs, None)
 
     def test_crs_closed_crs(self):
         self.c.crs
         self.c.close()
-        self.failUnlessEqual(
+        self.assertEqual(
             sorted(self.c.crs.keys()),
             ['init'])
 
     def test_meta(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             sorted(self.c.meta.keys()),
             ['crs', 'crs_wkt', 'driver', 'schema'])
 
     def test_bounds(self):
-        self.failUnlessAlmostEqual(self.c.bounds[0], -113.564247, 6)
-        self.failUnlessAlmostEqual(self.c.bounds[1], 37.068981, 6)
-        self.failUnlessAlmostEqual(self.c.bounds[2], -104.970871, 6)
-        self.failUnlessAlmostEqual(self.c.bounds[3], 41.996277, 6)
+        self.assertAlmostEqual(self.c.bounds[0], -113.564247, 6)
+        self.assertAlmostEqual(self.c.bounds[1], 37.068981, 6)
+        self.assertAlmostEqual(self.c.bounds[2], -104.970871, 6)
+        self.assertAlmostEqual(self.c.bounds[3], 41.996277, 6)
 
     def test_iter_one(self):
         itr = iter(self.c)
         f = next(itr)
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_iter_list(self):
         f = list(self.c)[0]
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_re_iter_list(self):
         f = list(self.c)[0] # Run through iterator
         f = list(self.c)[0] # Run through a new, reset iterator
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_getitem_one(self):
         f = self.c[0]
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_no_write(self):
         self.assertRaises(IOError, self.c.write, {})
 
     def test_iter_items_list(self):
         i, f = list(self.c.items())[0]
-        self.failUnlessEqual(i, 0)
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(i, 0)
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_iter_keys_list(self):
         i = list(self.c.keys())[0]
-        self.failUnlessEqual(i, 0)
+        self.assertEqual(i, 0)
 
     def test_in_keys(self):
-        self.failUnless(0 in self.c.keys())
-        self.failUnless(0 in self.c)
+        self.assertTrue(0 in self.c.keys())
+        self.assertTrue(0 in self.c)
 
 class FilterReadingTest(unittest.TestCase):
 
@@ -189,16 +189,16 @@ class FilterReadingTest(unittest.TestCase):
 
     def test_filter_1(self):
         results = list(self.c.filter(bbox=(-120.0, 30.0, -100.0, 50.0)))
-        self.failUnlessEqual(len(results), 67)
+        self.assertEqual(len(results), 67)
         f = results[0]
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_filter_reset(self):
         results = list(self.c.filter(bbox=(-112.0, 38.0, -106.0, 40.0)))
-        self.failUnlessEqual(len(results), 26)
+        self.assertEqual(len(results), 26)
         results = list(self.c.filter())
-        self.failUnlessEqual(len(results), 67)
+        self.assertEqual(len(results), 67)
 
     def test_filter_mask(self):
         mask = {
@@ -206,7 +206,7 @@ class FilterReadingTest(unittest.TestCase):
             'coordinates': (
                 ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)),)}
         results = list(self.c.filter(mask=mask))
-        self.failUnlessEqual(len(results), 26)
+        self.assertEqual(len(results), 26)
 
 
 
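The pattern in the hunks above repeats across the whole test suite: the
deprecated failUnless*() aliases are replaced by their modern assert*()
counterparts, which behave identically. A minimal sketch of the rename:

    import unittest

    # failUnlessEqual/failUnless/failUnlessAlmostEqual are deprecated
    # aliases of assertEqual/assertTrue/assertAlmostEqual; only the
    # spelling changes, not the behaviour.
    class AliasDemo(unittest.TestCase):
        def test_modern_spellings(self):
            self.assertEqual(1 + 1, 2)
            self.assertTrue(0 in [0, 1])
            self.assertAlmostEqual(0.1 + 0.2, 0.3, places=6)

    if __name__ == '__main__':
        unittest.main()
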
diff --git a/tests/test_cli.py b/tests/test_cli.py
deleted file mode 100644
index ab659f1..0000000
--- a/tests/test_cli.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from pkg_resources import iter_entry_points
-import re
-
-from click.testing import CliRunner
-
-from fiona.fio.main import main_group
-
-
-WILDSHP = 'tests/data/coutwildrnp.shp'
-
-
-def test_info_json():
-    runner = CliRunner()
-    result = runner.invoke(main_group, ['info', WILDSHP])
-    assert result.exit_code == 0
-    assert '"count": 67' in result.output
-    assert '"crs": "EPSG:4326"' in result.output
-    assert '"driver": "ESRI Shapefile"' in result.output
-
-
-def test_info_count():
-    runner = CliRunner()
-    result = runner.invoke(main_group, ['info', '--count', WILDSHP])
-    assert result.exit_code == 0
-    assert result.output == "67\n"
-
-
-def test_info_bounds():
-    runner = CliRunner()
-    result = runner.invoke(main_group, ['info', '--bounds', WILDSHP])
-    assert result.exit_code == 0
-    assert len(re.findall(r'\d*\.\d*', result.output)) == 4
-
-
-def test_all_registered():
-    # Make sure all the subcommands are actually registered to the main CLI group
-    for ep in iter_entry_points('fiona.fio_commands'):
-        assert ep.name in main_group.commands
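
The deleted test_cli.py cases are not lost: they reappear below in the
new tests/test_fio_info.py. The test_all_registered() check iterates
the 'fiona.fio_commands' entry-point group, which is how fio
subcommands get attached to the main CLI group. An illustrative (not
verbatim) setup.py declaration for that group:

    from setuptools import setup

    # Illustrative sketch only; the command list and module paths are
    # assumptions, not the project's actual setup.py.
    setup(
        name='Fiona',
        entry_points={
            'console_scripts': ['fio=fiona.fio.main:main_group'],
            'fiona.fio_commands': [
                'bounds=fiona.fio.bounds:bounds',
                'calc=fiona.fio.calc:calc',
                'dump=fiona.fio.dump:dump',
            ],
        },
    )
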
diff --git a/tests/test_collection.py b/tests/test_collection.py
index f49608e..8c9cbd9 100644
--- a/tests/test_collection.py
+++ b/tests/test_collection.py
@@ -16,237 +16,257 @@ from fiona.errors import FionaValueError, DriverError, SchemaError, CRSError
 
 WILDSHP = 'tests/data/coutwildrnp.shp'
 
-#logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
+logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
 
 TEMPDIR = tempfile.gettempdir()
 
 
 class SupportedDriversTest(unittest.TestCase):
+
     def test_shapefile(self):
-        self.failUnless("ESRI Shapefile" in supported_drivers)
-        self.failUnlessEqual(
-            set(supported_drivers["ESRI Shapefile"]), set("raw") )
+        self.assertTrue("ESRI Shapefile" in supported_drivers)
+        self.assertEqual(
+            set(supported_drivers["ESRI Shapefile"]), set("raw"))
+
     def test_map(self):
-        self.failUnless("MapInfo File" in supported_drivers)
-        self.failUnlessEqual(
-            set(supported_drivers["MapInfo File"]), set("raw") )
+        self.assertTrue("MapInfo File" in supported_drivers)
+        self.assertEqual(
+            set(supported_drivers["MapInfo File"]), set("raw"))
 
 
 class CollectionArgsTest(unittest.TestCase):
+
     def test_path(self):
         self.assertRaises(TypeError, Collection, (0))
+
     def test_mode(self):
         self.assertRaises(TypeError, Collection, ("foo"), mode=0)
+
     def test_driver(self):
         self.assertRaises(TypeError, Collection, ("foo"), mode='w', driver=1)
+
     def test_schema(self):
         self.assertRaises(
-            TypeError, Collection, ("foo"), mode='w', 
+            TypeError, Collection, ("foo"), mode='w',
             driver="ESRI Shapefile", schema=1)
+
     def test_crs(self):
         self.assertRaises(
-            TypeError, Collection, ("foo"), mode='w', 
+            TypeError, Collection, ("foo"), mode='w',
             driver="ESRI Shapefile", schema=0, crs=1)
+
     def test_encoding(self):
         self.assertRaises(
-            TypeError, Collection, ("foo"), mode='r', 
+            TypeError, Collection, ("foo"), mode='r',
             encoding=1)
+
     def test_layer(self):
         self.assertRaises(
-            TypeError, Collection, ("foo"), mode='r', 
+            TypeError, Collection, ("foo"), mode='r',
             layer=0.5)
+
     def test_vsi(self):
         self.assertRaises(
-            TypeError, Collection, ("foo"), mode='r', 
+            TypeError, Collection, ("foo"), mode='r',
             vsi='git')
+
     def test_archive(self):
         self.assertRaises(
-            TypeError, Collection, ("foo"), mode='r', 
+            TypeError, Collection, ("foo"), mode='r',
             archive=1)
+
     def test_write_numeric_layer(self):
         self.assertRaises(ValueError, Collection, ("foo"), mode='w', layer=1)
+
     def test_write_geojson_layer(self):
         self.assertRaises(ValueError, Collection, ("foo"), mode='w', driver='GeoJSON', layer='foo')
+
     def test_append_geojson(self):
         self.assertRaises(ValueError, Collection, ("foo"), mode='w', driver='ARCGEN')
 
 
 class OpenExceptionTest(unittest.TestCase):
+
     def test_no_archive(self):
         self.assertRaises(IOError, fiona.open, ("/"), mode='r', vfs="zip:///foo.zip")
 
 
 class ReadingTest(unittest.TestCase):
-    
+
     def setUp(self):
         self.c = fiona.open(WILDSHP, "r")
-    
+
     def tearDown(self):
         self.c.close()
 
     def test_open_repr(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<open Collection 'tests/data/coutwildrnp.shp:coutwildrnp', mode 'r' "
-            "at %s>" % hex(id(self.c))))
+             "at %s>" % hex(id(self.c))))
 
     def test_closed_repr(self):
         self.c.close()
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<closed Collection 'tests/data/coutwildrnp.shp:coutwildrnp', mode 'r' "
-            "at %s>" % hex(id(self.c))))
+             "at %s>" % hex(id(self.c))))
 
     def test_path(self):
-        self.failUnlessEqual(self.c.path, WILDSHP)
+        self.assertEqual(self.c.path, WILDSHP)
 
     def test_name(self):
-        self.failUnlessEqual(self.c.name, 'coutwildrnp')
-    
+        self.assertEqual(self.c.name, 'coutwildrnp')
+
     def test_mode(self):
-        self.failUnlessEqual(self.c.mode, 'r')
+        self.assertEqual(self.c.mode, 'r')
 
     def test_collection(self):
-        self.failUnlessEqual(self.c.encoding, 'iso-8859-1')
+        self.assertEqual(self.c.encoding, 'iso-8859-1')
 
     def test_iter(self):
-        self.failUnless(iter(self.c))
-    
+        self.assertTrue(iter(self.c))
+
     def test_closed_no_iter(self):
         self.c.close()
         self.assertRaises(ValueError, iter, self.c)
 
     def test_len(self):
-        self.failUnlessEqual(len(self.c), 67)
-    
+        self.assertEqual(len(self.c), 67)
+
     def test_closed_len(self):
         # Len is lazy, it's never computed in this case. TODO?
         self.c.close()
-        self.failUnlessEqual(len(self.c), 0)
+        self.assertEqual(len(self.c), 0)
 
     def test_len_closed_len(self):
         # Lazy len is computed in this case and sticks.
         len(self.c)
         self.c.close()
-        self.failUnlessEqual(len(self.c), 67)
-    
+        self.assertEqual(len(self.c), 67)
+
     def test_driver(self):
-        self.failUnlessEqual(self.c.driver, "ESRI Shapefile")
-    
+        self.assertEqual(self.c.driver, "ESRI Shapefile")
+
     def test_closed_driver(self):
         self.c.close()
-        self.failUnlessEqual(self.c.driver, None)
+        self.assertEqual(self.c.driver, None)
 
     def test_driver_closed_driver(self):
         self.c.driver
         self.c.close()
-        self.failUnlessEqual(self.c.driver, "ESRI Shapefile")
-    
+        self.assertEqual(self.c.driver, "ESRI Shapefile")
+
     def test_schema(self):
         s = self.c.schema['properties']
-        self.failUnlessEqual(s['PERIMETER'], "float:24.15")
-        self.failUnlessEqual(s['NAME'], "str:80")
-        self.failUnlessEqual(s['URL'], "str:101")
-        self.failUnlessEqual(s['STATE_FIPS'], "str:80")
-        self.failUnlessEqual(s['WILDRNP020'], "int:10")
+        self.assertEqual(s['PERIMETER'], "float:24.15")
+        self.assertEqual(s['NAME'], "str:80")
+        self.assertEqual(s['URL'], "str:101")
+        self.assertEqual(s['STATE_FIPS'], "str:80")
+        self.assertEqual(s['WILDRNP020'], "int:10")
 
     def test_closed_schema(self):
         # Schema is lazy too, never computed in this case. TODO?
         self.c.close()
-        self.failUnlessEqual(self.c.schema, None)
+        self.assertEqual(self.c.schema, None)
 
     def test_schema_closed_schema(self):
         self.c.schema
         self.c.close()
-        self.failUnlessEqual(
+        self.assertEqual(
             sorted(self.c.schema.keys()),
             ['geometry', 'properties'])
 
     def test_crs(self):
         crs = self.c.crs
-        self.failUnlessEqual(crs['init'], 'epsg:4326')
+        self.assertEqual(crs['init'], 'epsg:4326')
 
     def test_crs_wkt(self):
         crs = self.c.crs_wkt
-        self.failUnless(crs.startswith('GEOGCS["GCS_WGS_1984"'))
+        self.assertTrue(crs.startswith('GEOGCS["GCS_WGS_1984"'))
 
     def test_closed_crs(self):
         # Crs is lazy too, never computed in this case. TODO?
         self.c.close()
-        self.failUnlessEqual(self.c.crs, None)
+        self.assertEqual(self.c.crs, None)
 
     def test_crs_closed_crs(self):
         self.c.crs
         self.c.close()
-        self.failUnlessEqual(
+        self.assertEqual(
             sorted(self.c.crs.keys()),
             ['init'])
 
     def test_meta(self):
-        self.failUnlessEqual(
-            sorted(self.c.meta.keys()), 
+        self.assertEqual(
+            sorted(self.c.meta.keys()),
+            ['crs', 'crs_wkt', 'driver', 'schema'])
+
+    def test_profile(self):
+        self.assertEqual(
+            sorted(self.c.profile.keys()),
             ['crs', 'crs_wkt', 'driver', 'schema'])
 
     def test_bounds(self):
-        self.failUnlessAlmostEqual(self.c.bounds[0], -113.564247, 6)
-        self.failUnlessAlmostEqual(self.c.bounds[1], 37.068981, 6)
-        self.failUnlessAlmostEqual(self.c.bounds[2], -104.970871, 6)
-        self.failUnlessAlmostEqual(self.c.bounds[3], 41.996277, 6)
+        self.assertAlmostEqual(self.c.bounds[0], -113.564247, 6)
+        self.assertAlmostEqual(self.c.bounds[1], 37.068981, 6)
+        self.assertAlmostEqual(self.c.bounds[2], -104.970871, 6)
+        self.assertAlmostEqual(self.c.bounds[3], 41.996277, 6)
 
     def test_context(self):
         with fiona.open(WILDSHP, "r") as c:
-            self.failUnlessEqual(c.name, 'coutwildrnp')
-            self.failUnlessEqual(len(c), 67)
-        self.failUnlessEqual(c.closed, True)
+            self.assertEqual(c.name, 'coutwildrnp')
+            self.assertEqual(len(c), 67)
+        self.assertEqual(c.closed, True)
 
     def test_iter_one(self):
         itr = iter(self.c)
         f = next(itr)
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_iter_list(self):
         f = list(self.c)[0]
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_re_iter_list(self):
-        f = list(self.c)[0] # Run through iterator
-        f = list(self.c)[0] # Run through a new, reset iterator
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        f = list(self.c)[0]  # Run through iterator
+        f = list(self.c)[0]  # Run through a new, reset iterator
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_getitem_one(self):
         f = self.c[0]
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_getitem_iter_combo(self):
         i = iter(self.c)
         f = next(i)
         f = next(i)
-        self.failUnlessEqual(f['id'], "1")
+        self.assertEqual(f['id'], "1")
         f = self.c[0]
-        self.failUnlessEqual(f['id'], "0")
+        self.assertEqual(f['id'], "0")
         f = next(i)
-        self.failUnlessEqual(f['id'], "2")
+        self.assertEqual(f['id'], "2")
 
     def test_no_write(self):
         self.assertRaises(IOError, self.c.write, {})
 
     def test_iter_items_list(self):
         i, f = list(self.c.items())[0]
-        self.failUnlessEqual(i, 0)
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(i, 0)
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_iter_keys_list(self):
         i = list(self.c.keys())[0]
-        self.failUnlessEqual(i, 0)
+        self.assertEqual(i, 0)
 
     def test_in_keys(self):
-        self.failUnless(0 in self.c.keys())
-        self.failUnless(0 in self.c)
+        self.assertTrue(0 in self.c.keys())
+        self.assertTrue(0 in self.c)
 
 
 class FilterReadingTest(unittest.TestCase):
@@ -259,34 +279,34 @@ class FilterReadingTest(unittest.TestCase):
 
     def test_filter_1(self):
         results = list(self.c.filter(bbox=(-120.0, 30.0, -100.0, 50.0)))
-        self.failUnlessEqual(len(results), 67)
+        self.assertEqual(len(results), 67)
         f = results[0]
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
     def test_filter_reset(self):
         results = list(self.c.filter(bbox=(-112.0, 38.0, -106.0, 40.0)))
-        self.failUnlessEqual(len(results), 26)
+        self.assertEqual(len(results), 26)
         results = list(self.c.filter())
-        self.failUnlessEqual(len(results), 67)
-        
+        self.assertEqual(len(results), 67)
+
     def test_filter_mask(self):
         mask = {
             'type': 'Polygon',
             'coordinates': (
                 ((-112, 38), (-112, 40), (-106, 40), (-106, 38), (-112, 38)),)}
         results = list(self.c.filter(mask=mask))
-        self.failUnlessEqual(len(results), 26)
+        self.assertEqual(len(results), 26)
 
 
 class UnsupportedDriverTest(unittest.TestCase):
-    
+
     def test_immediate_fail_driver(self):
         schema = {
-            'geometry': 'Point', 
-            'properties': {'label': 'str', u'verit\xe9': 'int'} }
+            'geometry': 'Point',
+            'properties': {'label': 'str', u'verit\xe9': 'int'}}
         self.assertRaises(
-            DriverError, 
+            DriverError,
             fiona.open, os.path.join(TEMPDIR, "foo"), "w", "Bogus", schema=schema)
 
 
@@ -295,14 +315,11 @@ class GenericWritingTest(unittest.TestCase):
     def setUp(self):
         self.tempdir = tempfile.mkdtemp()
         schema = {
-            'geometry': 'Point', 
-            'properties': [('label', 'str'), (u'verit\xe9', 'int')] }
-        self.c = fiona.open(
-                os.path.join(self.tempdir, "test-no-iter.shp"),
-                "w", 
-                "ESRI Shapefile", 
-                schema=schema,
-                encoding='Windows-1252')
+            'geometry': 'Point',
+            'properties': [('label', 'str'), (u'verit\xe9', 'int')]}
+        self.c = fiona.open(os.path.join(self.tempdir, "test-no-iter.shp"),
+                            'w', driver="ESRI Shapefile", schema=schema,
+                            encoding='Windows-1252')
 
     def tearDown(self):
         self.c.close()
@@ -328,7 +345,7 @@ class PointWritingTest(unittest.TestCase):
             "w",
             driver="ESRI Shapefile",
             schema={
-                'geometry': 'Point', 
+                'geometry': 'Point',
                 'properties': [('title', 'str'), ('date', 'date')]},
             crs='epsg:4326',
             encoding='utf-8')
@@ -340,20 +357,18 @@ class PointWritingTest(unittest.TestCase):
     def test_cpg(self):
         """Requires GDAL 1.9"""
         self.sink.close()
-        self.failUnless(
-            open(
-                os.path.join(self.tempdir, "point_writing_test.cpg")
-                ).readline() == 'UTF-8')
+        self.assertTrue(open(os.path.join(
+            self.tempdir, "point_writing_test.cpg")).readline() == 'UTF-8')
 
     def test_write_one(self):
-        self.failUnlessEqual(len(self.sink), 0)
-        self.failUnlessEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
+        self.assertEqual(len(self.sink), 0)
+        self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
         f = {
             'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)},
             'properties': {'title': 'point one', 'date': "2012-01-29"}}
         self.sink.writerecords([f])
-        self.failUnlessEqual(len(self.sink), 1)
-        self.failUnlessEqual(self.sink.bounds, (0.0, 0.1, 0.0, 0.1))
+        self.assertEqual(len(self.sink), 1)
+        self.assertEqual(self.sink.bounds, (0.0, 0.1, 0.0, 0.1))
         self.sink.close()
         info = subprocess.check_output(
             ["ogrinfo", self.filename, "point_writing_test"])
@@ -362,8 +377,8 @@ class PointWritingTest(unittest.TestCase):
             info)
 
     def test_write_two(self):
-        self.failUnlessEqual(len(self.sink), 0)
-        self.failUnlessEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
+        self.assertEqual(len(self.sink), 0)
+        self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
         f1 = {
             'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)},
             'properties': {'title': 'point one', 'date': "2012-01-29"}}
@@ -371,18 +386,18 @@ class PointWritingTest(unittest.TestCase):
             'geometry': {'type': 'Point', 'coordinates': (0.0, -0.1)},
             'properties': {'title': 'point two', 'date': "2012-01-29"}}
         self.sink.writerecords([f1, f2])
-        self.failUnlessEqual(len(self.sink), 2)
-        self.failUnlessEqual(self.sink.bounds, (0.0, -0.1, 0.0, 0.1))
+        self.assertEqual(len(self.sink), 2)
+        self.assertEqual(self.sink.bounds, (0.0, -0.1, 0.0, 0.1))
 
     def test_write_one_null_geom(self):
-        self.failUnlessEqual(len(self.sink), 0)
-        self.failUnlessEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
+        self.assertEqual(len(self.sink), 0)
+        self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
         f = {
             'geometry': None,
             'properties': {'title': 'point one', 'date': "2012-01-29"}}
         self.sink.writerecords([f])
-        self.failUnlessEqual(len(self.sink), 1)
-        self.failUnlessEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
+        self.assertEqual(len(self.sink), 1)
+        self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
 
     def test_validate_record(self):
         fvalid = {
@@ -404,41 +419,40 @@ class LineWritingTest(unittest.TestCase):
             "w",
             driver="ESRI Shapefile",
             schema={
-                'geometry': 'LineString', 
+                'geometry': 'LineString',
                 'properties': [('title', 'str'), ('date', 'date')]},
             crs={'init': "epsg:4326", 'no_defs': True})
 
     def tearDown(self):
         self.sink.close()
         shutil.rmtree(self.tempdir)
-    
+
     def test_write_one(self):
-        self.failUnlessEqual(len(self.sink), 0)
-        self.failUnlessEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
+        self.assertEqual(len(self.sink), 0)
+        self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
         f = {
-            'geometry': {'type': 'LineString', 
+            'geometry': {'type': 'LineString',
                          'coordinates': [(0.0, 0.1), (0.0, 0.2)]},
             'properties': {'title': 'line one', 'date': "2012-01-29"}}
         self.sink.writerecords([f])
-        self.failUnlessEqual(len(self.sink), 1)
-        self.failUnlessEqual(self.sink.bounds, (0.0, 0.1, 0.0, 0.2))
+        self.assertEqual(len(self.sink), 1)
+        self.assertEqual(self.sink.bounds, (0.0, 0.1, 0.0, 0.2))
 
     def test_write_two(self):
-        self.failUnlessEqual(len(self.sink), 0)
-        self.failUnlessEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
+        self.assertEqual(len(self.sink), 0)
+        self.assertEqual(self.sink.bounds, (0.0, 0.0, 0.0, 0.0))
         f1 = {
-            'geometry': {'type': 'LineString', 
+            'geometry': {'type': 'LineString',
                          'coordinates': [(0.0, 0.1), (0.0, 0.2)]},
             'properties': {'title': 'line one', 'date': "2012-01-29"}}
         f2 = {
-            'geometry': {'type': 'MultiLineString', 
-                         'coordinates': [
-                            [(0.0, 0.0), (0.0, -0.1)], 
-                            [(0.0, -0.1), (0.0, -0.2)] ]},
+            'geometry': {'type': 'MultiLineString',
+                         'coordinates': [[(0.0, 0.0), (0.0, -0.1)],
+                                         [(0.0, -0.1), (0.0, -0.2)]]},
             'properties': {'title': 'line two', 'date': "2012-01-29"}}
         self.sink.writerecords([f1, f2])
-        self.failUnlessEqual(len(self.sink), 2)
-        self.failUnlessEqual(self.sink.bounds, (0.0, -0.2, 0.0, 0.2))
+        self.assertEqual(len(self.sink), 2)
+        self.assertEqual(self.sink.bounds, (0.0, -0.2, 0.0, 0.2))
 
 
 class PointAppendTest(unittest.TestCase):
@@ -450,12 +464,12 @@ class PointAppendTest(unittest.TestCase):
             output_schema['geometry'] = '3D Point'
             with fiona.open(
                     os.path.join(self.tempdir, "test_append_point.shp"),
-                    "w", crs=None, driver="ESRI Shapefile", schema=output_schema
-                    ) as output:
+                    'w', crs=None, driver="ESRI Shapefile",
+                    schema=output_schema) as output:
                 for f in input:
                     f['geometry'] = {
                         'type': 'Point',
-                        'coordinates': f['geometry']['coordinates'][0][0] }
+                        'coordinates': f['geometry']['coordinates'][0][0]}
                     output.write(f)
 
     def tearDown(self):
@@ -465,16 +479,16 @@ class PointAppendTest(unittest.TestCase):
         with fiona.open(os.path.join(self.tempdir, "test_append_point.shp"), "a") as c:
             self.assertEqual(c.schema['geometry'], '3D Point')
             c.write({'geometry': {'type': 'Point', 'coordinates': (0.0, 45.0)},
-                     'properties': { 'PERIMETER': 1.0,
-                                     'FEATURE2': None,
-                                     'NAME': 'Foo',
-                                     'FEATURE1': None,
-                                     'URL': 'http://example.com',
-                                     'AGBUR': 'BAR',
-                                     'AREA': 0.0,
-                                     'STATE_FIPS': 1,
-                                     'WILDRNP020': 1,
-                                     'STATE': 'XL' } })
+                     'properties': {'PERIMETER': 1.0,
+                                    'FEATURE2': None,
+                                    'NAME': 'Foo',
+                                    'FEATURE1': None,
+                                    'URL': 'http://example.com',
+                                    'AGBUR': 'BAR',
+                                    'AREA': 0.0,
+                                    'STATE_FIPS': 1,
+                                    'WILDRNP020': 1,
+                                    'STATE': 'XL'}})
             self.assertEqual(len(c), 68)
 
 
@@ -487,12 +501,12 @@ class LineAppendTest(unittest.TestCase):
                 "w",
                 driver="ESRI Shapefile",
                 schema={
-                    'geometry': 'MultiLineString', 
+                    'geometry': 'MultiLineString',
                     'properties': {'title': 'str', 'date': 'date'}},
                 crs={'init': "epsg:4326", 'no_defs': True}) as output:
-            f = {'geometry': {'type': 'MultiLineString', 
+            f = {'geometry': {'type': 'MultiLineString',
                               'coordinates': [[(0.0, 0.1), (0.0, 0.2)]]},
-                'properties': {'title': 'line one', 'date': "2012-01-29"}}
+                 'properties': {'title': 'line one', 'date': "2012-01-29"}}
             output.writerecords([f])
 
     def tearDown(self):
@@ -502,35 +516,34 @@ class LineAppendTest(unittest.TestCase):
         with fiona.open(os.path.join(self.tempdir, "test_append_line.shp"), "a") as c:
             self.assertEqual(c.schema['geometry'], 'LineString')
             f1 = {
-                'geometry': {'type': 'LineString', 
+                'geometry': {'type': 'LineString',
                              'coordinates': [(0.0, 0.1), (0.0, 0.2)]},
                 'properties': {'title': 'line one', 'date': "2012-01-29"}}
             f2 = {
-                'geometry': {'type': 'MultiLineString', 
-                             'coordinates': [
-                                [(0.0, 0.0), (0.0, -0.1)], 
-                                [(0.0, -0.1), (0.0, -0.2)] ]},
+                'geometry': {'type': 'MultiLineString',
+                             'coordinates': [[(0.0, 0.0), (0.0, -0.1)],
+                                             [(0.0, -0.1), (0.0, -0.2)]]},
                 'properties': {'title': 'line two', 'date': "2012-01-29"}}
             c.writerecords([f1, f2])
-            self.failUnlessEqual(len(c), 3)
-            self.failUnlessEqual(c.bounds, (0.0, -0.2, 0.0, 0.2))
+            self.assertEqual(len(c), 3)
+            self.assertEqual(c.bounds, (0.0, -0.2, 0.0, 0.2))
 
 
 class ShapefileFieldWidthTest(unittest.TestCase):
-    
+
     def test_text(self):
         self.tempdir = tempfile.mkdtemp()
-        with fiona.open(os.path.join(self.tempdir, "textfield.shp"), "w",
-                driver="ESRI Shapefile",
-                schema={'geometry': 'Point', 'properties': {'text': 'str:254'}}
-                ) as c:
+        with fiona.open(
+                os.path.join(self.tempdir, "textfield.shp"), 'w',
+                schema={'geometry': 'Point', 'properties': {'text': 'str:254'}},
+                driver="ESRI Shapefile") as c:
             c.write(
                 {'geometry': {'type': 'Point', 'coordinates': (0.0, 45.0)},
-                 'properties': { 'text': 'a' * 254 }})
+                 'properties': {'text': 'a' * 254}})
         c = fiona.open(os.path.join(self.tempdir, "textfield.shp"), "r")
-        self.failUnlessEqual(c.schema['properties']['text'], 'str:254')
+        self.assertEqual(c.schema['properties']['text'], 'str:254')
         f = next(iter(c))
-        self.failUnlessEqual(f['properties']['text'], 'a' * 254)
+        self.assertEqual(f['properties']['text'], 'a' * 254)
         c.close()
 
     def tearDown(self):
@@ -567,7 +580,7 @@ class GeoJSONCRSWritingTest(unittest.TestCase):
             "w",
             driver="GeoJSON",
             schema={
-                'geometry': 'Point', 
+                'geometry': 'Point',
                 'properties': [('title', 'str'), ('date', 'date')]},
             crs={'a': 6370997, 'lon_0': -100, 'y_0': 0, 'no_defs': True, 'proj': 'laea', 'x_0': 0, 'units': 'm', 'b': 6370997, 'lat_0': 45})
 
@@ -611,14 +624,14 @@ class DateTimeTest(unittest.TestCase):
         }]
         self.sink.writerecords(recs)
         self.sink.close()
-        self.failUnlessEqual(len(self.sink), 2)
+        self.assertEqual(len(self.sink), 2)
 
         c = fiona.open(os.path.join(self.tempdir, "date_test.shp"), "r")
-        self.failUnlessEqual(len(c), 2)
+        self.assertEqual(len(c), 2)
 
         rf1, rf2 = list(c)
-        self.failUnlessEqual(rf1['properties']['date'], '2013-02-25')
-        self.failUnlessEqual(rf2['properties']['date'], '2014-02-03')
+        self.assertEqual(rf1['properties']['date'], '2013-02-25')
+        self.assertEqual(rf2['properties']['date'], '2014-02-03')
 
     def tearDown(self):
         shutil.rmtree(self.tempdir)
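
A condensed write/read round-trip in the style of PointWritingTest
above, using only calls these tests already exercise:

    import os
    import tempfile

    import fiona

    # Open a sink with a schema, write one record, read it back.
    tmpdir = tempfile.mkdtemp()
    path = os.path.join(tmpdir, 'sketch.shp')
    schema = {'geometry': 'Point', 'properties': {'title': 'str'}}
    with fiona.open(path, 'w', driver='ESRI Shapefile',
                    crs='epsg:4326', schema=schema) as sink:
        sink.write({
            'geometry': {'type': 'Point', 'coordinates': (0.0, 0.1)},
            'properties': {'title': 'point one'}})
    with fiona.open(path) as src:
        assert len(src) == 1
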
diff --git a/tests/test_collection_crs.py b/tests/test_collection_crs.py
index ce483ad..114684e 100644
--- a/tests/test_collection_crs.py
+++ b/tests/test_collection_crs.py
@@ -2,11 +2,13 @@ import os
 import tempfile
 
 import fiona
+import fiona.crs
 
 
 def test_collection_crs_wkt():
     with fiona.open('tests/data/coutwildrnp.shp') as src:
-        assert src.crs_wkt.startswith('GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84"')
+        assert src.crs_wkt.startswith(
+            'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84"')
 
 
 def test_collection_no_crs_wkt():
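
The distinction these tests rely on: src.crs is a PROJ.4-style mapping
while src.crs_wkt is the raw WKT string, so the ESRI-flavoured fixture
reports GCS_WGS_1984 even though the mapping normalizes to epsg:4326.

    import fiona

    # Both properties come from the same fixture used above.
    with fiona.open('tests/data/coutwildrnp.shp') as src:
        assert src.crs == {'init': 'epsg:4326'}
        assert src.crs_wkt.startswith('GEOGCS["GCS_WGS_1984"')
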
diff --git a/tests/test_crs.py b/tests/test_crs.py
index 6b65829..8b93b02 100644
--- a/tests/test_crs.py
+++ b/tests/test_crs.py
@@ -1,80 +1,116 @@
+from fiona import crs, _crs
 
-from fiona import crs
 
 def test_proj_keys():
-    assert len(crs.all_proj_keys) == 86
+    assert len(crs.all_proj_keys) == 87
     assert 'init' in crs.all_proj_keys
     assert 'proj' in crs.all_proj_keys
     assert 'no_mayo' in crs.all_proj_keys
 
+
 def test_from_string():
     # A PROJ.4 string with extra whitespace.
     val = crs.from_string(
-        " +proj=longlat +ellps=WGS84 +datum=WGS84  +no_defs +foo  " )
+        " +proj=longlat +ellps=WGS84 +datum=WGS84  +no_defs +foo  ")
     assert len(val.items()) == 4
     assert val['proj'] == 'longlat'
     assert val['ellps'] == 'WGS84'
     assert val['datum'] == 'WGS84'
-    assert val['no_defs'] == True
+    assert val['no_defs']
     assert 'foo' not in val
 
+
 def test_from_string_utm():
     # A PROJ.4 string with extra whitespace and integer UTM zone.
     val = crs.from_string(
-        " +proj=utm +zone=13 +ellps=WGS84 +foo  " )
+        " +proj=utm +zone=13 +ellps=WGS84 +foo  ")
     assert len(val.items()) == 3
     assert val['proj'] == 'utm'
     assert val['ellps'] == 'WGS84'
     assert val['zone'] == 13
     assert 'foo' not in val
 
+
 def test_to_string():
     # Make a string from a mapping with a few bogus items
     val = {
-        'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', 
-        'no_defs': True, 'foo': True, 'axis': False, 'belgium': [1,2] }
+        'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84',
+        'no_defs': True, 'foo': True, 'axis': False, 'belgium': [1, 2]}
     assert crs.to_string(
         val) == "+datum=WGS84 +ellps=WGS84 +no_defs +proj=longlat"
 
+
 def test_to_string_utm():
     # Make a string from a mapping with a few bogus items
     val = {
-        'proj': 'utm', 'ellps': 'WGS84', 'zone': 13, 
-        'no_defs': True, 'foo': True, 'axis': False, 'belgium': [1,2] }
+        'proj': 'utm', 'ellps': 'WGS84', 'zone': 13,
+        'no_defs': True, 'foo': True, 'axis': False, 'belgium': [1, 2]}
     assert crs.to_string(
         val) == "+ellps=WGS84 +no_defs +proj=utm +zone=13"
 
+
 def test_to_string_epsg():
     val = {'init': 'epsg:4326', 'no_defs': True}
     assert crs.to_string(val) == "+init=epsg:4326 +no_defs"
-       
+
+
 def test_to_string_zeroval():
     # Make a string with some 0 values (e.g. esri:102017)
-    val = {'proj': 'laea', 'lat_0': 90, 'lon_0': 0, 'x_0': 0, 'y_0': 0, 
+    val = {'proj': 'laea', 'lat_0': 90, 'lon_0': 0, 'x_0': 0, 'y_0': 0,
            'ellps': 'WGS84', 'datum': 'WGS84', 'units': 'm', 'no_defs': True}
     assert crs.to_string(val) == (
         "+datum=WGS84 +ellps=WGS84 +lat_0=90 +lon_0=0 +no_defs +proj=laea "
         "+units=m +x_0=0 +y_0=0")
 
+
 def test_from_epsg():
     val = crs.from_epsg(4326)
     assert val['init'] == "epsg:4326"
-    assert val['no_defs'] == True
+    assert val['no_defs']
+
 
 def test_from_epsg_neg():
     try:
-        val = crs.from_epsg(-1)
+        crs.from_epsg(-1)
     except ValueError:
         pass
     except:
         raise
 
+
 def test_to_string_unicode():
     # See issue #83.
     val = crs.to_string({
-        u'units': u'm', 
-        u'no_defs': True, 
-        u'datum': u'NAD83', 
-        u'proj': u'utm', 
+        u'units': u'm',
+        u'no_defs': True,
+        u'datum': u'NAD83',
+        u'proj': u'utm',
         u'zone': 16})
     assert 'NAD83' in val
+
+
+def test_wktext():
+    """Test +wktext parameter is preserved."""
+    proj4 = ('+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 '
+             '+x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext '
+             '+no_defs')
+    assert 'wktext' in crs.from_string(proj4)
+
+
+def test_towgs84():
+    """+towgs84 is preserved"""
+    proj4 = ('+proj=lcc +lat_1=49 +lat_2=46 +lat_0=47.5 '
+             '+lon_0=13.33333333333333 +x_0=400000 +y_0=400000 +ellps=bessel '
+             '+towgs84=577.326,90.129,463.919,5.137,1.474,5.297,2.4232 '
+             '+units=m +wktext +no_defs')
+    assert 'towgs84' in crs.from_string(proj4)
+
+
+def test_towgs84_wkt():
+    """+towgs84 +wktext are preserved in WKT"""
+    proj4 = ('+proj=lcc +lat_1=49 +lat_2=46 +lat_0=47.5 '
+             '+lon_0=13.33333333333333 +x_0=400000 +y_0=400000 +ellps=bessel '
+             '+towgs84=577.326,90.129,463.919,5.137,1.474,5.297,2.4232 '
+             '+units=m +wktext +no_defs')
+    assert 'towgs84' in _crs.crs_to_wkt(proj4)
+    assert 'wktext' in _crs.crs_to_wkt(proj4)
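
A round-trip sketch of the dict-based CRS helpers tested above:
from_string() keeps only recognized PROJ.4 parameters (dropping the
bogus +foo) and to_string() writes the surviving keys back out.

    from fiona import crs

    params = crs.from_string('+proj=utm +zone=13 +ellps=WGS84 +foo')
    assert params == {'proj': 'utm', 'zone': 13, 'ellps': 'WGS84'}
    print(crs.to_string(params))  # "+ellps=WGS84 +proj=utm +zone=13"
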
diff --git a/tests/test_feature.py b/tests/test_feature.py
index cca366a..afb2cb1 100644
--- a/tests/test_feature.py
+++ b/tests/test_feature.py
@@ -27,7 +27,7 @@ class PointRoundTripTest(unittest.TestCase):
               'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)},
               'properties': {'title': u'foo'} }
         g = featureRT(f, self.c)
-        self.failUnlessEqual(
+        self.assertEqual(
             sorted(g['geometry'].items()),
             [('coordinates', (0.0, 0.0)), ('type', 'Point')])
     def test_properties(self):
@@ -35,13 +35,13 @@ class PointRoundTripTest(unittest.TestCase):
               'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)},
               'properties': {'title': u'foo'} }
         g = featureRT(f, self.c)
-        self.failUnlessEqual(g['properties']['title'], 'foo')
+        self.assertEqual(g['properties']['title'], 'foo')
     def test_none_property(self):
         f = { 'id': '1',
               'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)},
               'properties': {'title': None} }
         g = featureRT(f, self.c)
-        self.failUnlessEqual(g['properties']['title'], None)
+        self.assertEqual(g['properties']['title'], None)
 
 class LineStringRoundTripTest(unittest.TestCase):
     def setUp(self):
@@ -58,7 +58,7 @@ class LineStringRoundTripTest(unittest.TestCase):
                             'coordinates': [(0.0, 0.0), (1.0, 1.0)] },
               'properties': {'title': u'foo'} }
         g = featureRT(f, self.c)
-        self.failUnlessEqual(
+        self.assertEqual(
             sorted(g['geometry'].items()),
             [('coordinates', [(0.0, 0.0), (1.0, 1.0)]), 
              ('type', 'LineString')])
@@ -67,7 +67,7 @@ class LineStringRoundTripTest(unittest.TestCase):
               'geometry': {'type': 'Point', 'coordinates': (0.0, 0.0)},
               'properties': {'title': u'foo'} }
         g = featureRT(f, self.c)
-        self.failUnlessEqual(g['properties']['title'], 'foo')
+        self.assertEqual(g['properties']['title'], 'foo')
 
 class PolygonRoundTripTest(unittest.TestCase):
     def setUp(self):
@@ -89,7 +89,7 @@ class PolygonRoundTripTest(unittest.TestCase):
                                   (0.0, 0.0)]] },
               'properties': {'title': u'foo'} }
         g = featureRT(f, self.c)
-        self.failUnlessEqual(
+        self.assertEqual(
             sorted(g['geometry'].items()),
             [('coordinates', [[(0.0, 0.0), 
                                   (0.0, 1.0), 
@@ -108,5 +108,5 @@ class PolygonRoundTripTest(unittest.TestCase):
                                   (0.0, 0.0)]] },
               'properties': {'title': u'foo'} }
         g = featureRT(f, self.c)
-        self.failUnlessEqual(g['properties']['title'], 'foo')
+        self.assertEqual(g['properties']['title'], 'foo')
 
diff --git a/tests/test_fio_calc.py b/tests/test_fio_calc.py
new file mode 100644
index 0000000..aba267b
--- /dev/null
+++ b/tests/test_fio_calc.py
@@ -0,0 +1,71 @@
+from __future__ import division
+import json
+
+from click.testing import CliRunner
+
+from fiona.fio.calc import calc
+from .fixtures import feature_seq
+
+
+def test_fail():
+    runner = CliRunner()
+    result = runner.invoke(calc,
+                           ["TEST", "f.properties.test > 5"],
+                           '{"type": "no_properties"}')
+    assert result.exit_code == 1
+
+
+def _load(output):
+    features = []
+    for x in output.splitlines():
+        try:
+            features.append(json.loads(x))
+        except ValueError:
+            pass  # skip the debugging noise that nosetests writes to stdout
+    return features
+
+
+def test_calc_seq():
+    runner = CliRunner()
+
+    result = runner.invoke(calc,
+                           ["TEST", "f.properties.AREA / f.properties.PERIMETER"],
+                           feature_seq)
+    assert result.exit_code == 0
+
+    feats = _load(result.output)
+    assert len(feats) == 2
+    for feat in feats:
+        assert feat['properties']['TEST'] == \
+            feat['properties']['AREA'] / feat['properties']['PERIMETER']
+
+
+def test_bool_seq():
+    runner = CliRunner()
+
+    result = runner.invoke(calc,
+                           ["TEST", "f.properties.AREA > 0.015"],
+                           feature_seq)
+    assert result.exit_code == 0
+    feats = _load(result.output)
+    assert len(feats) == 2
+    assert feats[0]['properties']['TEST'] == True
+    assert feats[1]['properties']['TEST'] == False
+
+
+def test_existing_property():
+    runner = CliRunner()
+
+    result = runner.invoke(calc,
+                           ["AREA", "f.properties.AREA * 2"],
+                           feature_seq)
+    assert result.exit_code == 1
+
+    result = runner.invoke(calc,
+                           ["--overwrite", "AREA", "f.properties.AREA * 2"],
+                           feature_seq)
+    assert result.exit_code == 0
+    feats = _load(result.output)
+    assert len(feats) == 2
+    for feat in feats:
+        assert 'AREA' in feat['properties']
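
The contract exercised above: `fio calc NAME EXPR` evaluates EXPR with
each feature bound to `f` and stores the result under
properties[NAME]. A one-feature sketch (the input feature itself is
illustrative):

    import json

    from click.testing import CliRunner

    from fiona.fio.calc import calc

    feature = json.dumps({
        'type': 'Feature',
        'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]},
        'properties': {'AREA': 2.0, 'PERIMETER': 4.0}})
    result = CliRunner().invoke(
        calc, ['RATIO', 'f.properties.AREA / f.properties.PERIMETER'],
        feature)
    out = json.loads(result.output.splitlines()[0])
    assert out['properties']['RATIO'] == 0.5
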
diff --git a/tests/test_fio_cat.py b/tests/test_fio_cat.py
index 94f5d1f..f203bc5 100644
--- a/tests/test_fio_cat.py
+++ b/tests/test_fio_cat.py
@@ -1,12 +1,11 @@
 import json
 
-import click
 from click.testing import CliRunner
 
 from fiona.fio import cat
 
-from .fixtures import (
-    feature_collection, feature_collection_pp, feature_seq, feature_seq_pp_rs)
+from .fixtures import feature_seq
+from .fixtures import feature_seq_pp_rs
 
 
 WILDSHP = 'tests/data/coutwildrnp.shp'
@@ -54,65 +53,3 @@ def test_bbox_json_yes():
         catch_exceptions=False)
     assert result.exit_code == 0
     assert result.output.count('"Feature"') == 19
-
-
-def test_collect_rs():
-    runner = CliRunner()
-    result = runner.invoke(
-        cat.collect,
-        ['--src-crs', 'EPSG:3857'],
-        feature_seq_pp_rs,
-        catch_exceptions=False)
-    assert result.exit_code == 0
-    assert result.output.count('"Feature"') == 2
-
-
-def test_collect_no_rs():
-    runner = CliRunner()
-    result = runner.invoke(
-        cat.collect,
-        ['--src-crs', 'EPSG:3857'],
-        feature_seq,
-        catch_exceptions=False)
-    assert result.exit_code == 0
-    assert result.output.count('"Feature"') == 2
-
-
-def test_collect_ld():
-    runner = CliRunner()
-    result = runner.invoke(
-        cat.collect,
-        ['--with-ld-context', '--add-ld-context-item', 'foo=bar'],
-        feature_seq,
-        catch_exceptions=False)
-    assert result.exit_code == 0
-    assert '"@context": {' in result.output
-    assert '"foo": "bar"' in result.output
-
-
-def test_collect_rec_buffered():
-    runner = CliRunner()
-    result = runner.invoke(cat.collect, ['--record-buffered'], feature_seq)
-    assert result.exit_code == 0
-    assert '"FeatureCollection"' in result.output
-
-
-def test_distrib():
-    runner = CliRunner()
-    result = runner.invoke(cat.distrib, [], feature_collection_pp)
-    assert result.exit_code == 0
-    assert result.output.count('"Feature"') == 2
-
-
-def test_distrib_no_rs():
-    runner = CliRunner()
-    result = runner.invoke(cat.distrib, [], feature_collection)
-    assert result.exit_code == 0
-    assert result.output.count('"Feature"') == 2
-
-
-def test_dump():
-    runner = CliRunner()
-    result = runner.invoke(cat.dump, [WILDSHP])
-    assert result.exit_code == 0
-    assert '"FeatureCollection"' in result.output
diff --git a/tests/test_fio_collect.py b/tests/test_fio_collect.py
new file mode 100644
index 0000000..b1a2e4c
--- /dev/null
+++ b/tests/test_fio_collect.py
@@ -0,0 +1,98 @@
+"""Unittests for $ fio collect"""
+
+
+import json
+
+from click.testing import CliRunner
+
+from fiona.fio import collect
+
+from .fixtures import feature_seq
+from .fixtures import feature_seq_pp_rs
+
+
+def test_collect_rs():
+    runner = CliRunner()
+    result = runner.invoke(
+        collect.collect,
+        ['--src-crs', 'EPSG:3857'],
+        feature_seq_pp_rs,
+        catch_exceptions=False)
+    assert result.exit_code == 0
+    assert result.output.count('"Feature"') == 2
+
+
+def test_collect_no_rs():
+    runner = CliRunner()
+    result = runner.invoke(
+        collect.collect,
+        ['--src-crs', 'EPSG:3857'],
+        feature_seq,
+        catch_exceptions=False)
+    assert result.exit_code == 0
+    assert result.output.count('"Feature"') == 2
+
+
+def test_collect_ld():
+    runner = CliRunner()
+    result = runner.invoke(
+        collect.collect,
+        ['--with-ld-context', '--add-ld-context-item', 'foo=bar'],
+        feature_seq,
+        catch_exceptions=False)
+    assert result.exit_code == 0
+    assert '"@context": {' in result.output
+    assert '"foo": "bar"' in result.output
+
+
+def test_collect_rec_buffered():
+    runner = CliRunner()
+    result = runner.invoke(collect.collect, ['--record-buffered'], feature_seq)
+    assert result.exit_code == 0
+    assert '"FeatureCollection"' in result.output
+
+
+def test_collect_noparse():
+    runner = CliRunner()
+    result = runner.invoke(
+        collect.collect,
+        ['--no-parse'],
+        feature_seq,
+        catch_exceptions=False)
+    assert result.exit_code == 0
+    assert result.output.count('"Feature"') == 2
+    assert len(json.loads(result.output)['features']) == 2
+
+
+def test_collect_noparse_records():
+    runner = CliRunner()
+    result = runner.invoke(
+        collect.collect,
+        ['--no-parse', '--record-buffered'],
+        feature_seq,
+        catch_exceptions=False)
+    assert result.exit_code == 0
+    assert result.output.count('"Feature"') == 2
+    assert len(json.loads(result.output)['features']) == 2
+
+
+def test_collect_src_crs():
+    runner = CliRunner()
+    result = runner.invoke(
+        collect.collect,
+        ['--no-parse', '--src-crs', 'epsg:4326'],
+        feature_seq,
+        catch_exceptions=False)
+    assert result.exit_code == 2
+
+
+def test_collect_noparse_rs():
+    runner = CliRunner()
+    result = runner.invoke(
+        collect.collect,
+        ['--no-parse'],
+        feature_seq_pp_rs,
+        catch_exceptions=False)
+    assert result.exit_code == 0
+    assert result.output.count('"Feature"') == 2
+    assert len(json.loads(result.output)['features']) == 2
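
The feature_seq_pp_rs fixture used above models an RFC 8142-style
GeoJSON text sequence: pretty-printed features delimited by the ASCII
record separator (0x1E) rather than newlines. A minimal splitting
sketch (not Fiona's actual reader):

    RS = u'\x1e'

    def split_rs_sequence(text):
        # Embedded newlines are harmless; only RS delimits features.
        return [chunk for chunk in text.split(RS) if chunk.strip()]
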
diff --git a/tests/test_fio_distrib.py b/tests/test_fio_distrib.py
new file mode 100644
index 0000000..bf8a008
--- /dev/null
+++ b/tests/test_fio_distrib.py
@@ -0,0 +1,23 @@
+"""Unittests for $ fio distrib"""
+
+
+from click.testing import CliRunner
+
+from fiona.fio import distrib
+
+from .fixtures import feature_collection
+from .fixtures import feature_collection_pp
+
+
+def test_distrib():
+    runner = CliRunner()
+    result = runner.invoke(distrib.distrib, [], feature_collection_pp)
+    assert result.exit_code == 0
+    assert result.output.count('"Feature"') == 2
+
+
+def test_distrib_no_rs():
+    runner = CliRunner()
+    result = runner.invoke(distrib.distrib, [], feature_collection)
+    assert result.exit_code == 0
+    assert result.output.count('"Feature"') == 2
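
Conceptually, `fio distrib` is the inverse of `fio collect`: it
explodes a FeatureCollection into one feature per output line. A
hypothetical stand-in, not the fio implementation:

    import json

    def distribute(collection_text):
        for feat in json.loads(collection_text)['features']:
            yield json.dumps(feat)
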
diff --git a/tests/test_fio_dump.py b/tests/test_fio_dump.py
new file mode 100644
index 0000000..8efc3b8
--- /dev/null
+++ b/tests/test_fio_dump.py
@@ -0,0 +1,16 @@
+"""Unittests for $ fio dump"""
+
+
+from click.testing import CliRunner
+
+from fiona.fio import dump
+
+
+WILDSHP = 'tests/data/coutwildrnp.shp'
+
+
+def test_dump():
+    runner = CliRunner()
+    result = runner.invoke(dump.dump, [WILDSHP])
+    assert result.exit_code == 0
+    assert '"FeatureCollection"' in result.output
diff --git a/tests/test_fio_filter.py b/tests/test_fio_filter.py
new file mode 100644
index 0000000..ffc3824
--- /dev/null
+++ b/tests/test_fio_filter.py
@@ -0,0 +1,29 @@
+from click.testing import CliRunner
+
+from fiona.fio import filter
+
+from .fixtures import feature_seq
+
+
+def test_fail():
+    runner = CliRunner()
+    result = runner.invoke(filter.filter,
+                           ["f.properties.test > 5"],
+                           "{'type': 'no_properties'}")
+    assert result.exit_code == 1
+
+
+def test_seq():
+    runner = CliRunner()
+
+    result = runner.invoke(filter.filter, ["f.properties.AREA > 0.01"], feature_seq)
+    assert result.exit_code == 0
+    assert result.output.count('Feature') == 2
+
+    result = runner.invoke(filter.filter, ["f.properties.AREA > 0.015"], feature_seq)
+    assert result.exit_code == 0
+    assert result.output.count('Feature') == 1
+
+    result = runner.invoke(filter.filter, ["f.properties.AREA > 0.02"], feature_seq)
+    assert result.exit_code == 0
+    assert result.output.count('Feature') == 0
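
The `fio filter` predicate tested above keeps a feature when the
expression, with the parsed feature bound to `f`, evaluates truthy. A
simplified dict-access stand-in (fio itself supports the
attribute-style f.properties.AREA spelling):

    import json

    def keep(feature_json, predicate):
        f = json.loads(feature_json)
        return predicate(f)

    assert keep('{"properties": {"AREA": 0.02}}',
                lambda f: f['properties']['AREA'] > 0.01)
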
diff --git a/tests/test_fio_info.py b/tests/test_fio_info.py
new file mode 100644
index 0000000..c36622f
--- /dev/null
+++ b/tests/test_fio_info.py
@@ -0,0 +1,73 @@
+import json
+from pkg_resources import iter_entry_points
+import re
+
+from click.testing import CliRunner
+
+from fiona.fio.main import main_group
+
+
+WILDSHP = 'tests/data/coutwildrnp.shp'
+
+
+def test_info_json():
+    runner = CliRunner()
+    result = runner.invoke(main_group, ['info', WILDSHP])
+    assert result.exit_code == 0
+    assert '"count": 67' in result.output
+    assert '"crs": "EPSG:4326"' in result.output
+    assert '"driver": "ESRI Shapefile"' in result.output
+    assert '"name": "coutwildrnp"' in result.output
+
+
+def test_info_count():
+    runner = CliRunner()
+    result = runner.invoke(main_group, ['info', '--count', WILDSHP])
+    assert result.exit_code == 0
+    assert result.output == "67\n"
+
+
+def test_info_bounds():
+    runner = CliRunner()
+    result = runner.invoke(main_group, ['info', '--bounds', WILDSHP])
+    assert result.exit_code == 0
+    assert len(re.findall(r'\d*\.\d*', result.output)) == 4
+
+
+def test_all_registered():
+    # Make sure all the subcommands are actually registered to the main CLI group
+    for ep in iter_entry_points('fiona.fio_commands'):
+        assert ep.name in main_group.commands
+
+
+def _filter_info_warning(lines):
+    """$ fio info can issue a RuntimeWarning, but click adds stderr to stdout
+    so we have to filter it out before decoding JSON lines."""
+    lines = list(filter(lambda x: 'RuntimeWarning' not in x, lines))
+    return lines
+
+
+def test_info_no_count():
+    """Make sure we can still get a `$ fio info` report on datasources that do
+    not support feature counting, AKA `len(collection)`.
+    """
+    runner = CliRunner()
+    result = runner.invoke(main_group, ['info', 'tests/data/test_gpx.gpx'])
+    assert result.exit_code == 0
+    lines = _filter_info_warning(result.output.splitlines())
+    assert len(lines) == 1, "After filtering the warning, only the JSON line remains."
+    assert json.loads(lines[0])['count'] is None
+
+
+def test_info_layer():
+    for layer in ('routes', '1'):
+        runner = CliRunner()
+        result = runner.invoke(main_group, [
+            'info',
+            'tests/data/test_gpx.gpx',
+            '--layer', layer])
+        print(result.output)
+        assert result.exit_code == 0
+        lines = _filter_info_warning(result.output.splitlines())
+        assert len(lines) == 1, "After filtering the warning, only the JSON line remains."
+        assert json.loads(lines[0])['name'] == 'routes'
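
test_info_layer() exercises both ways a layer can be addressed, by
name or by zero-based index ('routes' is index 1 in OGR's GPX layer
order):

    import fiona

    with fiona.open('tests/data/test_gpx.gpx', layer='routes') as src:
        assert src.name == 'routes'
    with fiona.open('tests/data/test_gpx.gpx', layer=1) as src:
        assert src.name == 'routes'
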
diff --git a/tests/test_fio_load.py b/tests/test_fio_load.py
index cfe254f..fce5f08 100644
--- a/tests/test_fio_load.py
+++ b/tests/test_fio_load.py
@@ -1,4 +1,6 @@
+import json
 import os
+import shutil
 import tempfile
 
 from click.testing import CliRunner
@@ -119,3 +121,38 @@ def test_dst_crs_no_src(tmpdir=None):
     with fiona.open(tmpfile) as src:
         assert src.crs == {'init': 'epsg:32610'}
         assert len(src) == len(feature_seq.splitlines())
+
+
+def test_fio_load_layer():
+
+    tmpdir = tempfile.mkdtemp()
+    try:
+        feature = {
+            'type': 'Feature',
+            'properties': {'key': 'value'},
+            'geometry': {
+                'type': 'Point',
+                'coordinates': (5.0, 39.0)
+            }
+        }
+
+        sequence = os.linesep.join(map(json.dumps, [feature, feature]))
+
+        runner = CliRunner()
+        result = runner.invoke(main_group, [
+            'load',
+            tmpdir,
+            '--driver', 'ESRI Shapefile',
+            '--src-crs', 'EPSG:4236',
+            '--layer', 'test_layer',
+            '--sequence'],
+            input=sequence)
+        assert result.exit_code == 0
+
+        with fiona.open(tmpdir) as src:
+            assert len(src) == 2
+            assert src.name == 'test_layer'
+            assert src.schema['geometry'] == 'Point'
+
+    finally:
+        shutil.rmtree(tmpdir)
diff --git a/tests/test_fio_ls.py b/tests/test_fio_ls.py
new file mode 100644
index 0000000..d8be2b1
--- /dev/null
+++ b/tests/test_fio_ls.py
@@ -0,0 +1,58 @@
+"""Unittests for `$ fio ls`"""
+
+
+import json
+import shutil
+import tempfile
+
+from click.testing import CliRunner
+
+import fiona
+from fiona.fio.main import main_group
+
+
+def test_fio_ls_single_layer():
+
+    result = CliRunner().invoke(main_group, [
+        'ls',
+        'tests/data/'])
+    assert result.exit_code == 0
+    assert len(result.output.splitlines()) == 1
+    assert json.loads(result.output) == ['coutwildrnp']
+
+
+def test_fio_ls_indent():
+
+    result = CliRunner().invoke(main_group, [
+        'ls',
+        '--indent', '4',
+        'tests/data/coutwildrnp.shp'])
+    assert result.exit_code == 0
+    assert len(result.output.strip().splitlines()) == 3
+    assert json.loads(result.output) == ['coutwildrnp']
+
+
+def test_fio_ls_multi_layer():
+
+    infile = 'tests/data/coutwildrnp.shp'
+    outdir = tempfile.mkdtemp()
+    try:
+        
+        # Copy test shapefile into new directory
+        # Shapefile driver treats a directory of shapefiles as a single
+        # multi-layer datasource
+        layer_names = ['l1', 'l2']
+        for layer in layer_names:
+            with fiona.open(infile) as src, \
+                    fiona.open(outdir, 'w', layer=layer, **src.meta) as dst:
+                for feat in src:
+                    dst.write(feat)
+
+        # Run CLI test
+        result = CliRunner().invoke(main_group, [
+            'ls', outdir])
+        assert result.exit_code == 0
+        assert json.loads(result.output) == layer_names
+
+    finally:
+        shutil.rmtree(outdir)
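
`fio ls` is a thin wrapper over fiona.listlayers(), which returns the
layer names of a datasource; as the comment above notes, the Shapefile
driver treats a directory of shapefiles as one multi-layer datasource.

    import fiona

    print(fiona.listlayers('tests/data/'))  # ['coutwildrnp']
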
diff --git a/tests/test_geometry.py b/tests/test_geometry.py
index 79aa5ee..58a7c39 100644
--- a/tests/test_geometry.py
+++ b/tests/test_geometry.py
@@ -22,10 +22,10 @@ class OGRBuilderExceptionsTest(unittest.TestCase):
 class RoundTripping(object):
     """Derive type specific classes from this."""
     def test_type(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             geometryRT(self.geom)['type'], self.geom['type'])
     def test_coordinates(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             geometryRT(self.geom)['coordinates'], self.geom['coordinates'])
 
 # All these get their tests from the RoundTripping class.
@@ -56,7 +56,7 @@ class PolygonRoundTripTest2(unittest.TestCase, RoundTripping):
             'coordinates': [
                 [(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]}
     def test_coordinates(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             [geometryRT(self.geom)['coordinates'][0][:-1]], 
             self.geom['coordinates'])
 
@@ -88,7 +88,7 @@ class MultiPolygonRoundTripTest2(unittest.TestCase, RoundTripping):
             'coordinates': 
                 [[[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)]]]}
     def test_coordinates(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             [[geometryRT(self.geom)['coordinates'][0][0][:-1]]], 
             self.geom['coordinates'])
 
@@ -102,10 +102,10 @@ class GeometryCollectionRoundTripTest(unittest.TestCase):
                     'coordinates': [(0.0, 0.0), (1.0, 1.0)]}]}
     def test_len(self):
         result = geometryRT(self.geom)
-        self.failUnlessEqual(len(result['geometries']), 2)
+        self.assertEqual(len(result['geometries']), 2)
     def test_type(self):
         result = geometryRT(self.geom)
-        self.failUnlessEqual(
+        self.assertEqual(
             [g['type'] for g in result['geometries']], 
             ['Point', 'LineString'])
 
@@ -117,8 +117,8 @@ class PointTest(unittest.TestCase):
         except:
             wkb = "010100000000000000000000000000000000000000".decode('hex')
         geom = geometry_wkb(wkb)
-        self.failUnlessEqual(geom['type'], "Point")
-        self.failUnlessEqual(geom['coordinates'], (0.0, 0.0))
+        self.assertEqual(geom['type'], "Point")
+        self.assertEqual(geom['coordinates'], (0.0, 0.0))
 
 class LineStringTest(unittest.TestCase):
     def test_line(self):
@@ -128,8 +128,8 @@ class LineStringTest(unittest.TestCase):
         except:
             wkb = "01020000000200000000000000000000000000000000000000000000000000f03f000000000000f03f".decode('hex')
         geom = geometry_wkb(wkb)
-        self.failUnlessEqual(geom['type'], "LineString")
-        self.failUnlessEqual(geom['coordinates'], [(0.0, 0.0), (1.0, 1.0)])
+        self.assertEqual(geom['type'], "LineString")
+        self.assertEqual(geom['coordinates'], [(0.0, 0.0), (1.0, 1.0)])
 
 class PolygonTest(unittest.TestCase):
     def test_polygon(self):
@@ -139,14 +139,14 @@ class PolygonTest(unittest.TestCase):
         except:
             wkb = "01030000000100000005000000000000000000f03f0000000000000000000000000000f03f000000000000f03f0000000000000000000000000000f03f00000000000000000000000000000000000000000000f03f0000000000000000".decode('hex')
         geom = geometry_wkb(wkb)
-        self.failUnlessEqual(geom['type'], "Polygon")
-        self.failUnlessEqual(len(geom['coordinates']), 1)
-        self.failUnlessEqual(len(geom['coordinates'][0]), 5)
+        self.assertEqual(geom['type'], "Polygon")
+        self.assertEqual(len(geom['coordinates']), 1)
+        self.assertEqual(len(geom['coordinates'][0]), 5)
         x, y = zip(*geom['coordinates'][0])
-        self.failUnlessEqual(min(x), 0.0)
-        self.failUnlessEqual(min(y), 0.0)
-        self.failUnlessEqual(max(x), 1.0)
-        self.failUnlessEqual(max(y), 1.0)
+        self.assertEqual(min(x), 0.0)
+        self.assertEqual(min(y), 0.0)
+        self.assertEqual(max(x), 1.0)
+        self.assertEqual(max(y), 1.0)
 
 class MultiPointTest(unittest.TestCase):
     def test_multipoint(self):
@@ -155,8 +155,8 @@ class MultiPointTest(unittest.TestCase):
         except:
             wkb = "0104000000020000000101000000000000000000000000000000000000000101000000000000000000f03f000000000000f03f".decode('hex')
         geom = geometry_wkb(wkb)
-        self.failUnlessEqual(geom['type'], "MultiPoint")
-        self.failUnlessEqual(geom['coordinates'], [(0.0, 0.0), (1.0, 1.0)])
+        self.assertEqual(geom['type'], "MultiPoint")
+        self.assertEqual(geom['coordinates'], [(0.0, 0.0), (1.0, 1.0)])
 
 class MultiLineStringTest(unittest.TestCase):
     def test_multilinestring(self):
@@ -166,10 +166,10 @@ class MultiLineStringTest(unittest.TestCase):
         except:
             wkb = "01050000000100000001020000000200000000000000000000000000000000000000000000000000f03f000000000000f03f".decode('hex')
         geom = geometry_wkb(wkb)
-        self.failUnlessEqual(geom['type'], "MultiLineString")
-        self.failUnlessEqual(len(geom['coordinates']), 1)
-        self.failUnlessEqual(len(geom['coordinates'][0]), 2)
-        self.failUnlessEqual(geom['coordinates'][0], [(0.0, 0.0), (1.0, 1.0)])
+        self.assertEqual(geom['type'], "MultiLineString")
+        self.assertEqual(len(geom['coordinates']), 1)
+        self.assertEqual(len(geom['coordinates'][0]), 2)
+        self.assertEqual(geom['coordinates'][0], [(0.0, 0.0), (1.0, 1.0)])
 
 class MultiPolygonTest(unittest.TestCase):
     def test_multipolygon(self):
@@ -179,13 +179,13 @@ class MultiPolygonTest(unittest.TestCase):
         except:
             wkb = "01060000000100000001030000000100000005000000000000000000f03f0000000000000000000000000000f03f000000000000f03f0000000000000000000000000000f03f00000000000000000000000000000000000000000000f03f0000000000000000".decode('hex')
         geom = geometry_wkb(wkb)
-        self.failUnlessEqual(geom['type'], "MultiPolygon")
-        self.failUnlessEqual(len(geom['coordinates']), 1)
-        self.failUnlessEqual(len(geom['coordinates'][0]), 1)
-        self.failUnlessEqual(len(geom['coordinates'][0][0]), 5)
+        self.assertEqual(geom['type'], "MultiPolygon")
+        self.assertEqual(len(geom['coordinates']), 1)
+        self.assertEqual(len(geom['coordinates'][0]), 1)
+        self.assertEqual(len(geom['coordinates'][0][0]), 5)
         x, y = zip(*geom['coordinates'][0][0])
-        self.failUnlessEqual(min(x), 0.0)
-        self.failUnlessEqual(min(y), 0.0)
-        self.failUnlessEqual(max(x), 1.0)
-        self.failUnlessEqual(max(y), 1.0)
+        self.assertEqual(min(x), 0.0)
+        self.assertEqual(min(y), 0.0)
+        self.assertEqual(max(x), 1.0)
+        self.assertEqual(max(y), 1.0)
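
A note on the WKB fixtures: the except branches above fall back to
"...".decode('hex'), which exists only on Python 2. A version-agnostic
sketch (standard library only; not part of this changeset) is
binascii.unhexlify, which accepts the same hex text on both interpreters:

    import binascii

    # WKB for Point (0.0, 0.0), taken from PointTest above: one byte
    # order flag, a 4-byte geometry type, then two 8-byte doubles.
    HEX = "010100000000000000000000000000000000000000"

    wkb = binascii.unhexlify(HEX)
    assert len(wkb) == 21  # 1 + 4 + 2 * 8 bytes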
 
diff --git a/tests/test_layer.py b/tests/test_layer.py
index 3e2ff15..c7b07af 100644
--- a/tests/test_layer.py
+++ b/tests/test_layer.py
@@ -24,20 +24,20 @@ class FileReadingTest(ReadingTest):
         self.c.close()
 
     def test_open_repr(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<open Collection 'tests/data/coutwildrnp.shp:coutwildrnp', mode 'r' "
             "at %s>" % hex(id(self.c))))
 
     def test_closed_repr(self):
         self.c.close()
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<closed Collection 'tests/data/coutwildrnp.shp:coutwildrnp', mode 'r' "
             "at %s>" % hex(id(self.c))))
 
     def test_name(self):
-        self.failUnlessEqual(self.c.name, 'coutwildrnp')
+        self.assertEqual(self.c.name, 'coutwildrnp')
 
 class DirReadingTest(ReadingTest):
     
@@ -48,23 +48,23 @@ class DirReadingTest(ReadingTest):
         self.c.close()
 
     def test_open_repr(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<open Collection 'tests/data:coutwildrnp', mode 'r' "
             "at %s>" % hex(id(self.c))))
 
     def test_closed_repr(self):
         self.c.close()
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<closed Collection 'tests/data:coutwildrnp', mode 'r' "
             "at %s>" % hex(id(self.c))))
 
     def test_name(self):
-        self.failUnlessEqual(self.c.name, 'coutwildrnp')
+        self.assertEqual(self.c.name, 'coutwildrnp')
 
     def test_path(self):
-        self.failUnlessEqual(self.c.path, "tests/data")
+        self.assertEqual(self.c.path, "tests/data")
 
 class InvalidLayerTest(unittest.TestCase):
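
The repr and name tests above pin down fiona's datasource:layer naming. A
short sketch of reading a named layer from a directory datasource, as
DirReadingTest does (paths assume the repository's tests/data fixtures):

    import fiona

    # A directory of shapefiles is a multi-layer OGR datasource; the
    # layer keyword selects one member by name.
    with fiona.open('tests/data', layer='coutwildrnp') as c:
        print(c.name)  # 'coutwildrnp'
        print(c.path)  # 'tests/data'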
 
diff --git a/tests/test_props.py b/tests/test_props.py
index 2f562ac..7bcefcc 100644
--- a/tests/test_props.py
+++ b/tests/test_props.py
@@ -22,8 +22,8 @@ def test_width_other():
 def test_types():
     assert prop_type('str:254') == text_type
     assert prop_type('str') == text_type
-    assert prop_type('int') == type(0)
-    assert prop_type('float') == type(0.0)
+    assert isinstance(0, prop_type('int'))
+    assert isinstance(0.0, prop_type('float'))
     assert prop_type('date') == FionaDateType
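
The change from exact type equality to isinstance matters because the type
behind 'int' may widen (for example to a 64-bit integer type under newer
GDAL), and isinstance also accepts subclasses. A self-contained sketch of
the idea, using a hypothetical lookup table rather than fiona's real one:

    # Hypothetical stand-in for prop_type; fiona's actual mapping may
    # return driver-specific types.
    _TYPES = {'str': str, 'int': int, 'float': float}

    def prop_type(text):
        """Map a schema type string like 'str:254' to a Python type."""
        return _TYPES[text.split(':')[0]]

    # Equality would break for any subclass of int; isinstance does not.
    assert isinstance(0, prop_type('int'))
    assert isinstance(0.0, prop_type('float'))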
 
 
diff --git a/tests/test_remove.py b/tests/test_remove.py
new file mode 100644
index 0000000..f665ed3
--- /dev/null
+++ b/tests/test_remove.py
@@ -0,0 +1,77 @@
+import logging
+import sys
+import os
+
+import tempfile
+import pytest
+
+import fiona
+
+
+logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
+
+
+def create_sample_data(filename, driver):
+    meta = {
+        'driver': driver,
+        'schema': {
+            'geometry': 'Point',
+            'properties': {}
+        }
+    }
+    with fiona.open(filename, 'w', **meta) as dst:
+        dst.write({
+            'geometry': {
+                'type': 'Point',
+                'coordinates': (0, 0),
+            },
+            'properties': {},
+        })
+    assert(os.path.exists(filename))
+
+
+def test_remove(tmpdir=None):
+    if tmpdir is None:
+        tmpdir = tempfile.mkdtemp()
+    filename_shp = os.path.join(tmpdir, 'test.shp')
+    
+    create_sample_data(filename_shp, driver='ESRI Shapefile')
+    fiona.remove(filename_shp, driver='ESRI Shapefile')
+    assert(not os.path.exists(filename_shp))
+    
+    with pytest.raises(RuntimeError):
+        fiona.remove(filename_shp, driver='ESRI Shapefile')
+
+def test_remove_driver(tmpdir=None):
+    if tmpdir is None:
+        tmpdir = tempfile.mkdtemp()
+    filename_shp = os.path.join(tmpdir, 'test.shp')
+    filename_json = os.path.join(tmpdir, 'test.json')
+        
+    create_sample_data(filename_shp, driver='ESRI Shapefile')
+    create_sample_data(filename_json, driver='GeoJSON')
+    fiona.remove(filename_json, driver='GeoJSON')
+    assert(not os.path.exists(filename_json))
+    assert(os.path.exists(filename_shp))
+
+def test_remove_collection(tmpdir=None):
+    if tmpdir is None:
+        tmpdir = tempfile.mkdtemp()
+    filename_shp = os.path.join(tmpdir, 'test.shp')
+    
+    create_sample_data(filename_shp, driver='ESRI Shapefile')
+    collection = fiona.open(filename_shp, 'r')
+    fiona.remove(collection)
+    assert(not os.path.exists(filename_shp))
+
+def test_remove_path_without_driver(tmpdir=None):
+    if tmpdir is None:
+        tmpdir = tempfile.mkdtemp()
+    filename_shp = os.path.join(tmpdir, 'test.shp')
+
+    create_sample_data(filename_shp, driver='ESRI Shapefile')
+
+    with pytest.raises(Exception):
+        fiona.remove(filename_shp)
+
+    assert(os.path.exists(filename_shp))
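
The new test_remove.py pins down fiona.remove(): it deletes a datasource
given a path plus an explicit driver, or an open Collection alone, and it
raises if the target is already gone or a bare path arrives without a
driver. A minimal usage sketch under those rules:

    import os
    import tempfile

    import fiona

    path = os.path.join(tempfile.mkdtemp(), 'scratch.shp')
    schema = {'geometry': 'Point', 'properties': {}}

    with fiona.open(path, 'w', driver='ESRI Shapefile', schema=schema) as dst:
        dst.write({'geometry': {'type': 'Point', 'coordinates': (0, 0)},
                   'properties': {}})

    # Removes the .shp and its .dbf/.shx sidecars in one call; calling
    # it again on the same path raises, as test_remove asserts.
    fiona.remove(path, driver='ESRI Shapefile')
    assert not os.path.exists(path)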
diff --git a/tests/test_rfc3339.py b/tests/test_rfc3339.py
index f76c2a1..131eca6 100644
--- a/tests/test_rfc3339.py
+++ b/tests/test_rfc3339.py
@@ -13,7 +13,7 @@ logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
 class DateParseTest(unittest.TestCase):
 
     def test_yyyymmdd(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             parse_date("2012-01-29"), (2012, 1, 29, 0, 0, 0, 0.0))
 
     def test_error(self):
@@ -22,24 +22,24 @@ class DateParseTest(unittest.TestCase):
 class TimeParseTest(unittest.TestCase):
     
     def test_hhmmss(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             parse_time("10:11:12"), (0, 0, 0, 10, 11, 12, 0.0))
 
     def test_hhmm(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             parse_time("10:11"), (0, 0, 0, 10, 11, 0, 0.0))
 
     def test_hhmmssff(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             parse_time("10:11:12.42"), 
             (0, 0, 0, 10, 11, 12, 0.42*1000000.0))
 
     def test_hhmmssz(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             parse_time("10:11:12Z"), (0, 0, 0, 10, 11, 12, 0.0))
 
     def test_hhmmssoff(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             parse_time("10:11:12-01:00"), (0, 0, 0, 10, 11, 12, 0.0))
 
     def test_error(self):
@@ -48,7 +48,7 @@ class TimeParseTest(unittest.TestCase):
 class DatetimeParseTest(unittest.TestCase):
     
     def test_yyyymmdd(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             parse_datetime("2012-01-29T10:11:12"), 
             (2012, 1, 29, 10, 11, 12, 0.0))
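
All three parsers return the same 7-tuple shape, (year, month, day, hour,
minute, second, microseconds), zero-filling whatever the input lacks; note
from test_hhmmssoff that a UTC offset is parsed but not applied. A short
sketch, assuming the functions live in fiona.rfc3339 as the test module's
name suggests:

    from fiona.rfc3339 import parse_date, parse_datetime, parse_time

    # Date-only input zero-fills the time fields.
    assert parse_date("2012-01-29") == (2012, 1, 29, 0, 0, 0, 0.0)

    # Fractional seconds arrive as microseconds in the last slot.
    assert parse_time("10:11:12.42") == (0, 0, 0, 10, 11, 12, 0.42 * 1000000.0)

    # A full datetime fills both halves of the tuple.
    assert parse_datetime("2012-01-29T10:11:12") == (2012, 1, 29, 10, 11, 12, 0.0)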
 
diff --git a/tests/test_unicode.py b/tests/test_unicode.py
index 6696567..6def0d3 100644
--- a/tests/test_unicode.py
+++ b/tests/test_unicode.py
@@ -7,10 +7,12 @@ import sys
 import tempfile
 import unittest
 
+import pytest
 import six
 
 import fiona
 
+
 logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
 
 
@@ -55,20 +57,71 @@ class UnicodeStringFieldTest(unittest.TestCase):
     def tearDown(self):
         shutil.rmtree(self.tempdir)
 
-    def test_write(self):
+    @pytest.mark.xfail(reason="OGR silently fails to convert strings")
+    def test_write_mismatch(self):
+        """TOFIX: OGR silently fails to convert strings"""
+        # Details:
+        #
+        # If we tell OGR that we want a latin-1 encoded output file and
+        # give it a feature with a unicode property that can't be converted
+        # to latin-1, no error is raised and OGR just writes the utf-8
+        # encoded bytes to the output file.
+        #
+        # This might be shapefile specific.
+        #
+        # Consequences: no error on write, but there will be an error
+        # on reading the data and expecting latin-1.
+        schema = {
+            'geometry': 'Point',
+            'properties': {'label': 'str', 'num': 'int'}}
+
+        with fiona.open(os.path.join(self.tempdir, "test-write-fail.shp"),
+                        'w', driver="ESRI Shapefile", schema=schema,
+                        encoding='latin1') as c:
+            c.writerecords([{
+                'type': 'Feature',
+                'geometry': {'type': 'Point', 'coordinates': [0, 0]},
+                'properties': {
+                    'label': u'徐汇区',
+                    'num': 0}}])
+
+        with fiona.open(os.path.join(self.tempdir), encoding='latin1') as c:
+            f = next(c)
+            # Next assert fails.
+            self.assertEqual(f['properties']['label'], u'徐汇区')
+
+    def test_write_utf8(self):
         schema = {
             'geometry': 'Point',
             'properties': {'label': 'str', u'verit\xe9': 'int'}}
         with fiona.open(os.path.join(self.tempdir, "test-write.shp"),
                         "w", "ESRI Shapefile", schema=schema,
                         encoding='utf-8') as c:
-            c.writerecords([
-                {'type': 'Feature', 'geometry': {'type': 'Point',
-                                                 'coordinates': [0, 0]},
-                                    'properties': {'label': u'Ba\u2019kelalan',
-                                                   u'verit\xe9': 0}}])
+            c.writerecords([{
+                'type': 'Feature',
+                'geometry': {'type': 'Point', 'coordinates': [0, 0]},
+                'properties': {
+                    'label': u'Ba\u2019kelalan', u'verit\xe9': 0}}])
 
-        with fiona.open(os.path.join(self.tempdir)) as c:
+        with fiona.open(os.path.join(self.tempdir), encoding='utf-8') as c:
             f = next(c)
             self.assertEquals(f['properties']['label'], u'Ba\u2019kelalan')
             self.assertEquals(f['properties'][u'verit\xe9'], 0)
+
+    def test_write_gb18030(self):
+        """Can write a simplified Chinese shapefile"""
+        schema = {
+            'geometry': 'Point',
+            'properties': {'label': 'str', 'num': 'int'}}
+        with fiona.open(os.path.join(self.tempdir, "test-write-gb18030.shp"),
+                        'w', driver="ESRI Shapefile", schema=schema,
+                        encoding='gb18030') as c:
+            c.writerecords([{
+                'type': 'Feature',
+                'geometry': {'type': 'Point', 'coordinates': [0, 0]},
+                'properties': {'label': u'徐汇区', 'num': 0}}])
+
+        with fiona.open(os.path.join(self.tempdir), encoding='gb18030') as c:
+            f = next(c)
+            self.assertEqual(f['properties']['label'], u'徐汇区')
+            self.assertEqual(f['properties']['num'], 0)
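
Because OGR accepts the latin-1 request above yet silently writes UTF-8
bytes when a property cannot be encoded, a caller can fail fast by pushing
each text property through the target codec before writing. A hedged
sketch, not part of this changeset (six.text_type matches the test
module's imports):

    import six

    def check_encodable(record, encoding):
        """Raise UnicodeEncodeError early instead of corrupting the file."""
        for value in record['properties'].values():
            if isinstance(value, six.text_type):
                value.encode(encoding)  # raises if not representable

    rec = {'geometry': {'type': 'Point', 'coordinates': [0, 0]},
           'properties': {'label': u'\u5f90\u6c47\u533a', 'num': 0}}

    check_encodable(rec, 'gb18030')  # fine: GB18030 covers the label
    try:
        check_encodable(rec, 'latin1')
    except UnicodeEncodeError:
        pass  # caught before OGR writes mismatched bytes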
diff --git a/tests/test_vfs.py b/tests/test_vfs.py
index 0687aa1..ce33980 100644
--- a/tests/test_vfs.py
+++ b/tests/test_vfs.py
@@ -21,10 +21,10 @@ class VsiReadingTest(ReadingTest):
 
     def test_filter_vsi(self):
         results = list(self.c.filter(bbox=(-114.0, 35.0, -104, 45.0)))
-        self.failUnlessEqual(len(results), 67)
+        self.assertEqual(len(results), 67)
         f = results[0]
-        self.failUnlessEqual(f['id'], "0")
-        self.failUnlessEqual(f['properties']['STATE'], 'UT')
+        self.assertEqual(f['id'], "0")
+        self.assertEqual(f['properties']['STATE'], 'UT')
 
 class ZipReadingTest(VsiReadingTest):
     
@@ -35,20 +35,20 @@ class ZipReadingTest(VsiReadingTest):
         self.c.close()
 
     def test_open_repr(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<open Collection '/vsizip/tests/data/coutwildrnp.zip:coutwildrnp', mode 'r' "
             "at %s>" % hex(id(self.c))))
 
     def test_closed_repr(self):
         self.c.close()
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<closed Collection '/vsizip/tests/data/coutwildrnp.zip:coutwildrnp', mode 'r' "
             "at %s>" % hex(id(self.c))))
 
     def test_path(self):
-        self.failUnlessEqual(self.c.path, '/vsizip/tests/data/coutwildrnp.zip')
+        self.assertEqual(self.c.path, '/vsizip/tests/data/coutwildrnp.zip')
 
 class ZipArchiveReadingTest(VsiReadingTest):
     
@@ -59,20 +59,20 @@ class ZipArchiveReadingTest(VsiReadingTest):
         self.c.close()
 
     def test_open_repr(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<open Collection '/vsizip/tests/data/coutwildrnp.zip/coutwildrnp.shp:coutwildrnp', mode 'r' "
             "at %s>" % hex(id(self.c))))
 
     def test_closed_repr(self):
         self.c.close()
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<closed Collection '/vsizip/tests/data/coutwildrnp.zip/coutwildrnp.shp:coutwildrnp', mode 'r' "
             "at %s>" % hex(id(self.c))))
 
     def test_path(self):
-        self.failUnlessEqual(self.c.path, '/vsizip/tests/data/coutwildrnp.zip/coutwildrnp.shp')
+        self.assertEqual(self.c.path, '/vsizip/tests/data/coutwildrnp.zip/coutwildrnp.shp')
 
 class TarArchiveReadingTest(VsiReadingTest):
     
@@ -83,18 +83,18 @@ class TarArchiveReadingTest(VsiReadingTest):
         self.c.close()
 
     def test_open_repr(self):
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<open Collection '/vsitar/tests/data/coutwildrnp.tar/testing/coutwildrnp.shp:coutwildrnp', mode 'r' "
             "at %s>" % hex(id(self.c))))
 
     def test_closed_repr(self):
         self.c.close()
-        self.failUnlessEqual(
+        self.assertEqual(
             repr(self.c),
             ("<closed Collection '/vsitar/tests/data/coutwildrnp.tar/testing/coutwildrnp.shp:coutwildrnp', mode 'r' "
             "at %s>" % hex(id(self.c))))
 
     def test_path(self):
-        self.failUnlessEqual(self.c.path, '/vsitar/tests/data/coutwildrnp.tar/testing/coutwildrnp.shp')
+        self.assertEqual(self.c.path, '/vsitar/tests/data/coutwildrnp.tar/testing/coutwildrnp.shp')
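
These VSI tests read one shapefile through GDAL's /vsizip/ and /vsitar/
virtual filesystems. A sketch of the zip case, assuming the vfs keyword
that the fixtures (set up outside these hunks) pass to fiona.open:

    import fiona

    # GDAL reads inside the archive without unpacking it; fiona maps
    # the zip:// scheme to a /vsizip/ path, matching the reprs above.
    with fiona.open('/coutwildrnp.shp',
                    vfs='zip://tests/data/coutwildrnp.zip') as c:
        print(c.path)  # '/vsizip/tests/data/coutwildrnp.zip/coutwildrnp.shp'
        print(len(list(c)))  # number of features in the layer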
 

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/fiona.git


