[imposm] 35/47: Imported Upstream version 2.5.0

Sebastiaan Couwenberg sebastic at moszumanska.debian.org
Fri Mar 13 19:07:39 UTC 2015


This is an automated email from the git hooks/post-receive script.

sebastic pushed a commit to branch master
in repository imposm.

commit 03271ca7a8799c0c9e21408cd32d97ba926496ba
Author: David Paleino <dapal at debian.org>
Date:   Mon Dec 10 19:23:45 2012 +0100

    Imported Upstream version 2.5.0
---
 CHANGES                              |  17 +-
 MANIFEST.in                          |   1 +
 PKG-INFO                             |  23 +-
 README => README.rst                 |   2 -
 imposm.egg-info/PKG-INFO             |  23 +-
 imposm.egg-info/SOURCES.txt          |  14 +-
 imposm/app.py                        | 102 +++++--
 imposm/cache/tc_tmp.py               | 577 -----------------------------------
 imposm/db/postgis.py                 | 257 +++++++++++-----
 imposm/dbimporter.py                 | 175 ++++++++---
 imposm/geom.py                       | 272 ++++++++++++++++-
 imposm/mapping.py                    | 134 ++++----
 imposm/psqldb.py                     |  66 ++--
 imposm/reader.py                     |  32 +-
 imposm/test/test_dbimporter.py       | 100 ++++++
 imposm/test/test_geom.py             |  33 --
 imposm/test/test_imported.py         |  27 +-
 imposm/test/test_limit_to.py         |  85 ++++++
 imposm/{util.py => util/__init__.py} |  57 ++--
 imposm/util/geom.py                  | 144 +++++++++
 imposm/util/lib.py                   | 107 +++++++
 imposm/util/ogr.py                   | 130 ++++++++
 imposm/version.py                    |   2 +-
 imposm/writer.py                     |  25 +-
 setup.py                             |   2 +-
 25 files changed, 1457 insertions(+), 950 deletions(-)

diff --git a/CHANGES b/CHANGES
index 67844a6..377e331 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,7 +1,20 @@
 Changelog
 ---------
 
-2.4.0 2012-xx-xx
+2.5.0 2012-12-06
+~~~~~~~~~~~~~~~~
+
+- PostGIS 2 support
+- new --limit-to option to limit imports to polygons
+- added --quiet option that only logs progress once per minute
+- add StringIndex and Index mappings for PostgreSQL
+- always drop tables _and_ views with same name before creating new
+  table/view. allows to change mappings from views to tables and
+  vice versa.
+- internal refactoring to make support for non SQL databases easier
+
+
+2.4.0 2012-03-30
 ~~~~~~~~~~~~~~~~
 
 - new Class and Type field types
@@ -39,7 +52,7 @@ Changelog
 - update SRS in GeneralizedTables and UnionTables
 - new waterareas_gen0|1 in default style
 - new area field in landusages table
-- new meter_to_mapunit function to use same mapping 
+- new meter_to_mapunit function to use same mapping
   for EPSG:4326 and projected SRS
 
 2.2.0 2011-06-01
diff --git a/MANIFEST.in b/MANIFEST.in
index 0411940..2f5ff17 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,5 @@
 include setup.py
+include README.rst
 include LICENSE
 include CHANGES
 include imposm/900913.sql
diff --git a/PKG-INFO b/PKG-INFO
index ddfdd06..5ce96c2 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,14 +1,12 @@
 Metadata-Version: 1.0
 Name: imposm
-Version: 2.4.0
+Version: 2.5.0
 Summary: OpenStreetMap importer for PostGIS.
 Home-page: http://imposm.org/
 Author: Oliver Tonnhofer
 Author-email: olt at omniscale.de
 License: Apache Software License 2.0
-Description: .. # -*- restructuredtext -*-
-        
-        Imposm is an importer for OpenStreetMap data. It reads XML and PBF files and
+Description: Imposm is an importer for OpenStreetMap data. It reads XML and PBF files and
         can import the data into PostgreSQL/PostGIS databases.
         
         It is designed to create databases that are optimized for rendering/WMS
@@ -23,7 +21,20 @@ Description: .. # -*- restructuredtext -*-
         Changelog
         ---------
         
-        2.4.0 2012-xx-xx
+        2.5.0 2012-12-06
+        ~~~~~~~~~~~~~~~~
+        
+        - PostGIS 2 support
+        - new --limit-to option to limit imports to polygons
+        - added --quiet option that only logs progress once per minute
+        - add StringIndex and Index mappings for PostgreSQL
+        - always drop tables _and_ views with same name before creating new
+          table/view. allows to change mappings from views to tables and
+          vice versa.
+        - internal refactoring to make support for non SQL databases easier
+        
+        
+        2.4.0 2012-03-30
         ~~~~~~~~~~~~~~~~
         
         - new Class and Type field types
@@ -61,7 +72,7 @@ Description: .. # -*- restructuredtext -*-
         - update SRS in GeneralizedTables and UnionTables
         - new waterareas_gen0|1 in default style
         - new area field in landusages table
-        - new meter_to_mapunit function to use same mapping 
+        - new meter_to_mapunit function to use same mapping
           for EPSG:4326 and projected SRS
         
         2.2.0 2011-06-01
diff --git a/README b/README.rst
similarity index 93%
rename from README
rename to README.rst
index 21f5e77..7ef4fe3 100644
--- a/README
+++ b/README.rst
@@ -1,5 +1,3 @@
-.. # -*- restructuredtext -*-
-
 Imposm is an importer for OpenStreetMap data. It reads XML and PBF files and
 can import the data into PostgreSQL/PostGIS databases.
 
diff --git a/imposm.egg-info/PKG-INFO b/imposm.egg-info/PKG-INFO
index ddfdd06..5ce96c2 100644
--- a/imposm.egg-info/PKG-INFO
+++ b/imposm.egg-info/PKG-INFO
@@ -1,14 +1,12 @@
 Metadata-Version: 1.0
 Name: imposm
-Version: 2.4.0
+Version: 2.5.0
 Summary: OpenStreetMap importer for PostGIS.
 Home-page: http://imposm.org/
 Author: Oliver Tonnhofer
 Author-email: olt at omniscale.de
 License: Apache Software License 2.0
-Description: .. # -*- restructuredtext -*-
-        
-        Imposm is an importer for OpenStreetMap data. It reads XML and PBF files and
+Description: Imposm is an importer for OpenStreetMap data. It reads XML and PBF files and
         can import the data into PostgreSQL/PostGIS databases.
         
         It is designed to create databases that are optimized for rendering/WMS
@@ -23,7 +21,20 @@ Description: .. # -*- restructuredtext -*-
         Changelog
         ---------
         
-        2.4.0 2012-xx-xx
+        2.5.0 2012-12-06
+        ~~~~~~~~~~~~~~~~
+        
+        - PostGIS 2 support
+        - new --limit-to option to limit imports to polygons
+        - added --quiet option that only logs progress once per minute
+        - add StringIndex and Index mappings for PostgreSQL
+        - always drop tables _and_ views with same name before creating new
+          table/view. allows to change mappings from views to tables and
+          vice versa.
+        - internal refactoring to make support for non SQL databases easier
+        
+        
+        2.4.0 2012-03-30
         ~~~~~~~~~~~~~~~~
         
         - new Class and Type field types
@@ -61,7 +72,7 @@ Description: .. # -*- restructuredtext -*-
         - update SRS in GeneralizedTables and UnionTables
         - new waterareas_gen0|1 in default style
         - new area field in landusages table
-        - new meter_to_mapunit function to use same mapping 
+        - new meter_to_mapunit function to use same mapping
           for EPSG:4326 and projected SRS
         
         2.2.0 2011-06-01
diff --git a/imposm.egg-info/SOURCES.txt b/imposm.egg-info/SOURCES.txt
index 241add1..c5a8d44 100644
--- a/imposm.egg-info/SOURCES.txt
+++ b/imposm.egg-info/SOURCES.txt
@@ -1,7 +1,7 @@
 CHANGES
 LICENSE
 MANIFEST.in
-README
+README.rst
 internal.proto
 setup.cfg
 setup.py
@@ -18,7 +18,6 @@ imposm/merge.py
 imposm/multipolygon.py
 imposm/psqldb.py
 imposm/reader.py
-imposm/util.py
 imposm/version.py
 imposm/writer.py
 imposm.egg-info/PKG-INFO
@@ -30,18 +29,21 @@ imposm.egg-info/requires.txt
 imposm.egg-info/top_level.txt
 imposm/cache/__init__.py
 imposm/cache/internal.cc
-imposm/cache/kc.c
 imposm/cache/osm.py
 imposm/cache/tc.c
 imposm/cache/tc.pyx
-imposm/cache/tc_tmp.py
 imposm/db/__init__.py
 imposm/db/config.py
 imposm/db/postgis.py
 imposm/test/__init__.py
 imposm/test/test_cache.py
+imposm/test/test_dbimporter.py
 imposm/test/test_field_types.py
-imposm/test/test_geom.py
 imposm/test/test_imported.py
+imposm/test/test_limit_to.py
 imposm/test/test_multipolygon.py
-imposm/test/test_tag_mapper.py
\ No newline at end of file
+imposm/test/test_tag_mapper.py
+imposm/util/__init__.py
+imposm/util/geom.py
+imposm/util/lib.py
+imposm/util/ogr.py
\ No newline at end of file
diff --git a/imposm/app.py b/imposm/app.py
index f6124ba..ea3b250 100644
--- a/imposm/app.py
+++ b/imposm/app.py
@@ -1,11 +1,11 @@
 # Copyright 2011 Omniscale (http://omniscale.com)
-# 
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 #     http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -44,6 +44,7 @@ from imposm.db.config import DB
 from imposm.cache import OSMCache
 from imposm.reader import ImposmReader
 from imposm.mapping import TagMapper
+from imposm.geom import load_geom
 
 try:
     n_cpu = multiprocessing.cpu_count()
@@ -73,7 +74,9 @@ def main(argv=None):
         default=False, help='show this help message and exit')
     parser.add_option('--debug', action='store_true',
         default=False, help='show debug information')
-    
+    parser.add_option('--quiet', action='store_true',
+        default=False, help='only print progress every 60 seconds')
+
     parser.add_option('-m', '--mapping-file', dest='mapping_file',
         metavar='<file>')
     parser.add_option('-h', '--host', dest='host', metavar='<host>')
@@ -84,7 +87,7 @@ def main(argv=None):
     parser.add_option('--connection', dest='connection',
         help="connection string like postgis://user:pass@host:port/database,"
              " this overwrites the -h/-p/-d/-U options")
-    
+
     parser.add_option('-c', '--concurrency', dest='concurrency', metavar='N',
                       type='int', default=n_cpu)
 
@@ -94,8 +97,8 @@ def main(argv=None):
         action='store_true')
     parser.add_option('--cache-dir', dest='cache_dir', default='.',
         help="path where node/ways/relations should be cached [current working dir]")
-    
-    
+
+
     parser.add_option('--table-prefix',
         dest='table_prefix', default=None, metavar='osm_new_',
         help='prefix for imported tables')
@@ -122,22 +125,35 @@ def main(argv=None):
         'move backup tables to production tables')
     parser.add_option('--remove-backup-tables', dest='remove_backup_tables', default=False,
         action='store_true')
-        
 
     parser.add_option('-n', '--dry-run', dest='dry_run', default=False,
         action='store_true')
 
+    parser.add_option('--limit-to', dest='limit_to', metavar='file',
+        help='limit imported geometries to (multi)polygons in EPSG:4326')
+
     (options, args) = parser.parse_args(argv)
 
     setup_logging(debug=options.debug)
 
-    if (argv and len(argv) == 0) or len(sys.argv) == 1:
+    if (argv and len(argv) == 0) or (not argv and len(sys.argv) == 1):
+        options.help = True
+
+    if not any([options.read, options.write, options.optimize, options.deploy_tables,
+        options.recover_tables, options.remove_backup_tables]):
         options.help = True
 
     if options.help:
         parser.print_help()
         sys.exit(1)
-    
+
+    if options.quiet:
+        logger = imposm.util.QuietProgressLog
+        logger_parser = imposm.util.QuietParserProgress
+    else:
+        logger = imposm.util.ProgressLog
+        logger_parser = imposm.util.ParserProgress
+
     if options.proj:
         if ':' not in options.proj:
             print 'ERROR: --proj should be in EPSG:00000 format'
@@ -145,31 +161,48 @@ def main(argv=None):
         # check proj if meter_to_mapunit needs to do anything
         if options.proj.lower() == 'epsg:4326':
             imposm.mapping.import_srs_is_geographic = True
-    
+
     mapping_file = os.path.join(os.path.dirname(__file__),
         'defaultmapping.py')
     if options.mapping_file:
         print 'loading %s as mapping' % options.mapping_file
         mapping_file = options.mapping_file
 
+    polygon = None
+    if options.limit_to:
+        logger.message('## reading --limit-to %s' % options.limit_to)
+        polygon_timer = imposm.util.Timer('reading', logger)
+        polygon = load_geom(options.limit_to)
+        polygon_timer.stop()
+        if polygon is None:
+            print 'ERROR: No valid polygon/multipolygon found'
+            sys.exit(1)
+
     mappings = {}
     execfile(mapping_file, mappings)
-    tag_mapping = TagMapper([m for n, m in mappings.iteritems() 
-        if isinstance(m, imposm.mapping.Mapping)])
+    tag_mapping = TagMapper([m for n, m in mappings.iteritems()
+        if isinstance(m, imposm.mapping.Mapping)], limit_to=polygon)
 
     if 'IMPOSM_MULTIPOLYGON_REPORT' in os.environ:
         imposm.config.imposm_multipolygon_report = float(os.environ['IMPOSM_MULTIPOLYGON_REPORT'])
     if 'IMPOSM_MULTIPOLYGON_MAX_RING' in os.environ:
         imposm.config.imposm_multipolygon_max_ring = int(os.environ['IMPOSM_MULTIPOLYGON_MAX_RING'])
 
+    if options.table_prefix:
+        options.table_prefix = options.table_prefix.rstrip('_') + '_'
+    if options.table_prefix_production:
+        options.table_prefix_production = options.table_prefix_production.rstrip('_') + '_'
+    if options.table_prefix_backup:
+        options.table_prefix_backup = options.table_prefix_backup.rstrip('_') + '_'
+
     if (options.write or options.optimize or options.deploy_tables
         or options.remove_backup_tables or options.recover_tables):
         db_conf = mappings['db_conf']
         if options.table_prefix:
             db_conf.prefix = options.table_prefix
         else:
-            options.table_prefix = db_conf.prefix
-        
+            options.table_prefix = db_conf.prefix.rstrip('_') + '_'
+
         if options.connection:
             from imposm.db.config import db_conf_from_string
             db_conf = db_conf_from_string(options.connection, db_conf)
@@ -183,14 +216,12 @@ def main(argv=None):
             if options.user:
                 from getpass import getpass
                 db_conf.password = getpass('password for %(user)s at %(host)s:' % db_conf)
-        
+
         if options.proj:
             db_conf.proj = options.proj
-    
-    logger = imposm.util.ProgressLog
-    
+
     imposm_timer = imposm.util.Timer('imposm', logger)
-    
+
     if options.read:
         if not options.merge_cache:
             cache_files = glob.glob(os.path.join(options.cache_dir, 'imposm_*.cache'))
@@ -204,18 +235,18 @@ def main(argv=None):
                     sys.exit(2)
                 for cache_file in cache_files:
                     os.unlink(cache_file)
-    
+
     cache = OSMCache(options.cache_dir)
-    
+
     if options.read:
         read_timer = imposm.util.Timer('reading', logger)
-        
+
         if not args:
             print "no file(s) supplied"
             sys.exit(2)
 
         reader = ImposmReader(tag_mapping, cache=cache, merge=options.merge_cache,
-            pool_size=options.concurrency, logger=logger)
+            pool_size=options.concurrency, logger=logger_parser)
         reader.estimated_coords = imposm.util.estimate_records(args)
         for arg in args:
             logger.message('## reading %s' % arg)
@@ -225,7 +256,7 @@ def main(argv=None):
     if options.write:
         db = DB(db_conf)
         write_timer = imposm.util.Timer('writing', logger)
-        
+
         logger.message('## dropping/creating tables')
         if not options.dry_run:
             db.create_tables(tag_mapping.mappings)
@@ -237,7 +268,7 @@ def main(argv=None):
             db.create_views(mappings, ignore_errors=True)
             db.commit()
 
-        writer = ImposmWriter(tag_mapping, db, cache=cache, 
+        writer = ImposmWriter(tag_mapping, db, cache=cache,
             pool_size=options.concurrency, logger=logger,
             dry_run=options.dry_run)
         writer.relations()
@@ -251,14 +282,19 @@ def main(argv=None):
             generalized_timer = imposm.util.Timer('generalizing tables', logger)
             db.create_generalized_tables(mappings)
             generalized_timer.stop()
-            
+
             logger.message('## creating union views')
             view_timer = imposm.util.Timer('creating views', logger)
             db.create_views(mappings)
             view_timer.stop()
 
+            logger.message('## creating geometry indexes')
+            index_timer = imposm.util.Timer('creating indexes', logger)
+            db.post_insert(mappings);
+            index_timer.stop()
+
             db.commit()
-        
+
         write_timer.stop()
 
     if options.optimize:
@@ -267,27 +303,27 @@ def main(argv=None):
         logger.message('## optimizing tables')
         db.optimize(mappings)
         optimize_timer.stop()
-    
+
     if options.recover_tables:
         assert not options.deploy_tables, ('cannot swap and recover production '
             'tables at the same time')
         options.table_prefix, options.table_prefix_backup = \
             options.table_prefix_backup, options.table_prefix
-        
+
     if options.deploy_tables or options.recover_tables:
         db = DB(db_conf)
-        db.swap_tables(options.table_prefix, 
+        db.swap_tables(options.table_prefix,
             options.table_prefix_production, options.table_prefix_backup)
         db.remove_views(options.table_prefix)
         db.db_conf.prefix = options.table_prefix_production
         db.create_views(mappings)
         db.commit()
-    
+
     if options.remove_backup_tables:
         db = DB(db_conf)
         db.remove_tables(options.table_prefix_backup)
         db.commit()
-    
+
     imposm_timer.stop()
 
 if __name__ == '__main__':
diff --git a/imposm/cache/tc_tmp.py b/imposm/cache/tc_tmp.py
deleted file mode 100644
index bfea081..0000000
--- a/imposm/cache/tc_tmp.py
+++ /dev/null
@@ -1,577 +0,0 @@
-from imposm.base import Node, Way, Relation
-import sys
-import os
-
-import marshal
-
-from ctypes import CDLL
-from ctypes.util import find_library as _find_library
-from ctypes import (
-   c_void_p,
-   c_char_p,
-   c_int,
-   c_double,
-   c_long,
-   c_int64,
-   c_uint32,
-   byref,
-   cast,
-   Structure,
-   POINTER,
-   pointer,
-   create_string_buffer,
-   addressof,
-   sizeof,
-   memmove,
-   string_at,
-   CFUNCTYPE,
-)
-
-default_locations = dict(
-    darwin=dict(
-        paths = ['/opt/local/lib'],
-        exts = ['.dylib'],
-    ),
-    win32=dict(
-        paths = [os.path.dirname(os.__file__) + '/../../../DLLs'],
-        exts = ['.dll']
-    )
-)
-
-#additional_lib_path = os.environ.get('MAPPROXY_LIB_PATH')
-#if additional_lib_path:
-#    additional_lib_path = additional_lib_path.split(os.pathsep)
-#    additional_lib_path.reverse()
-#    for locs in default_locations.values():
-#        for path in additional_lib_path:
-#            locs['paths'].insert(0, path)
-#
-def load_library(lib_names, locations_conf=default_locations):
-    """
-    Load the `lib_name` library with ctypes.
-    If ctypes.util.find_library does not find the library,
-    different path and filename extensions will be tried.
-    
-    Retruns the loaded library or None.
-    """
-    if isinstance(lib_names, basestring):
-        lib_names = [lib_names]
-    
-    for lib_name in lib_names:
-        lib = load_library_(lib_name, locations_conf)
-        if lib is not None: return lib
-
-def load_library_(lib_name, locations_conf=default_locations):
-    lib_path = find_library(lib_name)
-    
-    if lib_path:
-        return CDLL(lib_path)
-    
-    if sys.platform in locations_conf:
-        paths = locations_conf[sys.platform]['paths']
-        exts = locations_conf[sys.platform]['exts']
-        lib_path = find_library(lib_name, paths, exts)
-    
-    if lib_path:
-        return CDLL(lib_path)
-        
-
-def find_library(lib_name, paths=None, exts=None):
-    """
-    Search for library in all permutations of `paths` and `exts`.
-    If nothing is found None is returned.
-    """
-    if not paths or not exts:
-        lib = _find_library(lib_name)
-        if lib is None and lib_name.startswith('lib'):
-            lib = _find_library(lib_name[3:])
-        return lib
-    
-    for lib_name in [lib_name] + ([lib_name[3:]] if lib_name.startswith('lib') else []):
-        for path in paths:
-            for ext in exts:
-                lib_path = os.path.join(path, lib_name + ext)
-                if os.path.exists(lib_path):
-                    return lib_path
-    
-    return None
-    
-#cdef extern from "Python.h":
-#    object PyString_FromStringAndSize(char *s, Py_ssize_t len)
-#
-#cdef extern from "marshal.h":
-#    object PyMarshal_ReadObjectFromString(char *string, Py_ssize_t len)
-#    object PyMarshal_WriteObjectToString(object value, int version)
-#
-
-TCBDB = c_void_p
-BDBCUR = c_void_p
-TCCMP = CFUNCTYPE(c_int, c_char_p, c_int, c_char_p, c_int, c_void_p)
-
-def init_libtokyo_util():
-    libtokyocabinet = load_library('libtokyocabinet')
-    
-    if libtokyocabinet is None: return
-    
-    #???
-    #libtokyocabinet.TCCMP.argtypes = []
-    #libtokyocabinet.TCCMP.restype = c_int
-    #
-    libtokyocabinet.tccmpint32.argtypes = []
-    libtokyocabinet.tccmpint32.restype = c_int
-    
-    libtokyocabinet.tccmpint64.argtypes = []
-    libtokyocabinet.tccmpint64.restype = c_long
-    
-    libtokyocabinet.tcbdbnew.argtypes = []
-    libtokyocabinet.tcbdbnew.restype = TCBDB
-    
-    libtokyocabinet.tcbdbdel.argtypes = [TCBDB]
-    libtokyocabinet.tcbdbdel.restype = None
-    
-    libtokyocabinet.tcbdbecode.argtypes = [TCBDB]
-    libtokyocabinet.tcbdbecode.restype = c_int
-    
-    libtokyocabinet.tcbdbtune.argtypes = [TCBDB, c_int, c_int, c_int, c_int, c_int, c_int]
-    libtokyocabinet.tcbdbtune.restype = c_int
-    
-    libtokyocabinet.tcbdbsetcache.argtypes = [TCBDB, c_int, c_int]
-    libtokyocabinet.tcbdbsetcache.restype = c_int
-    
-    CMPFUNC = CFUNCTYPE(c_long)
-    libtokyocabinet.tcbdbsetcmpfunc.argtypes = [TCBDB, c_void_p, c_void_p]
-    libtokyocabinet.tcbdbsetcmpfunc.restype = c_int
-    
-    libtokyocabinet.tcbdbsetmutex.argtypes = [TCBDB]
-    libtokyocabinet.tcbdbsetmutex.restype = c_int
-    
-    libtokyocabinet.tcbdbopen.argtypes = [TCBDB, c_char_p, c_int]
-    libtokyocabinet.tcbdbopen.restype = c_int
-    
-    libtokyocabinet.tcbdbclose.argtypes = [TCBDB]
-    libtokyocabinet.tcbdbclose.restype = c_int
-    
-    libtokyocabinet.tcbdbput.argtypes = [TCBDB, c_void_p, c_int, c_void_p, c_int]
-    libtokyocabinet.tcbdbput.restype = c_int
-    
-    libtokyocabinet.tcbdbget.argtypes = [TCBDB, c_void_p, c_int, POINTER(c_int)]
-    libtokyocabinet.tcbdbget.restype = c_void_p
-    
-    libtokyocabinet.tcbdbget3.argtypes = [TCBDB, c_void_p, c_int, POINTER(c_int)]
-    libtokyocabinet.tcbdbget3.restype = c_void_p
-    
-    libtokyocabinet.tcbdbrnum.argtypes = [TCBDB]
-    libtokyocabinet.tcbdbrnum.restype = c_long
-    
-    libtokyocabinet.tcbdbcurnew.argtypes = [TCBDB]
-    libtokyocabinet.tcbdbcurnew.restypes = BDBCUR
-    
-    libtokyocabinet.tcbdbcurdel.argtypes = [BDBCUR]
-    libtokyocabinet.tcbdbcurdel.restypes = None
-    
-    libtokyocabinet.tcbdbcurfirst.argtypes = [BDBCUR]
-    libtokyocabinet.tcbdbcurfirst.restypes = c_int
-    
-    libtokyocabinet.tcbdbcurnext.argtypes = [BDBCUR]
-    libtokyocabinet.tcbdbcurnext.restypes = c_int
-    
-    libtokyocabinet.tcbdbcurkey3.argtypes = [BDBCUR, POINTER(c_int)]
-    libtokyocabinet.tcbdbcurkey3.restypes = c_void_p
-    
-    libtokyocabinet.tcbdbcurval3.argtypes = [BDBCUR, POINTER(c_int)]
-    libtokyocabinet.tcbdbcurval3.restypes = c_void_p
-    
-    return libtokyocabinet
-
-libtokyocabinet = init_libtokyo_util()
-
-BDBFOPEN = 1 << 0
-BDBFFATAL = 1 << 1
-
-BDBOREADER = 1 << 0 # /* open as a reader */
-BDBOWRITER = 1 << 1 # /* open as a writer */
-BDBOCREAT = 1 << 2  # /* writer creating */
-BDBOTRUNC = 1 << 3  # /* writer truncating */
-BDBONOLCK = 1 << 4  # /* open without locking */
-BDBOLCKNB = 1 << 5  # /* lock without blocking */
-BDBOTSYNC = 1 << 6  # /* synchronize every transaction */
-
-BDBTLARGE = 1 << 0  # /* use 64-bit bucket array */
-BDBTDEFLATE = 1 << 1 # /* compress each page with Deflate */
-BDBTBZIP = 1 << 2   # /* compress each record with BZIP2 */
-BDBTTCBS = 1 << 3   # /* compress each page with TCBS */
-BDBTEXCODEC = 1 << 4 # /* compress each record with outer functions */
-
-COORD_FACTOR = 11930464.7083 # ((2<<31)-1)/360.0
-
-def _coord_to_uint32(x):
-    return int((x + 180.0) * COORD_FACTOR)
-
-def _uint32_to_coord(x):
-    return ((x / COORD_FACTOR) - 180.0)
-
-def coord_struct(x, y):
-    return Coord(x=_coord_to_uint32(x), y=_coord_to_uint32(y))
-
-class Coord(Structure):
-    _fields_ = [("x", c_uint32),
-                ("y", c_uint32)]
-
-_modes = {
-    'w': BDBOWRITER | BDBOCREAT,
-    'r': BDBOREADER | BDBONOLCK,
-}
-
-class BDB(object):
-    def __init__(self, filename, mode='w', estimated_records=0):
-        self.db = libtokyocabinet.tcbdbnew()
-        self._cur = None
-        self._opened = 0
-        self.filename = filename
-        self._tune_db(estimated_records)
-
-        tccmpint64 = TCCMP(('tccmpint64', libtokyocabinet))
-        libtokyocabinet.tcbdbsetcmpfunc(self.db, tccmpint64, None)
-
-        # libtokyocabinet.tcbdbsetcmpfunc(self.db, libtokyocabinet.tccmpint64._get_address(), None)
-        if not libtokyocabinet.tcbdbopen(self.db, filename, _modes[mode]):
-            raise IOError(libtokyocabinet.tcbdbecode(self.db))
-        self._opened = 1
-    
-    def _tune_db(self, estimated_records):
-        if estimated_records:
-            lmemb = 128 # default
-            nmemb = -1
-            fpow = 13 # 2^13 = 8196
-            bnum = int((estimated_records*3)/lmemb)
-            libtokyocabinet.tcbdbtune(self.db, lmemb, nmemb, bnum, 5, fpow, BDBTLARGE | BDBTDEFLATE)
-        else:
-            libtokyocabinet.tcbdbtune(self.db, -1, -1, -1, 5, 13, BDBTLARGE | BDBTDEFLATE)
-    
-    def get(self, osmid):
-        """
-        Return object with given id.
-        Returns None if id is not stored.
-        """
-        ret_size = c_int()
-        ret = libtokyocabinet.tcbdbget3(self.db, byref(c_int64(osmid)), 8, byref(ret_size))
-        if not ret: return None
-        data = string_at(ret, ret_size.value)
-        return self._obj(osmid, marshal.loads(data))
-
-    def get_raw(self, osmid):
-        """
-        Return object with given id.
-        Returns None if id is not stored.
-        """
-        ret_size = c_int()
-        ret = libtokyocabinet.tcbdbget3(self.db, byref(c_int64(osmid)), 8, byref(ret_size))
-        if not ret: return None
-        return string_at(ret, ret_size.value)
-
-    def put(self, osmid, data):
-        return self.put_marshaled(osmid, marshal.dumps(data, 2))
-
-    def put_marshaled(self, osmid, data):
-        return libtokyocabinet.tcbdbput(self.db, byref(c_int64(osmid)), 8, byref(create_string_buffer(data)), len(data))
-
-    def _obj(self, osmid, data):
-        """
-        Create an object from the id and unmarshaled data.
-        Should be overridden by subclasses.
-        """
-        return data
-
-    def __iter__(self):
-        """
-        Return an iterator over the database.
-        Resets any existing iterator.
-        """
-        if self._cur:
-            libtokyocabinet.tcbdbcurdel(self._cur)
-            self._cur = None
-        self._cur = libtokyocabinet.tcbdbcurnew(self.db)
-        print self.db, self._cur
-        #libtokyocabinet.tcbdbcurfirst(self._cur)
-        #if not libtokyocabinet.tcbdbcurfirst(self._cur):
-        #    return iter([])
-        return self
-
-    def __contains__(self, osmid):
-        ret_size = c_int()
-        ret = tcbdbget3(self.db, byref(c_int64(osmid)), 8, byref(ret_size));
-        if ret:
-            return 1
-        else:
-            return 0
-    
-    def __len__(self):
-        return libtokyocabinet.tcbdbrnum(self.db)
-    
-    def next(self):
-        """
-        Return next item as object.
-        """
-        #cdef int64_t osmid
-        if not self._cur: raise StopIteration
-
-        osmid, data = self._get_cur()
-
-        # advance cursor, set to NULL if at the end
-        if libtokyocabinet.tcbdbcurnext(self._cur) == 0:
-            libtokyocabinet.tcbdbcurdel(self._cur)
-            self._cur = None
-        
-        # return objectified item
-        return self._obj(osmid, data)
-
-    def _get_cur(self):
-        """
-        Return the current object at the current cursor position
-        as a tuple of the id and the unmarshaled data.
-        """
-        size = c_int()
-        ret = libtokyocabinet.tcbdbcurkey3(self._cur, byref(size))
-        osmid = byref(ret)[0]
-        ret = libtokyocabinet.tcbdbcurval3(self._cur, byref(size))
-        data = string_at(ret, size.value)
-        value = marshal.loads(data)
-        return osmid, value
-
-    def close(self):
-        if self._opened:
-            libtokyocabinet.tcbdbclose(self.db)
-        self._opened = 0
-    
-    def __dealloc__(self):
-        if self._opened:
-            libtokyocabinet.tcbdbclose(self.db)
-        libtokyocabinet.tcbdbdel(self.db)
-
-class CoordDB(BDB):
-    def put(self, osmid, x, y):
-        return self._put(osmid, x, y)
-    
-    def put_marshaled(self, osmid, x, y):
-        return self._put(osmid, x, y)
-    
-    def _put(self, osmid, x, y):
-        p = coord_struct(x, y)
-        return libtokyocabinet.tcbdbput(self.db, byref(c_int64(osmid)), 8, byref(p), 8)
-
-    def get(self, osmid):
-        ret_size = c_int
-        value = libtokyocabinet.tcbdbget3(self.db, byref(c_int64(osmid)), 8, byref(ret_size))
-        if not value: return
-        coord = Coord()
-        memmove(addressof(coord), value, sizeof(Coord))
-        return _uint32_to_coord(coord.x), _uint32_to_coord(coord.y)
-
-    def get_coords(self, refs):
-        ret_size = c_int()
-        coords = list()
-        for osmid in refs:
-            value = libtokyocabinet.tcbdbget3(self.db, byref(c_int64(osmid)), 8, byref(ret_size))
-            if not value: return
-            coord = Coord()
-            memmove(addressof(coord), value, sizeof(Coord))
-            coords.append((_uint32_to_coord(coord.x), _uint32_to_coord(coord.y)))
-            
-        return coords
-
-    def _get_cur(self):
-        size = c_int()
-        ret = tcbdbcurkey3(self._cur, byref(size))
-        osmid = byref(ret)[0]
-        value = libtokyocabinet.tcbdbcurval3(self._cur, byef(size))
-        return osmid, (_uint32_to_coord(value.x), _uint32_to_coord(value.y))
-
-    def _obj(self, osmid, data):
-        return osmid, data
-
-class NodeDB(BDB):
-    def put(self, osmid, tags, pos):
-        return self.put_marshaled(osmid, marshal.dumps((tags, pos), 2))
-    
-    def put_marshaled(self, osmid, data):
-        return libtokyocabinet.tcbdbput(self.db, byref(c_int64(osmid)), 8, byref(create_string_buffer(data)), len(data))
-
-    def _obj(self, osmid, data):
-        return Node(osmid, data[0], data[1])
-
-class InsertedWayDB(BDB):
-    def put(self, osmid):
-        return libtokyocabinet.tcbdbput(self.db, byref(c_int64(osmid)), 8, 'x', 1);
-
-    def __next__(self):
-        """
-        Return next item as object.
-        """
-        size = None
-        
-        if not self._cur: raise StopIteration
-
-        ret = libtokyocabinet.tcbdbcurkey3(self._cur, byref(size))
-        osmid = byref(ret)[0]
-
-        # advance cursor, set to NULL if at the end
-        if libtokyocabinet.tcbdbcurnext(self._cur) == 0:
-            libtokyocabinet.tcbdbcurdel(self._cur)
-            self._cur = None
-
-        return osmid
-
-class RefTagDB(BDB):
-    """
-    Database for items with references and tags (i.e. ways/relations).
-    """
-    def put(self, osmid, tags, refs):
-        return self.put_marshaled(osmid, marshal.dumps((tags, refs), 2))
-    
-    def put_marshaled(self, osmid, data):
-        return libtokyocabinet.tcbdbput(self.db, byref(c_int64(osmid)), sizeof(int64_t), byref(create_string_buffer(data)), len(data))
-
-class WayDB(RefTagDB):
-    def _obj(self, osmid, data):
-        return Way(osmid, data[0], data[1])
-
-class RelationDB(RefTagDB):
-    def _obj(self, osmid, data):
-        return Relation(osmid, data[0], data[1])
-
-from imposm.cache.internal import DeltaCoords as _DeltaCoords
-from collections import deque
-import bisect
-
-def unzip_nodes(nodes):
-    ids, lons, lats = [], [], []
-    last_id = last_lon = last_lat = 0
-    for id, lon_f, lat_f in nodes:
-        lon = _coord_to_uint32(lon_f)
-        lat = _coord_to_uint32(lat_f)
-        
-        ids.append(id - last_id)
-        lons.append(lon - last_lon)
-        lats.append(lat - last_lat)
-        last_id = id
-        last_lon = lon
-        last_lat = lat
-    
-    return ids, lons, lats
-
-def zip_nodes(ids, lons, lats):
-    nodes = []
-    last_id = last_lon = last_lat = 0
-
-    for i in range(len(ids)):
-        last_id += ids[i]
-        last_lon += lons[i]
-        last_lat += lats[i]
-    
-        nodes.append((
-            last_id,
-            _uint32_to_coord(last_lon),
-            _uint32_to_coord(last_lat)
-        ))
-    return nodes
-
-class DeltaNodes(object):
-    def __init__(self, data=None):
-        self.nodes = []
-        self.changed = False
-        if data:
-            self.deserialize(data)
-    
-    def changed(self):
-        return self.changed
-    
-    def get(self, osmid):
-        i = bisect.bisect(self.nodes, (osmid, ))
-        if i != len(self.nodes) and self.nodes[i][0] == osmid:
-            return self.nodes[i][1:]
-        return None
-    
-    def add(self, osmid, lon, lat):
-        # todo: overwrite
-        self.changed = True
-        if self.nodes and self.nodes[-1][0] < osmid:
-            self.nodes.append((osmid, lon, lat))
-        else:
-            bisect.insort(self.nodes, (osmid, lon, lat))
-    
-    def serialize(self):
-        ids, lons, lats = unzip_nodes(self.nodes)
-        nodes = _DeltaCoords()
-        nodes.ids.extend(ids)
-        nodes.lons.extend(lons)
-        nodes.lats.extend(lats)
-        return nodes.SerializeToString()
-    
-    def deserialize(self, data):
-        nodes = _DeltaCoords()
-        nodes.ParseFromString(data)
-        self.nodes = zip_nodes(
-            nodes.ids, nodes.lons, nodes.lats)
-
-class DeltaCoordsDB(object):
-    def __init__(self, filename, mode='w', estimated_records=0, delta_nodes_cache_size=100, delta_nodes_size=6):
-        self.db = BDB(filename, mode, estimated_records)
-        self.mode = mode
-        self.delta_nodes = {}
-        self.delta_node_ids = deque()
-        self.delta_nodes_cache_size = delta_nodes_cache_size
-        self.delta_nodes_size = delta_nodes_size
-    
-    def put(self, osmid, lon, lat):
-        if self.mode == 'r':
-            return None
-        delta_id = osmid >> self.delta_nodes_size
-        if delta_id not in self.delta_nodes:
-            self.fetch_delta_node(delta_id)
-        delta_node = self.delta_nodes[delta_id]
-        delta_node.add(osmid, lon, lat)
-        return True
-    
-    put_marshaled = put
-    
-    def get(self, osmid):
-        delta_id = osmid >> self.delta_nodes_size
-        if delta_id not in self.delta_nodes:
-            self.fetch_delta_node(delta_id)
-        return self.delta_nodes[delta_id].get(osmid)
-    
-    def get_coords(self, osmids):
-        coords = []
-        for osmid in osmids:
-            coord = self.get(osmid)
-            if coord is None:
-                return
-            coords.append(coord)
-        return coords
-    
-    def close(self):
-        for node_id, node in self.delta_nodes.iteritems():
-            self._put(node_id, node)
-        self.delta_nodes = {}
-        self.delta_node_ids = deque()
-        self.db.close()
-    
-    def _put(self, delta_id, delta_node):
-        data = delta_node.serialize()
-        self.db.put_marshaled(delta_id, data)
-    
-    def _get(self, delta_id):
-        return DeltaNodes(data=self.db.get_raw(delta_id))
-    
-    def fetch_delta_node(self, delta_id):
-        if len(self.delta_node_ids) >= self.delta_nodes_cache_size:
-            rm_id = self.delta_node_ids.popleft()
-            rm_node = self.delta_nodes.pop(rm_id)
-            if rm_node.changed:
-                self._put(rm_id, rm_node)
-        new_node = self._get(delta_id)
-        if new_node is None:
-            new_node = DeltaNodes()
-        self.delta_nodes[delta_id] = new_node
-        self.delta_node_ids.append(delta_id)
-
diff --git a/imposm/db/postgis.py b/imposm/db/postgis.py
index b6b732e..6514027 100644
--- a/imposm/db/postgis.py
+++ b/imposm/db/postgis.py
@@ -1,11 +1,11 @@
-# Copyright 2011 Omniscale (http://omniscale.com)
-# 
+# Copyright 2011-2012 Omniscale (http://omniscale.com)
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 #     http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -13,6 +13,8 @@
 # limitations under the License.
 
 import time
+import uuid
+from contextlib import contextmanager
 
 import psycopg2
 import psycopg2.extensions
@@ -23,14 +25,26 @@ log = logging.getLogger(__name__)
 from imposm import config
 from imposm.mapping import UnionView, GeneralizedTable, Mapping
 
+unknown = object()
+
 class PostGISDB(object):
-    def __init__(self, db_conf):
+    insert_data_format = 'tuple'
+
+    def __init__(self, db_conf, use_geometry_columns_table=unknown):
         self.db_conf = db_conf
         self.srid = int(db_conf['proj'].split(':')[1])
+
         self._insert_stmts = {}
         self._connection = None
         self._cur = None
 
+        if use_geometry_columns_table is unknown:
+            if self.is_postgis_2():
+                use_geometry_columns_table = False
+            else:
+                use_geometry_columns_table = True
+        self.use_geometry_columns_table = use_geometry_columns_table
+
     @property
     def table_prefix(self):
         return self.db_conf.prefix.rstrip('_') + '_'
@@ -38,6 +52,12 @@ class PostGISDB(object):
     def to_tablename(self, name):
         return self.table_prefix + name.lower()
 
+    def is_postgis_2(self):
+        cur = self.connection.cursor()
+        cur.execute('SELECT postgis_version()')
+        version_string = cur.fetchone()[0]
+        return version_string.strip()[0] == '2'
+
     @property
     def connection(self):
         if not self._connection:
@@ -55,16 +75,27 @@ class PostGISDB(object):
             self._connection.set_isolation_level(
                 psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
         return self._connection
-    
+
     def commit(self):
         self.connection.commit()
-    
+
     @property
     def cur(self):
         if self._cur is None:
             self._cur = self.connection.cursor()
         return self._cur
-    
+
+    @contextmanager
+    def savepoint(self, cur, raise_errors=False):
+        savepoint_name = 'savepoint' + uuid.uuid4().get_hex()
+        try:
+            cur.execute('SAVEPOINT %s' % savepoint_name)
+            yield
+        except psycopg2.ProgrammingError:
+            cur.execute('ROLLBACK TO SAVEPOINT %s' % savepoint_name)
+            if raise_errors:
+                raise
+
     def insert(self, mapping, insert_data, tries=0):
         insert_stmt = self.insert_stmt(mapping)
         try:
@@ -91,10 +122,24 @@ class PostGISDB(object):
                     self.connection.commit()
 
         self.connection.commit()
-    
+
+    def post_insert(self, mappings):
+        mappings = [m for m in mappings.values() if isinstance(m, (GeneralizedTable, Mapping))]
+        for mapping in mappings:
+            table_name = self.to_tablename(mapping.name)
+            self.create_geom_index(table_name)
+
+    def create_geom_index(self, table_name):
+        idx_name = '%s_geom' % table_name
+        cur = self.connection.cursor()
+        cur.execute("""
+            CREATE INDEX "%s" ON "%s" USING GIST (geometry)
+        """ % (idx_name,table_name))
+        self.connection.commit()
+
     def geom_wrapper(self, geom):
         return psycopg2.Binary(geom.wkb)
-    
+
     def reconnect(self):
         if self._connection:
             try:
@@ -128,14 +173,17 @@ class PostGISDB(object):
         for mapping in mappings:
             self.create_table(mapping)
 
+    def drop_table_or_view(self, cur, name):
+        with self.savepoint(cur):
+            cur.execute('DROP TABLE "' + name + '" CASCADE')
+        with self.savepoint(cur):
+            cur.execute('DROP VIEW "' + name + '" CASCADE')
+
     def create_table(self, mapping):
         tablename = self.table_prefix + mapping.name
         cur = self.connection.cursor()
-        cur.execute('SAVEPOINT pre_drop_tables')
-        try:
-            cur.execute('DROP TABLE "' + tablename + '" CASCADE')
-        except psycopg2.ProgrammingError:
-            cur.execute('ROLLBACK TO SAVEPOINT pre_drop_tables')
+
+        self.drop_table_or_view(cur, tablename)
 
         extra_fields = ''
         for n, t in mapping.fields:
@@ -145,7 +193,7 @@ class PostGISDB(object):
             serial_column = "id SERIAL PRIMARY KEY,"
         else:
             serial_column = ""
-            
+
         cur.execute("""
             CREATE TABLE "%s" (
                 %s
@@ -153,27 +201,42 @@ class PostGISDB(object):
                 %s
             );
         """ % (tablename, serial_column, extra_fields))
-        cur.execute("""
-            SELECT AddGeometryColumn ('', '%(tablename)s', 'geometry',
-                                      %(srid)s, '%(pg_geometry_type)s', 2)
-        """ % dict(tablename=tablename, srid=self.srid,
-                   pg_geometry_type=mapping.geom_type))
 
+        self.create_geometry_column(cur, tablename, mapping)
+
+        self.create_field_indices(cur=cur, mapping=mapping, tablename=tablename)
+
+
+    def create_geometry_column(self, cur, tablename, mapping):
+        if self.use_geometry_columns_table:
+            cur.execute("""
+                SELECT AddGeometryColumn ('', '%(tablename)s', 'geometry',
+                                          %(srid)s, '%(pg_geometry_type)s', 2)
+            """ % dict(tablename=tablename, srid=self.srid,
+                       pg_geometry_type=mapping.geom_type))
+        else:
+            cur.execute("""
+                ALTER TABLE %(tablename)s ADD COLUMN geometry geometry(%(pg_geometry_type)s, %(srid)s);
+            """ % dict(tablename=tablename, srid=self.srid,
+                       pg_geometry_type=mapping.geom_type))
+
+    def create_field_indices(self, cur, mapping, tablename):
         for n, t in mapping.fields:
             if isinstance(t, TrigramIndex):
                 cur.execute("""
                     CREATE INDEX "%(tablename)s_trgm_idx_%(column)s" ON "%(tablename)s" USING GIST ("%(column)s" gist_trgm_ops)
                 """ % dict(tablename=tablename, column=n))
+            if isinstance(t, (StringIndex, Index)):
+                cur.execute("""
+                    CREATE INDEX "%(tablename)s_idx_%(column)s" ON "%(tablename)s" ("%(column)s")
+                """ % dict(tablename=tablename, column=n))
 
-        cur.execute("""
-            CREATE INDEX "%(tablename)s_geom" ON "%(tablename)s" USING GIST (geometry)
-        """ % dict(tablename=tablename))
-    
     def swap_tables(self, new_prefix, existing_prefix, backup_prefix):
         cur = self.connection.cursor()
 
         self.remove_tables(backup_prefix)
-        
+        self.remove_views(backup_prefix)
+
         cur.execute('SELECT tablename FROM pg_tables WHERE tablename like %s', (existing_prefix + '%', ))
         existing_tables = []
         for row in cur:
@@ -182,6 +245,14 @@ class PostGISDB(object):
                 # check for overlapping prefixes: osm_ but not osm_new_ or osm_backup_
                 existing_tables.append(table_name)
 
+        cur.execute('SELECT viewname FROM pg_views WHERE viewname like %s', (existing_prefix + '%', ))
+        existing_views = []
+        for row in cur:
+            view_name = row[0]
+            if view_name.startswith(existing_prefix) and not view_name.startswith((new_prefix, backup_prefix)):
+                # check for overlapping prefixes: osm_ but not osm_new_ or osm_backup_
+                existing_views.append(view_name)
+
         cur.execute('SELECT indexname FROM pg_indexes WHERE indexname like %s', (existing_prefix + '%', ))
         existing_indexes = set()
         for row in cur:
@@ -189,7 +260,7 @@ class PostGISDB(object):
             if index_name.startswith(existing_prefix) and not index_name.startswith((new_prefix, backup_prefix)):
                 # check for overlapping prefixes: osm_ but not osm_new_ or osm_backup_
                 existing_indexes.add(index_name)
-        
+
         cur.execute('SELECT relname FROM pg_class WHERE relname like %s', (existing_prefix + '%_id_seq', ))
         existing_seq = set()
         for row in cur:
@@ -204,73 +275,100 @@ class PostGISDB(object):
             table_name = row[0]
             new_tables.append(table_name)
 
+        cur.execute('SELECT viewname FROM pg_views WHERE viewname like %s', (new_prefix + '%', ))
+        new_views = []
+        for row in cur:
+            view_name = row[0]
+            new_views.append(view_name)
+
         cur.execute('SELECT indexname FROM pg_indexes WHERE indexname like %s', (new_prefix + '%', ))
         new_indexes = set()
         for row in cur:
             index_name = row[0]
             new_indexes.add(index_name)
-        
+
         cur.execute('SELECT relname FROM pg_class WHERE relname like %s', (new_prefix + '%_id_seq', ))
         new_seq = []
         for row in cur:
             seq_name = row[0]
             new_seq.append(seq_name)
 
-        
+
         if not new_tables:
             raise RuntimeError('did not found tables to swap')
-        
+
         # rename existing tables (osm_) to backup_prefix (osm_backup_)
         for table_name in existing_tables:
             rename_to = table_name.replace(existing_prefix, backup_prefix)
             cur.execute('ALTER TABLE "%s" RENAME TO "%s"' % (table_name, rename_to))
 
             for idx in existing_indexes:
-                if idx in (table_name + '_geom', table_name + '_pkey') or idx.startswith(table_name + '_trgm_idx_'):
+                if idx in (table_name + '_geom', table_name + '_pkey') or idx.startswith(table_name + '_trgm_idx_') or idx.startswith(table_name + '_idx_'):
                     new_idx = idx.replace(table_name, rename_to, 1)
                     cur.execute('ALTER INDEX "%s" RENAME TO "%s"' % (idx, new_idx))
             if table_name + '_id_seq' in existing_seq:
                 cur.execute('ALTER SEQUENCE "%s" RENAME TO "%s"' % (table_name + '_id_seq', rename_to + '_id_seq'))
-            cur.execute('UPDATE geometry_columns SET f_table_name = %s WHERE f_table_name = %s', (rename_to, table_name))
-            
+            if self.use_geometry_columns_table:
+                cur.execute('UPDATE geometry_columns SET f_table_name = %s WHERE f_table_name = %s', (rename_to, table_name))
+
+        # rename existing views (osm_) to backup_prefix (osm_backup_)
+        for view_name in existing_views:
+            rename_to = view_name.replace(existing_prefix, backup_prefix)
+            cur.execute('ALTER VIEW "%s" RENAME TO "%s"' % (view_name, rename_to))
+
+            if self.use_geometry_columns_table:
+                cur.execute('UPDATE geometry_columns SET f_table_name = %s WHERE f_table_name = %s', (rename_to, view_name))
+
         # rename new tables (osm_new_) to existing_prefix (osm_)
         for table_name in new_tables:
             rename_to = table_name.replace(new_prefix, existing_prefix)
             cur.execute('ALTER TABLE "%s" RENAME TO "%s"' % (table_name, rename_to))
 
             for idx in new_indexes:
-                if idx in (table_name + '_geom', table_name + '_pkey') or idx.startswith(table_name + '_trgm_idx_'):
+                if idx in (table_name + '_geom', table_name + '_pkey') or idx.startswith(table_name + '_trgm_idx_') or idx.startswith(table_name + '_idx_'):
                     new_idx = idx.replace(table_name, rename_to, 1)
                     cur.execute('ALTER INDEX "%s" RENAME TO "%s"' % (idx, new_idx))
             if table_name + '_id_seq' in new_seq:
                 cur.execute('ALTER SEQUENCE "%s" RENAME TO "%s"' % (table_name + '_id_seq', rename_to + '_id_seq'))
-            cur.execute('UPDATE geometry_columns SET f_table_name = %s WHERE f_table_name = %s', (rename_to, table_name))
-        
+            if self.use_geometry_columns_table:
+                cur.execute('UPDATE geometry_columns SET f_table_name = %s WHERE f_table_name = %s', (rename_to, table_name))
+
+        # rename new views (osm_new_) to existing_prefix (osm_)
+        for view_name in new_views:
+            rename_to = view_name.replace(new_prefix, existing_prefix)
+            cur.execute('ALTER VIEW "%s" RENAME TO "%s"' % (view_name, rename_to))
+
+            if self.use_geometry_columns_table:
+                cur.execute('UPDATE geometry_columns SET f_table_name = %s WHERE f_table_name = %s', (rename_to, view_name))
+
+
     def remove_tables(self, prefix):
         cur = self.connection.cursor()
         cur.execute('SELECT tablename FROM pg_tables WHERE tablename like %s', (prefix + '%', ))
         remove_tables = [row[0] for row in cur]
-        
+
         for table_name in remove_tables:
             cur.execute("DROP TABLE %s CASCADE" % (table_name, ))
-            cur.execute("DELETE FROM geometry_columns WHERE f_table_name = %s", (table_name, ))
-        
+            if self.use_geometry_columns_table:
+                cur.execute("DELETE FROM geometry_columns WHERE f_table_name = %s", (table_name, ))
+
 
     def remove_views(self, prefix):
         cur = self.connection.cursor()
         cur.execute('SELECT viewname FROM pg_views WHERE viewname like %s', (prefix + '%', ))
         remove_views = [row[0] for row in cur]
-        
+
         for view_name in remove_views:
             cur.execute('DROP VIEW "%s" CASCADE' % (view_name, ))
-            cur.execute("DELETE FROM geometry_columns WHERE f_table_name = %s", (view_name, ))
-        
-    
+            if self.use_geometry_columns_table:
+                cur.execute("DELETE FROM geometry_columns WHERE f_table_name = %s", (view_name, ))
+
+
     def create_views(self, mappings, ignore_errors=False):
         for mapping in mappings.values():
             if isinstance(mapping, UnionView):
                 PostGISUnionView(self, mapping).create(ignore_errors=ignore_errors)
-    
+
     def create_generalized_tables(self, mappings):
         mappings = [m for m in mappings.values() if isinstance(m, GeneralizedTable)]
         for mapping in sorted(mappings, key=lambda x: x.name, reverse=True):
@@ -298,7 +396,6 @@ class PostGISDB(object):
         cur.execute("VACUUM ANALYZE")
         self.connection.set_isolation_level(old_isolation_level)
 
-
 class PostGISUnionView(object):
     def __init__(self, db, mapping):
         self.mapping = mapping
@@ -322,10 +419,11 @@ class PostGISUnionView(object):
         selects = '\nUNION ALL\n'.join(selects)
 
         stmt = 'CREATE VIEW "%s" as (\n%s\n)' % (self.view_name, selects)
-        
+
         return stmt
 
     def _geom_table_stmt(self):
+        assert self.db.use_geometry_columns_table
         stmt = "insert into geometry_columns values ('', 'public', '%s', 'geometry', 2, %d, 'GEOMETRY')" % (
             self.view_name, self.db.srid)
         return stmt
@@ -349,22 +447,18 @@ class PostGISUnionView(object):
     def create(self, ignore_errors):
         cur = self.db.connection.cursor()
         cur.execute('BEGIN')
-        try:
-            cur.execute('SAVEPOINT pre_create_view')
-            cur.execute('SELECT * FROM pg_views WHERE viewname = %s', (self.view_name, ))
-            if cur.fetchall():
-                cur.execute('DROP VIEW %s' % (self.view_name, ))
+
+        self.db.drop_table_or_view(cur, self.view_name)
+
+        with self.db.savepoint(cur, raise_errors=not ignore_errors):
             cur.execute(self._view_stmt())
-        except psycopg2.ProgrammingError:
-            cur.execute('ROLLBACK TO SAVEPOINT pre_create_view')
-            if not ignore_errors:
-                raise
 
-        cur.execute('SELECT * FROM geometry_columns WHERE f_table_name = %s', (self.view_name, ))
-        if cur.fetchall():
-            # drop old entry to handle changes of SRID
-            cur.execute('DELETE FROM geometry_columns WHERE f_table_name = %s', (self.view_name, ))
-        cur.execute(self._geom_table_stmt())
+        if self.db.use_geometry_columns_table:
+            cur.execute('SELECT * FROM geometry_columns WHERE f_table_name = %s', (self.view_name, ))
+            if cur.fetchall():
+                # drop old entry to handle changes of SRID
+                cur.execute('DELETE FROM geometry_columns WHERE f_table_name = %s', (self.view_name, ))
+            cur.execute(self._geom_table_stmt())
 
 
 class PostGISGeneralizedTable(object):
@@ -373,11 +467,8 @@ class PostGISGeneralizedTable(object):
         self.mapping = mapping
         self.table_name = db.to_tablename(mapping.name)
 
-    def _idx_stmt(self):
-        return 'CREATE INDEX "%s_geom" ON "%s" USING GIST (geometry)' % (
-            self.table_name, self.table_name)
-
     def _geom_table_stmt(self):
+        assert self.db.use_geometry_columns_table
         stmt = "insert into geometry_columns values ('', 'public', '%s', 'geometry', 2, %d, 'GEOMETRY')" % (
             self.table_name, self.db.srid)
         return stmt
@@ -386,18 +477,17 @@ class PostGISGeneralizedTable(object):
         fields = ', '.join([n for n, t in self.mapping.fields])
         if fields:
             fields += ','
+        where = ' WHERE ST_IsValid(ST_SimplifyPreserveTopology(geometry, %f))' % (self.mapping.tolerance,)
         if self.mapping.where:
-            where = ' WHERE ' + self.mapping.where
-        else:
-            where = ''
-        
+            where = ' AND '.join([where, self.mapping.where])
+
         if config.imposm_pg_serial_id:
             serial_column = "id, "
         else:
             serial_column = ""
-    
+
         return """CREATE TABLE "%s" AS (SELECT %s osm_id, %s
-            ST_Simplify(geometry, %f) as geometry from "%s"%s)""" % (
+            ST_SimplifyPreserveTopology(geometry, %f) as geometry from "%s"%s)""" % (
             self.table_name, serial_column, fields, self.mapping.tolerance,
             self.db.to_tablename(self.mapping.origin.name),
             where)
@@ -405,20 +495,23 @@ class PostGISGeneralizedTable(object):
     def create(self):
         cur = self.db.connection.cursor()
         cur.execute('BEGIN')
-        try:
-            cur.execute('SAVEPOINT pre_drop_table')
-            cur.execute('DROP TABLE "%s" CASCADE' % (self.table_name, ))
-        except psycopg2.ProgrammingError:
-            cur.execute('ROLLBACK TO SAVEPOINT pre_drop_table')
-        
+
+        self.db.drop_table_or_view(cur, self.table_name)
+
         cur.execute(self._stmt())
-        cur.execute(self._idx_stmt())
 
-        cur.execute('SELECT * FROM geometry_columns WHERE f_table_name = %s', (self.table_name, ))
-        if cur.fetchall():
-            # drop old entry to handle changes of SRID
-            cur.execute('DELETE FROM geometry_columns WHERE f_table_name = %s', (self.table_name, ))
-        cur.execute(self._geom_table_stmt())
+        if self.db.use_geometry_columns_table:
+            cur.execute('SELECT * FROM geometry_columns WHERE f_table_name = %s', (self.table_name, ))
+            if cur.fetchall():
+                # drop old entry to handle changes of SRID
+                cur.execute('DELETE FROM geometry_columns WHERE f_table_name = %s', (self.table_name, ))
+            cur.execute(self._geom_table_stmt())
 
 class TrigramIndex(object):
     pass
+
+class StringIndex(object):
+    pass
+
+class Index(object):
+    pass
diff --git a/imposm/dbimporter.py b/imposm/dbimporter.py
index 652c5c2..3f00732 100644
--- a/imposm/dbimporter.py
+++ b/imposm/dbimporter.py
@@ -1,11 +1,11 @@
-# Copyright 2011 Omniscale (http://omniscale.com)
-# 
+# Copyright 2011, 2012 Omniscale (http://omniscale.com)
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 #     http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -16,7 +16,7 @@ from collections import defaultdict
 from multiprocessing import Process
 
 import threading
-from Queue import Queue 
+from Queue import Queue
 
 from imposm.base import OSMElem
 from imposm.geom import IncompletePolygonError
@@ -41,6 +41,7 @@ class ImporterProcess(Process):
         self.osm_cache = osm_cache
         self.db = db
         self.dry_run = dry_run
+        self.db_queue = Queue(256)
 
     def run(self):
         self.setup()
@@ -49,8 +50,9 @@ class ImporterProcess(Process):
         self.teardown()
 
     def setup(self):
-        self.db_queue = Queue(256)
-        self.db_importer = DBImporter(self.db_queue, self.db, dry_run=self.dry_run)
+        self.db_importer = threading.Thread(target=self.db_importer,
+            args=(self.db_queue, self.db),
+            kwargs=dict(dry_run=self.dry_run))
         self.db_importer.start()
 
     def doit(self):
@@ -61,6 +63,35 @@ class ImporterProcess(Process):
         self.db_queue.put(None)
         self.db_importer.join()
 
+class TupleBasedImporter(ImporterProcess):
+    def db_importer(self, queue, db, dry_run=False):
+        db.reconnect()
+        mappings = defaultdict(list)
+
+        while True:
+            data = queue.get()
+            if data is None:
+                break
+
+            mapping, osm_id, osm_elem, extra_args = data
+            insert_data = mappings[mapping]
+
+            if isinstance(osm_elem.geom, (list)):
+                for geom in osm_elem.geom:
+                    insert_data.append((osm_id, db.geom_wrapper(geom)) + tuple(extra_args))
+            else:
+                insert_data.append((osm_id, db.geom_wrapper(osm_elem.geom)) + tuple(extra_args))
+
+            if len(insert_data) >= 128:
+                if not dry_run:
+                    db.insert(mapping, insert_data)
+                del mappings[mapping]
+
+        # flush
+        for mapping, insert_data in mappings.iteritems():
+            if not dry_run:
+                db.insert(mapping, insert_data)
+
     def insert(self, mappings, osm_id, geom, tags):
         inserted = False
         for type, ms in mappings:
@@ -76,15 +107,81 @@ class ImporterProcess(Process):
                     pass
         return inserted
 
+class DictBasedImporter(ImporterProcess):
+    def db_importer(self, queue, db, dry_run=False):
+        db.reconnect()
+        insert_data = []
+
+        while True:
+            data = queue.get()
+            if data is None:
+                break
+
+            data['geometry'] = db.geom_wrapper(data['geometry'])
+            insert_data.append(data)
+
+            if len(insert_data) >= 128:
+                if not dry_run:
+                    db.insert(insert_data)
+                insert_data = []
+        # flush
+        if not dry_run:
+            db.insert(insert_data)
+
+
+    def insert(self, mappings, osm_id, geom, tags):
+        inserted = False
+        osm_objects = {}
+
+        for type, ms in mappings:
+            for m in ms:
+                osm_elem = OSMElem(osm_id, geom, type, tags)
+                try:
+                    m.filter(osm_elem)
+                except DropElem:
+                    continue
+
+                if m.geom_type in osm_objects:
+                    obj = osm_objects[m.geom_type]
+                    obj['fields'].update(m.field_dict(osm_elem))
+                    obj['fields'][type[0]] = type[1]
+                    obj['mapping_names'].append(m.name)
+                else:
+                    try:
+                        m.build_geom(osm_elem)
+                    except DropElem:
+                        continue
+                    obj = {}
+                    obj['fields'] = m.field_dict(osm_elem)
+                    obj['fields'][type[0]] = type[1]
+                    obj['osm_id'] = osm_id
+                    obj['geometry'] = osm_elem.geom
+                    obj['mapping_names'] = [m.name]
+                    osm_objects[m.geom_type] = obj
+
+                inserted = True
+
+        for obj in osm_objects.itervalues():
+            if isinstance(obj['geometry'], (list, )):
+                for geom in obj['geometry']:
+                    obj_part = obj.copy()
+                    obj_part['geometry'] = geom
+                    self.db_queue.put(obj_part)
+            else:
+                self.db_queue.put(obj)
+
+        return inserted
+
+
 class NodeProcess(ImporterProcess):
     name = 'node'
 
-    def doit(self):        
+    def doit(self):
         while True:
             nodes = self.in_queue.get()
             if nodes is None:
                 break
-            
+
             for node in nodes:
                 mappings = self.mapper.for_nodes(node.tags)
                 if not mappings:
@@ -92,6 +189,12 @@ class NodeProcess(ImporterProcess):
 
                 self.insert(mappings, node.osm_id, node.coord, node.tags)
 
+class NodeProcessDict(NodeProcess, DictBasedImporter):
+    pass
+
+class NodeProcessTuple(NodeProcess, TupleBasedImporter):
+    pass
+
 class WayProcess(ImporterProcess):
     name = 'way'
 
@@ -118,7 +221,7 @@ class WayProcess(ImporterProcess):
                         skip_id = inserted_ways.next()
                     except StopIteration:
                         skip_id = 2**64
-                
+
                 if skip_id == way.osm_id:
                     continue
 
@@ -129,17 +232,22 @@ class WayProcess(ImporterProcess):
                 coords = coords_cache.get_coords(way.refs)
 
                 if not coords:
-                    print 'missing coords for way %s' % (way.osm_id, )
+                    log.debug('missing coords for way %s', way.osm_id)
                     continue
 
                 self.insert(mappings, way.osm_id, coords, way.tags)
 
+class WayProcessDict(WayProcess, DictBasedImporter):
+    pass
+
+class WayProcessTuple(WayProcess, TupleBasedImporter):
+    pass
 
 class RelationProcess(ImporterProcess):
     name = 'relation'
 
     def __init__(self, in_queue, db, mapper, osm_cache, dry_run, inserted_way_queue):
-        ImporterProcess.__init__(self, in_queue, db, mapper, osm_cache, dry_run)
+        super(RelationProcess, self).__init__(in_queue, db, mapper, osm_cache, dry_run)
         self.inserted_way_queue = inserted_way_queue
 
     def doit(self):
@@ -150,7 +258,7 @@ class RelationProcess(ImporterProcess):
             relations = self.in_queue.get()
             if relations is None:
                 break
-            
+
             for relation in relations:
                 builder = RelationBuilder(relation, ways_cache, coords_cache)
                 try:
@@ -165,41 +273,8 @@ class RelationProcess(ImporterProcess):
                     if inserted:
                         builder.mark_inserted_ways(self.inserted_way_queue)
 
+class RelationProcessDict(RelationProcess, DictBasedImporter):
+    pass
 
-class DBImporter(threading.Thread):
-    def __init__(self, queue, db, dry_run=False):
-        threading.Thread.__init__(self)
-        self.db = db
-        self.db.reconnect()
-        self.queue = queue
-        self.cur = None
-        self.dry_run = dry_run
-        self.mappings = defaultdict(list)
-
-    def run(self):
-        # cProfile.runctx('self.doit()', globals(), locals())
-        # def doit(self):
-        while True:
-            data = self.queue.get()
-            if data is None:
-                break
-
-            mapping, osm_id, osm_elem, extra_args = data
-            insert_data = self.mappings[mapping]
-            
-            if isinstance(osm_elem.geom, (list)):
-                for geom in osm_elem.geom:
-                    insert_data.append((osm_id, self.db.geom_wrapper(geom)) + tuple(extra_args))
-            else:
-                insert_data.append((osm_id, self.db.geom_wrapper(osm_elem.geom)) + tuple(extra_args))
-
-            if len(insert_data) >= 128:
-                if not self.dry_run:
-                    self.db.insert(mapping, insert_data)
-                del self.mappings[mapping]
-
-        # flush
-        for mapping, insert_data in self.mappings.iteritems():
-            if not self.dry_run:
-                self.db.insert(mapping, insert_data)
-
+class RelationProcessTuple(RelationProcess, TupleBasedImporter):
+    pass
diff --git a/imposm/geom.py b/imposm/geom.py
index aa374ff..42a5715 100644
--- a/imposm/geom.py
+++ b/imposm/geom.py
@@ -1,11 +1,11 @@
-# Copyright 2011 Omniscale (http://omniscale.com)
-# 
+# Copyright 2011, 2012 Omniscale (http://omniscale.com)
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 #     http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -15,13 +15,24 @@
 from __future__ import division
 
 import math
-
+import codecs
+import os
+import shapely.geometry
 import shapely.geos
+import shapely.prepared
 from shapely.geometry.base import BaseGeometry
 from shapely import geometry
 from shapely import wkt
+from shapely.ops import cascaded_union, linemerge
+from shapely.topology import TopologicalError
+
+try:
+    import rtree
+except ImportError:
+    rtree = None
 
 from imposm import config
+from imposm.util.geom import load_polygons, load_datasource, build_multipolygon
 
 import logging
 log = logging.getLogger(__name__)
@@ -42,12 +53,12 @@ SHAPELY_SUPPORTS_BUFFER = shapely.geos.geos_capi_version >= (1, 6, 0)
 def validate_and_simplify(geom, meter_units=False):
     if SHAPELY_SUPPORTS_BUFFER:
         try:
-            # buffer(0) is nearly fast as is_valid 
+            # buffer(0) is nearly fast as is_valid
             return geom.buffer(0)
         except ValueError:
             # shapely raises ValueError if buffer(0) result is empty
             raise InvalidGeometryError('geometry is empty')
-    
+
     orig_geom = geom
     if not geom.is_valid:
         tolerance = TOLERANCE_METERS if meter_units else TOLERANCE_DEEGREES
@@ -77,8 +88,8 @@ class GeomBuilder(object):
         if geom_wkt is None or geom is None:
             # unable to build valid wkt (non closed polygon, etc)
             raise InvalidGeometryError()
-        return geom    
-    
+        return geom
+
     def check_geom_type(self, geom):
         return
 
@@ -96,23 +107,23 @@ class GeomBuilder(object):
         if geom is None:
             # unable to build valid wkt (non closed polygon, etc)
             raise InvalidGeometryError()
-        return geom    
+        return geom
 
 class PointBuilder(GeomBuilder):
     def to_wkt(self, data):
         if len(data) != 2:
             return None
         return 'POINT(%f %f)' % data
-    
+
     def to_geom(self, data):
         if len(data) != 2:
             return None
         return geometry.Point(*data)
-    
+
     def check_geom_type(self, geom):
         if geom.type != 'Point':
             raise InvalidGeometryError('expected Point, got %s' % geom.type)
-    
+
     def build_checked_geom(self, osm_elem, validate=False):
         geom = self.build_geom(osm_elem)
         if not validate or geom.is_valid:
@@ -126,16 +137,16 @@ class PolygonBuilder(GeomBuilder):
         if len(data) >= 4 and data[0] == data[-1]:
             return 'POLYGON((' + ', '.join('%f %f' % p for p in data) + '))'
         return None
-    
+
     def to_geom(self, data):
         if len(data) >= 4 and data[0] == data[-1]:
             return geometry.Polygon(data)
         return None
-    
+
     def check_geom_type(self, geom):
         if geom.type not in ('Polygon', 'MultiPolygon'):
             raise InvalidGeometryError('expected Polygon or MultiPolygon, got %s' % geom.type)
-    
+
     def build_checked_geom(self, osm_elem, validate=False):
         geom = self.build_geom(osm_elem)
         if not validate:
@@ -181,3 +192,232 @@ class LineStringBuilder(GeomBuilder):
         else:
             raise InvalidGeometryError('invalid geometry for %s: %s, %s' %
                                        (osm_elem.osm_id, geom, osm_elem.coords))
+
+def tile_bbox(bbox, grid_width):
+    """
+    Tile bbox into multiple sub-boxes, each of `grid_width` size.
+
+    >>> list(tile_bbox((-1, 1, 0.49, 1.51), 0.5)) #doctest: +NORMALIZE_WHITESPACE
+    [(-1.0, 1.0, -0.5, 1.5),
+     (-1.0, 1.5, -0.5, 2.0),
+     (-0.5, 1.0, 0.0, 1.5),
+     (-0.5, 1.5, 0.0, 2.0),
+     (0.0, 1.0, 0.5, 1.5),
+     (0.0, 1.5, 0.5, 2.0)]
+    """
+    min_x = math.floor(bbox[0]/grid_width) * grid_width
+    min_y = math.floor(bbox[1]/grid_width) * grid_width
+    max_x = math.ceil(bbox[2]/grid_width) * grid_width
+    max_y = math.ceil(bbox[3]/grid_width) * grid_width
+
+    x_steps = (max_x - min_x) / grid_width
+    y_steps = (max_y - min_y) / grid_width
+
+    for x in xrange(int(x_steps)):
+        for y in xrange(int(y_steps)):
+            yield (
+                min_x + x * grid_width,
+                min_y + y * grid_width,
+                min_x + (x + 1) * grid_width,
+                min_y + (y + 1)* grid_width,
+            )
+
+def split_polygon_at_grid(geom, grid_width=0.1):
+    """
+    >>> p = list(split_polygon_at_grid(geometry.box(-0.5, 1, 0.2, 2), 1))
+    >>> p[0].contains(geometry.box(-0.5, 1, 0, 2))
+    True
+    >>> p[0].area == geometry.box(-0.5, 1, 0, 2).area
+    True
+    >>> p[1].contains(geometry.box(0, 1, 0.2, 2))
+    True
+    >>> p[1].area == geometry.box(0, 1, 0.2, 2).area
+    True
+    """
+    if not geom.is_valid:
+        geom = geom.buffer(0)
+    for split_box in tile_bbox(geom.bounds, grid_width):
+        try:
+            polygon_part = geom.intersection(shapely.geometry.box(*split_box))
+        except TopologicalError:
+            continue
+        if not polygon_part.is_empty:
+            yield polygon_part
+
+def load_geom(source):
+    geom = None
+    # source is a wkt file
+    if os.path.exists(os.path.abspath(source)):
+        data = None
+        with open(os.path.abspath(source), 'r') as fp:
+            data = fp.read(50)
+        # load WKT geometry and remove leading whitespaces
+        if data.lower().lstrip().startswith(('polygon', 'multipolygon')):
+            geom = load_polygons(source)
+    # source is an OGR datasource
+    if geom is None:
+        geom = load_datasource(source)
+
+    if geom:
+        # get the first and maybe only geometry
+        if not check_wgs84_srs(geom[0]):
+            log.error('Geometry is not in EPSG:4326')
+            return None
+        if rtree:
+            return LimitRTreeGeometry(geom)
+        else:
+            log.info('You should install RTree for large --limit-to polygons')
+            return LimitPolygonGeometry(build_multipolygon(geom)[1])
+    return None
+    
+def check_wgs84_srs(geom):
+    bbox = geom.bounds
+    if bbox[0] >= -180 and bbox[1] >= -90 and bbox[2] <= 180 and bbox[3] <= 90:
+        return True
+    return False
+
+class EmtpyGeometryError(Exception):
+    pass
+
+class LimitPolygonGeometry(object):
+    def __init__(self, shapely_geom):
+        self._geom = shapely_geom
+        self._prepared_geom = None
+        self._prepared_counter = 0
+        self._prepared_max = 100000
+
+    @property
+    def geom(self):
+        # GEOS internal data structure for prepared geometries grows over time,
+        # recreate to limit memory consumption
+        if not self._prepared_geom or self._prepared_counter > self._prepared_max:
+            print 'create prepared'
+            self._prepared_geom = shapely.prepared.prep(self._geom)
+            self._prepared_counter = 0
+        self._prepared_counter += 1
+        return self._prepared_geom
+
+    def intersection(self, geom):
+        if self.geom.contains_properly(geom):
+            # no need to limit contained geometries
+            return geom
+
+        new_geom = None
+        if self.geom.intersects(geom):
+            try:
+                # can not use intersection with prepared geom
+                new_geom = self._geom.intersection(geom)
+            except TopologicalError:
+                pass
+
+        if not new_geom or new_geom.is_empty:
+            raise EmtpyGeometryError('No intersection or empty geometry')
+
+        new_geom = filter_geometry_by_type(new_geom, geom.type)
+        if new_geom:
+            return new_geom
+
+        raise EmtpyGeometryError('No intersection or empty geometry')
+
+def filter_geometry_by_type(geometry, geom_type):
+    """
+    Filter (multi)geometry for compatible `geom_type`,
+    because we can't insert points into linestring tables for example
+    """
+    if geometry.type == geom_type:
+        # same type is fine
+        return geometry
+
+    if geometry.type == 'MultiPolygon' and geom_type == 'Polygon':
+        # polygon mappings should also support multipolygons
+        return geometry
+
+    if hasattr(geometry, 'geoms'):
+        # GeometryCollection or MultiLineString? return list of geometries
+        geoms = []
+        for part in geometry.geoms:
+            # only parts with same type
+            if part.type == geom_type:
+                geoms.append(part)
+
+        if geoms:
+            return geoms
+
+    return None
+
+def flatten_polygons(polygons):
+    for polygon in polygons:
+        if polygon.type == 'MultiPolygon':
+            for p in polygon.geoms:
+                yield p
+        else:
+            yield polygon
+
+def flatten_linestrings(linestrings):
+    for linestring in linestrings:
+        if linestring.type == 'MultiLineString':
+            for ls in linestring.geoms:
+                yield ls
+        else:
+            yield linestring
+
+class LimitRTreeGeometry(object):
+    def __init__(self, polygons):
+        index = rtree.index.Index()
+        sub_polygons = []
+        part_idx = 0
+        for polygon in polygons:
+            for part in split_polygon_at_grid(polygon):
+                sub_polygons.append(part)
+                index.insert(part_idx, part.bounds)
+                part_idx += 1
+
+        self.polygons = sub_polygons
+        self.index = index
+
+    def intersection(self, geom):
+        intersection_ids = list(self.index.intersection(geom.bounds))
+
+        if not intersection_ids:
+            raise EmtpyGeometryError('No intersection or empty geometry')
+
+        intersections = []
+        for i in intersection_ids:
+
+            polygon = self.polygons[i]
+
+            if polygon.contains(geom):
+                return geom
+
+            if polygon.intersects(geom):
+                try:
+                    new_geom_part = polygon.intersection(geom)
+                    new_geom_part = filter_geometry_by_type(new_geom_part, geom.type)
+                    if new_geom_part:
+                        if len(intersection_ids) == 1:
+                            return new_geom_part
+
+                        if isinstance(new_geom_part, list):
+                            intersections.extend(new_geom_part)
+                        else:
+                            intersections.append(new_geom_part)
+                except TopologicalError:
+                        pass
+
+        if not intersections:
+            raise EmtpyGeometryError('No intersection or empty geometry')
+
+        # intersections from multiple sub-polygons
+        # try to merge them back to a single geometry
+        if geom.type.endswith('Polygon'):
+            union = cascaded_union(list(flatten_polygons(intersections)))
+        elif geom.type.endswith('LineString'):
+            union = linemerge(list(flatten_linestrings(intersections)))
+            if union.type == 'MultiLineString':
+                union = list(union.geoms)
+        elif geom.type == 'Point':
+            union = intersections[0]
+        else:
+            log.warn('unexpexted geometry type %s', geom.type)
+            raise EmtpyGeometryError()
+        return union
diff --git a/imposm/mapping.py b/imposm/mapping.py
index e07732e..7ef7c87 100644
--- a/imposm/mapping.py
+++ b/imposm/mapping.py
@@ -1,12 +1,12 @@
 # -:- encoding: UTF8 -:-
 # Copyright 2011 Omniscale (http://omniscale.com)
-# 
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 #     http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -45,11 +45,11 @@ default_name_field = None
 def set_default_name_type(type, column_name='name'):
     """
     Set new default type for 'name' field.
-    
+
     ::
-        
+
         set_default_name_type(LocalizedName(['name:en', 'int_name', 'name']))
-    
+
     """
     global default_name_field
     default_name_field = column_name, type
@@ -90,11 +90,11 @@ class Mapping(object):
         self.name = name
         self.mapping = mapping
         self.fields = fields or tuple(self.fields)
+        self.limit_to_polygon = None
         if with_type_field is not None:
             # allow subclass to define other default by setting it as class variable
             self.with_type_field = with_type_field
-        if self.with_type_field:
-            self._add_type_field()
+        self._add_type_field()
         self._add_name_field()
         if field_filter:
             self.field_filter = field_filter
@@ -108,11 +108,18 @@ class Mapping(object):
                 self.fields = (default_name_field,) + self.fields
             else:
                 self.fields = (('name', Name()),) + self.fields
-        
+
     def _add_type_field(self):
         """
         Add type field.
         """
+        if not self.with_type_field:
+            return
+
+        for name, type_ in self.fields:
+            if name == 'type':
+                # do not add type field if already present
+                return
         self.fields = (('type', Type()), ) + self.fields
 
     @property
@@ -120,12 +127,12 @@ class Mapping(object):
         if not self._insert_stmt:
             self._insert_stmt = self.table('osm_' + self.name, self).insert_stmt
         return self._insert_stmt
-    
+
     def extra_field_names(self):
         extra_field_names = []
         for field_name, field_filter in self.field_filter:
             extra_field_names.append(field_name)
-        
+
         for field_name, field in self.fields:
             field_names = field.extra_fields()
             if field_names is not None:
@@ -133,27 +140,39 @@ class Mapping(object):
             else:
                 extra_field_names.append(field_name)
         return extra_field_names
-    
+
     def build_geom(self, osm_elem):
         try:
             geom = self.geom_builder.build_checked_geom(osm_elem)
+            if self.limit_to_polygon is not None:
+                geom = self.limit_to_polygon.intersection(geom)
             osm_elem.geom = geom
         except imposm.geom.InvalidGeometryError, ex:
             raise DropElem('invalid geometry: %s' % (ex, ))
-    
+        except imposm.geom.EmtpyGeometryError, ex:
+            raise DropElem(ex)
+
     def field_values(self, osm_elem):
         return [t.value(osm_elem.tags.get(n), osm_elem) for n, t in self.fields]
-    
+
+    def field_dict(self, osm_elem):
+        result = dict((n, t.value(osm_elem.tags.get(n), osm_elem)) for n, t in self.fields)
+        if self.with_type_field:
+            del result['type']
+        result[osm_elem.cls] = osm_elem.type
+        return result
+
     def filter(self, osm_elem):
         [t.filter(osm_elem.tags.get(n), osm_elem) for n, t in self.field_filter]
-    
+
     def __repr__(self):
         return '<Mapping for %s>' % self.name
 
 
 class TagMapper(object):
-    def __init__(self, mappings):
+    def __init__(self, mappings, limit_to=None):
         self.mappings = mappings
+        self.limit_to_polygon = limit_to
         self._init_map()
 
     def _init_map(self):
@@ -174,7 +193,7 @@ class TagMapper(object):
             elif mapping.table is PolygonTable:
                 tags = self.polygon_tags
                 add_to = self.polygon_mappings
-            
+
             for extra in mapping.extra_field_names():
                 tags.setdefault(extra, set()).add('__any__')
 
@@ -184,11 +203,14 @@ class TagMapper(object):
                     tags.setdefault(tag, set()).add(type)
                     add_to[tag].setdefault(type, []).append(mapping)
 
+            # add limit_to polygon to each mapping
+            mapping.limit_to_polygon = self.limit_to_polygon
+
     def for_nodes(self, tags):
         return self._mapping_for_tags(self.point_mappings, tags)
 
     def for_ways(self, tags):
-        return (self._mapping_for_tags(self.line_mappings, tags) + 
+        return (self._mapping_for_tags(self.line_mappings, tags) +
                 self._mapping_for_tags(self.polygon_mappings, tags))
 
     def for_relations(self, tags):
@@ -218,7 +240,7 @@ class TagMapper(object):
         tags = dict()
         for k, v in self.line_tags.iteritems():
             tags.setdefault(k, set()).update(v)
-        
+
         for k, v in self.polygon_tags.iteritems():
             tags.setdefault(k, set()).update(v)
         return self._tag_filter(tags)
@@ -272,7 +294,7 @@ class TagMapper(object):
                         new_mappings.append(proc)
                 if new_mappings:
                     result.append(((tag_name, tag_value), tuple(new_mappings)))
-        
+
         return result
 
 
@@ -287,7 +309,7 @@ class PolygonTable(object):
 class Points(Mapping):
     """
     Table class for point features.
-    
+
     :PostGIS datatype: POINT
     """
     table = PointTable
@@ -297,7 +319,7 @@ class Points(Mapping):
 class LineStrings(Mapping):
     """
     Table class for line string features.
-    
+
     :PostGIS datatype: LINESTRING
     """
     table = LineStringTable
@@ -307,7 +329,7 @@ class LineStrings(Mapping):
 class Polygons(Mapping):
     """
     Table class for polygon features.
-    
+
     :PostGIS datatype: GEOMETRY (POLYGON does not support multi-polygons)
     """
     table = PolygonTable
@@ -350,7 +372,7 @@ class UnionView(object):
         if 'type' not in self.fields and any(m.with_type_field for m in self.mappings):
             self.fields += (('type', None), )
 
-        
+
 class DropElem(Exception):
     pass
 
@@ -360,12 +382,12 @@ class FieldType(object):
         """
         List with names of fields (keys) that should be processed
         during read-phase.
-        
+
         Return ``None`` to use the field name from the mapping.
         Return ``[]`` if no extra fields (keys) are required.
         """
         return None
-    
+
     def value(self, val, osm_elem):
         return val
 
@@ -393,7 +415,7 @@ class Type(FieldType):
                 ('road_class', Type(),),
             ),
         )
-    
+
     :PostgreSQL datatype: VARCHAR(255)
 
     .. versionadded:: 2.4.0
@@ -411,11 +433,11 @@ class Class(FieldType):
     """
     Field for class values (i.e. the *key* of the mapped key/value).
 
-    Use this if you want to store the key that was used for 
+    Use this if you want to store the key that was used for
     this mapping. For example, the following mapping will
     create a column ``class`` that will have the value
     ``landuse`` or ``natural``, depending on the feature.
-    
+
     ::
 
         landusages = Polygons(
@@ -434,7 +456,7 @@ class Class(FieldType):
         )
 
     :PostgreSQL datatype: VARCHAR(255)
-    
+
     .. versionadded:: 2.4.0
 
     """
@@ -449,7 +471,7 @@ class Class(FieldType):
 class String(FieldType):
     """
     Field for string values.
-    
+
     :PostgreSQL datatype: VARCHAR(255)
     """
     column_type = "VARCHAR(255)"
@@ -457,14 +479,14 @@ class String(FieldType):
 class Name(String):
     """
     Field for name values.
-    
+
     Filters out common FixMe values.
-    
+
     :PostgreSQL datatype: VARCHAR(255)
 
     .. versionadded:: 2.3.0
     """
-    
+
     filter_out_names = set((
         'fixme', 'fix me', 'fix-me!',
         '0', 'none', 'n/a', 's/n',
@@ -486,18 +508,18 @@ class LocalizedName(Name):
     Field for localized name values.
     Checks different name keys and uses the first key with
     a valid value.
-    
+
     :param coalesce: list of name keys to check
     :PostgreSQL datatype: VARCHAR(255)
-    
+
     .. versionadded:: 2.3.0
     """
     def __init__(self, coalesce=['name', 'int_name']):
         self.coalesce_keys = coalesce
-    
+
     def extra_fields(self):
         return self.coalesce_keys
-    
+
     def value(self, val, osm_elem):
         for key in self.coalesce_keys:
             val = osm_elem.tags.get(key)
@@ -512,7 +534,7 @@ class Bool(FieldType):
     """
     Field for boolean values.
     Converts false, no, 0 to False and true, yes, 1 to True.
-    
+
     :PostgreSQL datatype: SMALLINT
     """
     # there was a reason this is not BOOL
@@ -543,7 +565,7 @@ class Direction(FieldType):
     Converts `yes`, `true` and `1` to ``1`` for one ways in the direction of
     the way, `-1` to ``-1`` for one ways against the direction of the way and
     ``0`` for all other values.
-    
+
     :PostgreSQL datatype: SMALLINT
     """
     column_type = "SMALLINT"
@@ -559,36 +581,36 @@ class Direction(FieldType):
 class PseudoArea(FieldType):
     """
     Field for the (pseudo) area of a polygon in square meters.
-    
+
     The value is just an approximation since the geometries are in
     EPSG:4326 and not in a equal-area projection. The approximation
     is good for smaller polygons (<1%) and should be precise enough
     to compare geometries for rendering order (larger below smaller).
-    
+
     The area of the geometry is multiplied by the cosine of
     the mid-latitude to compensate the reduced size towards
     the poles.
-    
+
     :PostgreSQL datatype: REAL
-    
+
     .. versionadded:: 2.3.0
     """
-    
+
     column_type = "REAL"
-    
+
     def value(self, val, osm_elem):
         area = osm_elem.geom.area
         if not area:
             return None
-        
+
         extent = osm_elem.geom.bounds
         mid_lat = extent[1] + (abs(extent[3] - extent[1]) / 2)
         sqr_deg = area * math.cos(math.radians(mid_lat))
-        
+
         # convert deg^2 to m^2
         sqr_m = (math.sqrt(sqr_deg) * (40075160 / 360))**2
         return sqr_m
-    
+
     def extra_fields(self):
         return []
 
@@ -597,7 +619,7 @@ class OneOfInt(FieldType):
     Field type for integer values.
     Converts values to integers, drops element if is not included in
     ``values``.
-    
+
     :PostgreSQL datatype: SMALLINT
     """
     column_type = "SMALLINT"
@@ -614,7 +636,7 @@ class Integer(FieldType):
     """
     Field type for integer values.
     Converts values to integers, defaults to ``NULL``.
-    
+
     :PostgreSQL datatype: INTEGER
     """
     column_type = "INTEGER"
@@ -628,12 +650,12 @@ class Integer(FieldType):
 class ZOrder(FieldType):
     """
     Field type for z-ordering based on the feature type.
-    
+
     :param types: list of mapped feature types,
         from highest to lowest ranking
     :PostgreSQL datatype: SMALLINT
     """
-    
+
     column_type = "SMALLINT"
 
     def __init__(self, types):
@@ -651,12 +673,12 @@ class ZOrder(FieldType):
 class WayZOrder(FieldType):
     """
     Field type for z-ordered based on highway types.
-    
+
     Ordering based on the osm2pgsql z-ordering:
     From ``roads`` = 3 to ``motorways`` = 9, ``railway`` = 7 and unknown = 0.
     Ordering changes with ``tunnels`` by -10, ``bridges`` by +10 and
     ``layer`` by 10 * ``layer``.
-    
+
     :PostgreSQL datatype: SMALLINT
     """
 
@@ -680,7 +702,7 @@ class WayZOrder(FieldType):
     }
 
     brunnel_bool = Bool()
-    
+
     def extra_fields(self):
         return []
 
diff --git a/imposm/psqldb.py b/imposm/psqldb.py
index 6d5846d..fdfff42 100644
--- a/imposm/psqldb.py
+++ b/imposm/psqldb.py
@@ -1,11 +1,11 @@
 # Copyright 2011 Omniscale (http://omniscale.com)
-# 
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 #     http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,27 +18,38 @@ from os.path import join, dirname, exists
 
 db_create_template = """
 # run this as postgres user, eg:
-# imposm-pgsql > create_db.sh; sudo su postgres; sh ./create_db.sh
+# imposm-psqldb > create_db.sh; sudo su postgres; sh ./create_db.sh
 set -xe
 createuser --no-superuser --no-createrole --createdb ${user}
 createdb -E UTF8 -O ${user} ${dbname}
 createlang plpgsql ${dbname}
-psql -d ${dbname} -f ${postgis_sql}
-psql -d ${dbname} -f ${spatial_ref_sys_sql}
-psql -d ${dbname} -f ${epsg900913_sql}
-echo "ALTER TABLE geometry_columns OWNER TO ${user}; ALTER TABLE spatial_ref_sys OWNER TO ${user};" | psql -d ${dbname}
+${postgis}
+echo "ALTER TABLE spatial_ref_sys OWNER TO ${user};" | psql -d ${dbname}
 echo "ALTER USER ${user} WITH PASSWORD '${password}';" |psql -d ${dbname}
 echo "host\t${dbname}\t${user}\t127.0.0.1/32\tmd5" >> ${pg_hba}
 set +x
 echo "Done. Don't forget to restart postgresql!"
 """.strip()
 
-def find_sql_files(version, mapping):
-    
-    pg_hba = '/path/to/pg_hba.conf \t\t# <- CHANGE THIS PATH'
-    postgis_sql = '/path/to/postgis.sql \t\t# <- CHANGE THIS PATH'
-    spatial_ref_sys_sql = '/path/to/spatial_ref_sys.sql \t# <- CHANGE THIS PATH' 
-    
+postgis_create_template_15 = """
+psql -d ${dbname} -f ${postgis_sql}
+psql -d ${dbname} -f ${spatial_ref_sys_sql}
+psql -d ${dbname} -f ${epsg900913_sql}
+echo "ALTER TABLE geometry_columns OWNER TO ${user};" | psql -d ${dbname}
+""".strip()
+
+postgis_create_template_20 = """
+echo "CREATE EXTENSION postgis;" | psql -d ${dbname}
+echo "ALTER TABLE geometry_columns OWNER TO ${user};" | psql -d ${dbname}
+psql -d ${dbname} -f ${epsg900913_sql}
+""".strip()
+
+def find_sql_files(version, postgis_version, mapping):
+
+    pg_hba = '/path/to/pg_hba.conf \t# <- CHANGE THIS PATH'
+    postgis_sql = '/path/to/postgis.sql \t\t\t\t# <- CHANGE THIS PATH'
+    spatial_ref_sys_sql = '/path/to/spatial_ref_sys.sql \t\t\t# <- CHANGE THIS PATH'
+
     if version in ('8.3', 'auto'):
         p = '/usr/share/postgresql-8.3-postgis/lwpostgis.sql'
         if exists(p):
@@ -49,7 +60,7 @@ def find_sql_files(version, mapping):
         p = '/etc/postgresql/8.3/main/pg_hba.conf'
         if exists(p):
             pg_hba = p
-    
+
     if version in ('8.4', 'auto'):
         p = '/usr/share/postgresql/8.4/contrib/postgis.sql'
         if exists(p):
@@ -66,7 +77,7 @@ def find_sql_files(version, mapping):
         p = '/etc/postgresql/8.4/main/pg_hba.conf'
         if exists(p):
             pg_hba = p
-    
+
     if version in ('9.1', 'auto'):
         p = '/usr/share/postgresql/9.1/contrib/postgis-1.5/postgis.sql'
         if exists(p):
@@ -77,10 +88,15 @@ def find_sql_files(version, mapping):
         p = '/etc/postgresql/9.1/main/pg_hba.conf'
         if exists(p):
             pg_hba = p
-    
+
+    if postgis_version == '2.0':
+        postgis_sql = None
+        spatial_ref_sys_sql = None
+
     mapping['postgis_sql'] = postgis_sql
     mapping['spatial_ref_sys_sql'] = spatial_ref_sys_sql
     mapping['pg_hba'] = pg_hba
+    mapping['pg_version'] = postgis_version
 
 def main():
     usage = '%prog [options]'
@@ -89,8 +105,9 @@ def main():
     parser.add_option('--database', dest='dbname', metavar='osm', default='osm')
     parser.add_option('--user', dest='user', metavar='osm', default='osm')
     parser.add_option('--password', dest='password', metavar='osm', default='osm')
-    parser.add_option('--pg-version', dest='pg_version', metavar='8.3|8.4|auto', default='auto')
-    
+    parser.add_option('--pg-version', dest='pg_version', metavar='8.3|8.4|9.1|auto', default='auto')
+    parser.add_option('--postgis-version', dest='postgis_version', metavar='1.5|2.0', default='1.5')
+
     (options, args) = parser.parse_args()
 
     mapping = {
@@ -98,10 +115,15 @@ def main():
         'dbname': options.dbname,
         'password': options.password,
     }
-    
+
     mapping['epsg900913_sql'] = join(dirname(__file__), '900913.sql')
-    find_sql_files(options.pg_version, mapping)
-    
+    find_sql_files(options.pg_version, options.postgis_version, mapping)
+
+    if options.postgis_version == '2.0':
+        mapping['postgis'] = string.Template(postgis_create_template_20).substitute(mapping)
+    else:
+        mapping['postgis'] = string.Template(postgis_create_template_15).substitute(mapping)
+
     template = string.Template(db_create_template)
     print template.substitute(mapping)
 
diff --git a/imposm/reader.py b/imposm/reader.py
index 9768571..0a06974 100644
--- a/imposm/reader.py
+++ b/imposm/reader.py
@@ -1,11 +1,11 @@
 # Copyright 2011 Omniscale (http://omniscale.com)
-# 
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 #     http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -16,7 +16,7 @@ from functools import partial
 from multiprocessing import Process, JoinableQueue
 
 from imposm.parser import OSMParser
-from imposm.util import ParserProgress, setproctitle
+from imposm.util import setproctitle
 
 class ImposmReader(object):
     def __init__(self, mapping, cache, pool_size=2, merge=False, logger=None):
@@ -33,22 +33,22 @@ class ImposmReader(object):
         coords_queue = JoinableQueue(512)
         ways_queue = JoinableQueue(128)
         relations_queue = JoinableQueue(128)
-        
-        log_proc = ParserProgress()
+
+        log_proc = self.logger()
         log_proc.start()
-        
+
         marshal = True
         if self.merge:
             # merging needs access to unmarshaled data
             marshal = False
-        
+
         estimates = {
             'coords': self.estimated_coords,
             'nodes': self.estimated_coords//50,
             'ways': self.estimated_coords//7,
             'relations': self.estimated_coords//1000,
         }
-        
+
         coords_writer = CacheWriterProcess(coords_queue, self.cache.coords_cache,
             estimates['coords'], log=partial(log_proc.log, 'coords'),
             marshaled_data=marshal)
@@ -64,27 +64,27 @@ class ImposmReader(object):
             estimates['ways'], merge=self.merge, log=partial(log_proc.log, 'ways'),
             marshaled_data=marshal)
         ways_writer.start()
-        
+
         relations_writer = CacheWriterProcess(relations_queue, self.cache.relations_cache,
             estimates['relations'], merge=self.merge, log=partial(log_proc.log, 'relations'),
             marshaled_data=marshal)
         relations_writer.start()
-        
+
         log_proc.message('coords: %dk nodes: %dk ways: %dk relations: %dk (estimated)' % (
             estimates['coords']/1000, estimates['nodes']/1000, estimates['ways']/1000,
             estimates['relations']/1000)
         )
-        
+
         # keep one CPU free for writer proc on hosts with 4 or more CPUs
         pool_size = self.pool_size if self.pool_size < 4 else self.pool_size - 1
-        
+
         parser = OSMParser(pool_size, nodes_callback=nodes_queue.put, coords_callback=coords_queue.put,
             ways_callback=ways_queue.put, relations_callback=relations_queue.put, marshal_elem_data=marshal)
 
         parser.nodes_tag_filter = self.mapper.tag_filter_for_nodes()
         parser.ways_tag_filter = self.mapper.tag_filter_for_ways()
         parser.relations_tag_filter = self.mapper.tag_filter_for_relations()
-        
+
         parser.parse(filename)
 
         coords_queue.put(None)
@@ -111,7 +111,7 @@ class CacheWriterProcess(Process):
         self.log = log
         self.marshaled_data = marshaled_data
         self.estimated_records = estimated_records
-    
+
     def run(self):
         # print 'creating %s (%d)' % (self.filename, self.estimated_records or 0)
         cache = self.cache(mode='w', estimated_records=self.estimated_records)
@@ -121,7 +121,7 @@ class CacheWriterProcess(Process):
             cache_put = cache.put
         while True:
             data = self.queue.get()
-            if data is None: 
+            if data is None:
                 self.queue.task_done()
                 break
             if self.merge:
diff --git a/imposm/test/test_dbimporter.py b/imposm/test/test_dbimporter.py
new file mode 100644
index 0000000..3a10cb8
--- /dev/null
+++ b/imposm/test/test_dbimporter.py
@@ -0,0 +1,100 @@
+# Copyright 2012 Omniscale (http://omniscale.com)
+# 
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from imposm.dbimporter import DictBasedImporter, TupleBasedImporter
+from imposm import defaultmapping
+
+from nose.tools import eq_, assert_almost_equal
+
+class TestDictBasedImporter(object):
+    def setup(self):
+        dummy_db = None
+        mapper = None
+        self.importer = DictBasedImporter(None, dummy_db, mapper, None,
+            dry_run=False)
+
+    def test_insert(self):
+        mappings = [
+            (('highway', 'secondary'), [defaultmapping.mainroads]),
+            (('railway', 'tram'), [defaultmapping.railways]),
+            (('landusage', 'grass'), [defaultmapping.landusages]),
+        ]
+        assert self.importer.insert(mappings, 1234, [(0, 0), (1, 0), (1, 1), (0, 0)],
+            {'highway': 'secondary', 'railway': 'tram', 'oneway': '1',
+             'name': 'roundabout',
+            }
+        )
+
+        # get items, sort by mapping_names so that landusages comes first
+        queue_items = [self.importer.db_queue.get(), self.importer.db_queue.get()]
+        queue_items.sort(key=lambda x: x['mapping_names'])
+
+        polygon_item = queue_items[0]
+        linestring_item = queue_items[1]
+
+        eq_(linestring_item['mapping_names'], ['mainroads', 'railways'])
+        eq_(linestring_item['osm_id'], 1234)
+        eq_(linestring_item['fields'], {
+            'highway': 'secondary',
+            'railway': 'tram',
+            'oneway': 1,
+            'z_order': 7,
+            'bridge': 0,
+            'tunnel': 0,
+            'name': 'roundabout',
+            'ref': None
+        })
+        eq_(polygon_item['mapping_names'], ['landusages'])
+        eq_(polygon_item['osm_id'], 1234)
+        assert_almost_equal(polygon_item['fields']['area'], 6195822904.182782)
+        del polygon_item['fields']['area']
+        eq_(polygon_item['fields'], {
+            'z_order': 27,
+            'landusage': 'grass',
+            'name': 'roundabout',
+        })
+
+class TestTupleBasedImporter(object):
+    def setup(self):
+        dummy_db = None
+        mapper = None
+        self.importer = TupleBasedImporter(None, dummy_db, mapper, None,
+            dry_run=False)
+
+    def test_insert(self):
+        mappings = [
+            (('highway', 'secondary'), [defaultmapping.mainroads]),
+            (('railway', 'tram'), [defaultmapping.railways]),
+            (('landusage', 'grass'), [defaultmapping.landusages]),
+        ]
+        assert self.importer.insert(mappings, 1234, [(0, 0), (1, 0), (1, 1), (0, 0)],
+            {'highway': 'secondary', 'railway': 'tram', 'oneway': '1',
+             'name': 'roundabout',
+            }
+        )
+
+        mainroads_item = self.importer.db_queue.get()
+        eq_(mainroads_item[0], defaultmapping.mainroads)
+        eq_(mainroads_item[1], 1234)
+        eq_(mainroads_item[3], ['roundabout', 'secondary', 0, 0, 1, None, 5])
+
+        railways_item = self.importer.db_queue.get()
+        eq_(railways_item[0], defaultmapping.railways)
+        eq_(railways_item[1], 1234)
+        eq_(railways_item[3], ['roundabout', 'tram', 0, 0, 7])
+
+        landusages_item = self.importer.db_queue.get()
+        eq_(landusages_item[0], defaultmapping.landusages)
+        eq_(landusages_item[1], 1234)
+        eq_(landusages_item[3], ['roundabout', 'grass', 6195822904.182782, 27])
diff --git a/imposm/test/test_geom.py b/imposm/test/test_geom.py
deleted file mode 100644
index de56b56..0000000
--- a/imposm/test/test_geom.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from imposm.geom import LineStringBuilder
-from shapely.ops import linemerge
-from nose.tools import eq_
-
-class TestLineStringBuilder(object):
-    def test_split(self):
-        builder = LineStringBuilder()
-        geom = builder.to_geom([(0, 0), (1, 1), (2, 2), (3, 3)], max_length=3)
-        assert isinstance(geom, list)
-        eq_(len(geom), 2)
-        
-        merged = linemerge(geom)
-        eq_(merged.type, 'LineString')
-
-    def test_split(self):
-        builder = LineStringBuilder()
-        geom = builder.to_geom([(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], max_length=2)
-        assert isinstance(geom, list)
-        eq_(len(geom), 3)
-        
-        merged = linemerge(geom)
-        eq_(merged.type, 'LineString')
-
-    def test_split_same_length(self):
-        builder = LineStringBuilder()
-        geom = builder.to_geom([(0, 0), (1, 1), (2, 2), (3, 3)], max_length=4)
-        eq_(geom.type, 'LineString')
-
-
-    def test_split_short(self):
-        builder = LineStringBuilder()
-        geom = builder.to_geom([(0, 0), (1, 1), (2, 2), (3, 3)], max_length=10)
-        eq_(geom.type, 'LineString')
diff --git a/imposm/test/test_imported.py b/imposm/test/test_imported.py
index 41c9948..13788cf 100644
--- a/imposm/test/test_imported.py
+++ b/imposm/test/test_imported.py
@@ -1,11 +1,11 @@
 # Copyright 2011 Omniscale (http://omniscale.com)
-# 
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 #     http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -42,36 +42,38 @@ def setup_module():
     os.chdir(temp_dir)
     test_osm_file = os.path.join(os.path.dirname(__file__), 'test.out.osm')
     with capture_out():
-        imposm.app.main(['--read', test_osm_file, '--write', '-d', db_conf.db, '--host', db_conf.host,
-            '--proj', db_conf.proj])
+        print db_conf.password
+        imposm.app.main(['--read', test_osm_file, '--write',
+            '--proj', db_conf.proj, '--table-prefix', db_conf.prefix,
+            '--connect', 'postgis://%(user)s:%(password)s@%(host)s:%(port)s/%(db)s' % db_conf])
 
 class TestImported(object):
     def __init__(self):
         self.db = imposm.db.config.DB(db_conf)
-    
+
     def test_point(self):
         cur = self.db.cur
-        cur.execute('select osm_id, name, ST_AsText(geometry) from osm_new_places where osm_id = 1')
+        cur.execute('select osm_id, name, ST_AsText(geometry) from %splaces where osm_id = 1' % db_conf.prefix)
         results = cur.fetchall()
         eq_(len(results), 1)
         eq_(results[0], (1, 'Foo', 'POINT(13 47.5)'))
 
     def test_way(self):
         cur = self.db.cur
-        cur.execute('select osm_id, name, ST_AsText(geometry) from osm_new_landusages where osm_id = 1001')
+        cur.execute('select osm_id, name, ST_AsText(geometry) from %slandusages where osm_id = 1001' % db_conf.prefix)
         results = cur.fetchall()
         eq_(len(results), 1)
         eq_(results[0][:-1], (1001, 'way 1001',))
         eq_(roundwkt(results[0][-1]), 'POLYGON((13.0 47.5,14.5 50.0,16.5 49.0,17.0 47.0,14.5 45.5,13.0 47.5),(14.0 47.5,15.0 47.0,15.5 48.0,14.5 48.5,14.0 47.5))')
 
-        cur.execute('select osm_id, name, ST_AsText(geometry) from osm_new_landusages where osm_id = 2001')
+        cur.execute('select osm_id, name, ST_AsText(geometry) from %slandusages where osm_id = 2001' % db_conf.prefix)
         results = cur.fetchall()
         eq_(len(results), 1)
         eq_(results[0][:-1], (2001, 'way 2001',))
         eq_(roundwkt(results[0][-1]), 'POLYGON((23.0 47.5,24.5 50.0,26.5 49.0,27.0 47.0,24.5 45.5,23.0 47.5),(24.5 47.0,25.5 46.5,26.0 47.5,25.0 47.5,24.5 47.0),(24.2 48.25,25.25 47.7,25.7 48.8,24.7 49.25,24.2 48.25))')
 
 
-        cur.execute('select osm_id, name, ST_AsText(geometry) from osm_new_landusages where osm_id = 3001')
+        cur.execute('select osm_id, name, ST_AsText(geometry) from %slandusages where osm_id = 3001' % db_conf.prefix)
         results = cur.fetchall()
         eq_(len(results), 1)
         eq_(results[0][:-1], (3001, 'way 3002',))
@@ -86,10 +88,10 @@ def roundwkt(wkt):
 def teardown_module():
     if old_cwd:
         os.chdir(old_cwd)
-    
+
     if temp_dir and os.path.exists(temp_dir):
         shutil.rmtree(temp_dir)
-    
+
 
 @contextmanager
 def capture_out():
@@ -105,4 +107,3 @@ def capture_out():
     finally:
         sys.stdout = old_stdout
         sys.stderr = old_stderr
-    
\ No newline at end of file
diff --git a/imposm/test/test_limit_to.py b/imposm/test/test_limit_to.py
new file mode 100644
index 0000000..e99babe
--- /dev/null
+++ b/imposm/test/test_limit_to.py
@@ -0,0 +1,85 @@
+# Copyright 2012 Omniscale (http://omniscale.com)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from imposm.geom import LimitPolygonGeometry, EmtpyGeometryError
+from shapely.wkt import loads
+
+from nose.tools import raises
+
+class TestLimitPolygonGeometry(object):
+
+    @raises(EmtpyGeometryError)
+    def test_linestring_no_intersection(self):
+        geom = 'POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))'
+        limit_to = LimitPolygonGeometry(loads(geom))
+        limit_to.intersection(loads('LINESTRING(-100 -100, -50 -50)'))
+
+    def test_linestring(self):
+        geom = 'POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))'
+        limit_to = LimitPolygonGeometry(loads(geom))
+        geom = limit_to.intersection(loads('LINESTRING(-10 -10, 20 20)'))
+        assert geom.almost_equals(loads('LINESTRING(0 0, 10 10)'))
+
+    def test_linestring_contained(self):
+        geom = 'POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))'
+        limit_to = LimitPolygonGeometry(loads(geom))
+        test_geom = loads('LINESTRING(1 1, 9 9)')
+        geom = limit_to.intersection(test_geom)
+        # should return unmodified input geometry
+        assert geom is test_geom
+
+    def test_linestring_multilinestring_result(self):
+        geom = 'POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))'
+        limit_to = LimitPolygonGeometry(loads(geom))
+        geom = limit_to.intersection(loads('LINESTRING(-10 -20, 5 10, 20 -20)'))
+        assert isinstance(geom, list)
+        assert geom[0].almost_equals(loads('LINESTRING(0 0, 5 10)'))
+        assert geom[1].almost_equals(loads('LINESTRING(5 10, 10 0)'))
+
+    @raises(EmtpyGeometryError)
+    def test_linestring_point_result(self):
+        geom = 'POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))'
+        limit_to = LimitPolygonGeometry(loads(geom))
+        geom = limit_to.intersection(loads('LINESTRING(-10 -10, 0 0)'))
+
+    def test_linestring_mixed_result(self):
+        geom = 'POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))'
+        limit_to = LimitPolygonGeometry(loads(geom))
+        geom = limit_to.intersection(loads('LINESTRING(0 0, 5 -10, 5 10)'))
+        # point and linestring, point not returned
+        assert isinstance(geom, list)
+        assert len(geom) == 1
+        assert geom[0].almost_equals(loads('LINESTRING(5 0, 5 10)'))
+
+    def test_polygon_mixed_result(self):
+        geom = 'POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))'
+        limit_to = LimitPolygonGeometry(loads(geom))
+        test_geom = loads('POLYGON((0 -10, 0 5, 2.5 -5, 5 0, 7.5 -5, 10 5, 10 -10, 0 -10))')
+        geom = limit_to.intersection(test_geom)
+        # point and two polygons, point not returned
+        assert isinstance(geom, list)
+        assert len(geom) == 2
+        assert geom[0].almost_equals(loads('POLYGON((1.25 0, 0 0, 0 5, 1.25 0))'))
+        assert geom[1].almost_equals(loads('POLYGON((10 0, 8.75 0, 10 5, 10 0))'))
+
+    def test_polygon_multipolygon_result(self):
+        geom = 'POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))'
+        limit_to = LimitPolygonGeometry(loads(geom))
+        test_geom = loads('POLYGON((0 -10, 0 5, 2.5 -5, 5 -1, 7.5 -5, 10 5, 10 -10, 0 -10))')
+        geom = limit_to.intersection(test_geom)
+        # similar to above, but point does not touch the box, so we should get
+        # a single multipolygon
+        assert geom.almost_equals(loads(
+            'MULTIPOLYGON(((1.25 0, 0 0, 0 5, 1.25 0)),'
+            '((10 0, 8.75 0, 10 5, 10 0)))'))
diff --git a/imposm/util.py b/imposm/util/__init__.py
similarity index 95%
rename from imposm/util.py
rename to imposm/util/__init__.py
index 968f917..b0fa0dc 100644
--- a/imposm/util.py
+++ b/imposm/util/__init__.py
@@ -1,11 +1,11 @@
 # Copyright 2011 Omniscale (http://omniscale.com)
-# 
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-# 
+#
 #     http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -41,10 +41,12 @@ class Timer(object):
         self.logger.message('%s took %s' % (self.title, format_total_time(seconds)))
 
 class ParserProgress(multiprocessing.Process):
+    log_every_seconds = 0.2
+
     def __init__(self):
         self.queue = multiprocessing.Queue()
         multiprocessing.Process.__init__(self)
-        
+
     def run(self):
         last_log = time.time()
         counters = {'coords': 0, 'nodes':0, 'ways':0, 'relations':0}
@@ -52,13 +54,13 @@ class ParserProgress(multiprocessing.Process):
             log_statement = self.queue.get()
             if log_statement is None:
                 break
-            
+
             log_type, incr = log_statement
             counters[log_type] += incr
-            if time.time() - last_log > 0.2:
+            if time.time() - last_log > self.log_every_seconds:
                 last_log = time.time()
                 self.print_log(counters)
-        
+
     @staticmethod
     def message(msg):
         print >>sys.stderr, "[%s] %s" % (timestamp(), msg)
@@ -73,16 +75,21 @@ class ParserProgress(multiprocessing.Process):
             int(counters['relations']/1000)
         ),
         sys.stderr.flush()
-    
+
     def log(self, log_type, incr):
         self.queue.put((log_type, incr))
-    
+
     def stop(self):
         sys.stderr.write('\n')
         sys.stderr.flush()
         self.queue.put(None)
 
+class QuietParserProgress(ParserProgress):
+    log_every_seconds = 60
+
 class ProgressLog(object):
+    log_every_seconds = 0.2
+
     def __init__(self, title=None, total=None):
         self.count = 0
         self.total = total
@@ -91,12 +98,12 @@ class ProgressLog(object):
         self.start_time = time.time()
         self.last_log = time.time()
         self.print_log()
-    
+
     @staticmethod
     def message(msg):
         print >>sys.stderr, "[%s] %s" % (timestamp(), msg)
         sys.stderr.flush()
-    
+
     def log(self, value=None, step=1):
         before = self.count//1000
         if value:
@@ -105,16 +112,16 @@ class ProgressLog(object):
             self.count += step
         if self.count//1000 > before:
             self.print_log()
-    
+
     def print_log(self):
-        if time.time() - self.last_log > 0.2:
+        if time.time() - self.last_log > self.log_every_seconds:
             self.last_log = time.time()
             print >>sys.stderr, "[%s] %s: %dk%s\r" % (
                 timestamp(), self.title,
                 int(self.count/1000), self._total,
             ),
             sys.stderr.flush()
-    
+
     def stop(self):
         print >>sys.stderr
         seconds = time.time() - self.start_time
@@ -123,9 +130,12 @@ class ProgressLog(object):
             timestamp(), self.title, total_time, self.count, self.count/seconds)
         sys.stderr.flush()
 
+class QuietProgressLog(ProgressLog):
+    log_every_seconds = 60
+
 def timestamp():
     return datetime.datetime.now().strftime('%H:%M:%S')
-    
+
 def format_total_time(seconds):
     h, m, s = seconds_to_hms(seconds)
     res = '%-02ds' % s
@@ -144,11 +154,11 @@ class NullLog(object):
     def log_node(self):
         pass
     node = log_node
-    
+
     def log_way(self):
         pass
     way = log_way
-    
+
     def log_relation(self):
         pass
     relation = log_relation
@@ -159,14 +169,14 @@ class MMapReader(object):
         self.m = m
         self.m.seek(0)
         self.size = size
-    
+
     def read(self, size=None):
         if size is None:
             size = self.size - self.m.tell()
         else:
             size = min(self.size - self.m.tell(), size)
         return self.m.read(size)
-        
+
     def readline(self):
         cur_pos = self.m.tell()
         if cur_pos >= self.size:
@@ -174,7 +184,7 @@ class MMapReader(object):
         nl_pos = self.m.find('\n')
         self.m.seek(cur_pos)
         return self.m.read(nl_pos-cur_pos)
-    
+
     def seek(self, n):
         self.m.seek(n)
 
@@ -185,7 +195,7 @@ class MMapPool(object):
         self.pool = [mmap.mmap(-1, mmap_size) for _ in range(n)]
         self.free_mmaps = set(range(n))
         self.free_queue = JoinableQueue()
-        
+
     def new(self):
         if not self.free_mmaps:
             self.free_mmaps.add(self.free_queue.get())
@@ -198,7 +208,7 @@ class MMapPool(object):
                 break
         mmap_idx = self.free_mmaps.pop()
         return mmap_idx, self.pool[mmap_idx]
-    
+
     def join(self):
         while len(self.free_mmaps) < self.n:
             self.free_mmaps.add(self.free_queue.get())
@@ -238,6 +248,5 @@ def estimate_records(files):
         if f.endswith('.pbf'):
             fsize *= 15 # observed pbf compression factor on osm data
         records += fsize/200
-    
+
     return int(records)
-    
\ No newline at end of file
diff --git a/imposm/util/geom.py b/imposm/util/geom.py
new file mode 100644
index 0000000..494bf29
--- /dev/null
+++ b/imposm/util/geom.py
@@ -0,0 +1,144 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2010 Omniscale <http://omniscale.de>
+# 
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#    http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import division, with_statement
+
+import codecs
+from functools import partial
+
+import logging
+log = logging.getLogger(__name__)
+
+try:
+    import shapely.wkt
+    import shapely.geometry
+    import shapely.ops
+    import shapely.prepared
+    geom_support = True
+except ImportError:
+    geom_support = False
+
+def require_geom_support():
+    if not geom_support:
+        raise ImportError('Shapely required for geometry support')
+
+
+def load_datasource(datasource, where=None):
+    """
+    Loads polygons from any OGR datasource.
+    
+    Returns a list of Shapely Polygon geometries
+    loaded from the datasource.
+    """
+    from imposm.util.ogr import OGRShapeReader
+    
+    polygons = []
+    for wkt in OGRShapeReader(datasource).wkts(where):
+        geom = shapely.wkt.loads(wkt)
+        if geom.type == 'Polygon':
+            polygons.append(geom)
+        elif geom.type == 'MultiPolygon':
+            for p in geom:
+                polygons.append(p)
+        else:
+            log.info('skipping %s geometry from %s: not a Polygon/MultiPolygon',
+                geom.type, datasource)
+    return polygons
+
+def load_polygons(geom_files):
+    """
+    Loads WKT polygons from one or more text files.
+    
+    Returns a list of Shapely Polygon geometries
+    loaded from the files.
+    """
+    polygons = []
+    if isinstance(geom_files, basestring):
+        geom_files = [geom_files]
+    
+    for geom_file in geom_files:
+        # open with utf-8-sig encoding to get rid of UTF8 BOM from MS Notepad
+        with codecs.open(geom_file, encoding='utf-8-sig') as f:
+            polygons.extend(load_polygon_lines(f, source=geom_files))
+    
+    return polygons
+
+def load_polygon_lines(line_iter, source='<string>'):
+    polygons = []
+    for line in line_iter:
+        if not line.strip():
+            continue
+        geom = shapely.wkt.loads(line)
+        if geom.type == 'Polygon':
+            polygons.append(geom)
+        elif geom.type == 'MultiPolygon':
+            for p in geom:
+                polygons.append(p)
+        else:
+            log.info('ignoring non-polygon geometry (%s) from %s',
+                geom.type, source)
+
+    return polygons
+    
+def build_multipolygon(polygons, simplify=False):
+    if polygons:
+        mp = shapely.geometry.MultiPolygon(polygons)
+        if simplify:
+            mp = simplify_geom(mp)
+    else:
+        mp = shapely.geometry.Polygon()
+    return mp.bounds, mp
+
+def simplify_geom(geom):
+    bounds = geom.bounds
+    w, h = bounds[2] - bounds[0], bounds[3] - bounds[1]
+    tolerance = min((w/1e6, h/1e6))
+    return geom.simplify(tolerance, preserve_topology=True)
+
+def bbox_polygon(bbox):
+    """
+    Create Polygon that covers the given bbox.
+    """
+    return shapely.geometry.Polygon((
+        (bbox[0], bbox[1]),
+        (bbox[2], bbox[1]),
+        (bbox[2], bbox[3]),
+        (bbox[0], bbox[3]),
+        ))
+
+def transform_geometry(from_srs, to_srs, geometry):
+    transf = partial(transform_xy, from_srs, to_srs)
+    
+    if geometry.type == 'Polygon':
+        return transform_polygon(transf, geometry)
+    
+    if geometry.type == 'MultiPolygon':
+        return transform_multipolygon(transf, geometry)
+    
+    raise ValueError('cannot transform %s' % geometry.type)
+
+def transform_polygon(transf, polygon):
+    ext = transf(polygon.exterior.xy)
+    ints = [transf(ring.xy) for ring in polygon.interiors]
+    return shapely.geometry.Polygon(ext, ints)
+
+def transform_multipolygon(transf, multipolygon):
+    transformed_polygons = []
+    for polygon in multipolygon:
+        transformed_polygons.append(transform_polygon(transf, polygon))
+    return shapely.geometry.MultiPolygon(transformed_polygons)
+
+def transform_xy(from_srs, to_srs, xy):
+    return list(from_srs.transform_to(to_srs, zip(*xy)))
diff --git a/imposm/util/lib.py b/imposm/util/lib.py
new file mode 100644
index 0000000..9458e53
--- /dev/null
+++ b/imposm/util/lib.py
@@ -0,0 +1,107 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2010 Omniscale <http://omniscale.de>
+# 
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#    http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+ctypes utilities.
+"""
+
+import sys
+import os
+
+from ctypes import CDLL
+from ctypes.util import find_library as _find_library
+
+
+default_locations = dict(
+    darwin=dict(
+        paths = ['/opt/local/lib'],
+        exts = ['.dylib'],
+    ),
+    win32=dict(
+        paths = [os.path.dirname(os.__file__) + '/../../../DLLs'],
+        exts = ['.dll']
+    ),
+    other=dict(
+        paths = [], # MAPPROXY_LIB_PATH will add paths here
+        exts = ['.so']
+    ),
+)
+
+# TODO does this work for imposm too?
+
+additional_lib_path = os.environ.get('MAPPROXY_LIB_PATH')
+if additional_lib_path:
+    additional_lib_path = additional_lib_path.split(os.pathsep)
+    additional_lib_path.reverse()
+    for locs in default_locations.values():
+        for path in additional_lib_path:
+            locs['paths'].insert(0, path)
+
+def load_library(lib_names, locations_conf=default_locations):
+    """
+    Load the `lib_name` library with ctypes.
+    If ctypes.util.find_library does not find the library,
+    different path and filename extensions will be tried.
+    
+    Returns the loaded library or None.
+    """
+    if isinstance(lib_names, basestring):
+        lib_names = [lib_names]
+    
+    for lib_name in lib_names:
+        lib = load_library_(lib_name, locations_conf)
+        if lib is not None: return lib
+
+def load_library_(lib_name, locations_conf=default_locations):
+    lib_path = find_library(lib_name)
+    
+    if lib_path:
+        return CDLL(lib_path)
+    
+    if sys.platform in locations_conf:
+        paths = locations_conf[sys.platform]['paths']
+        exts = locations_conf[sys.platform]['exts']
+        lib_path = find_library(lib_name, paths, exts)
+    else:
+        paths = locations_conf['other']['paths']
+        exts = locations_conf['other']['exts']
+        lib_path = find_library(lib_name, paths, exts)
+    
+    if lib_path:
+        return CDLL(lib_path)
+        
+
+def find_library(lib_name, paths=None, exts=None):
+    """
+    Search for library in all permutations of `paths` and `exts`.
+    If nothing is found None is returned.
+    """
+    if not paths or not exts:
+        lib = _find_library(lib_name)
+        if lib is None and lib_name.startswith('lib'):
+            lib = _find_library(lib_name[3:])
+        return lib
+    
+    for lib_name in [lib_name] + ([lib_name[3:]] if lib_name.startswith('lib') else []):
+        for path in paths:
+            for ext in exts:
+                lib_path = os.path.join(path, lib_name + ext)
+                if os.path.exists(lib_path):
+                    return lib_path
+    
+    return None
+
+if __name__ == '__main__':
+    print load_library(sys.argv[1])
diff --git a/imposm/util/ogr.py b/imposm/util/ogr.py
new file mode 100644
index 0000000..140c214
--- /dev/null
+++ b/imposm/util/ogr.py
@@ -0,0 +1,130 @@
+# This file is part of the MapProxy project.
+# Copyright (C) 2010 Omniscale <http://omniscale.de>
+# 
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#    http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from imposm.util.lib import load_library
+import ctypes
+from ctypes import c_void_p, c_char_p, c_int
+
def init_libgdal():
    """
    Load libgdal with ctypes and declare the OGR C-API prototypes used
    by OGRShapeReader.

    Returns the configured library handle, or None when no GDAL shared
    library could be found (callers must handle a None `libgdal`).
    """
    libgdal = load_library(['libgdal', 'libgdal1'])
    
    if not libgdal: return
    
    # argtypes/restype must be declared explicitly: ctypes assumes int
    # for return values by default, which truncates 64-bit pointers.
    libgdal.OGROpen.argtypes = [c_char_p, c_int, c_void_p]
    libgdal.OGROpen.restype = c_void_p

    libgdal.CPLGetLastErrorMsg.argtypes = []
    libgdal.CPLGetLastErrorMsg.restype = c_char_p

    libgdal.OGR_DS_GetLayer.argtypes = [c_void_p, c_int]
    libgdal.OGR_DS_GetLayer.restype = c_void_p

    libgdal.OGR_FD_GetName.argtypes = [c_void_p]
    libgdal.OGR_FD_GetName.restype = c_char_p

    libgdal.OGR_L_GetLayerDefn.argtypes = [c_void_p]
    libgdal.OGR_L_GetLayerDefn.restype = c_void_p

    libgdal.OGR_DS_Destroy.argtypes = [c_void_p]

    libgdal.OGR_DS_ExecuteSQL.argtypes = [c_void_p, c_char_p, c_void_p, c_char_p]
    libgdal.OGR_DS_ExecuteSQL.restype = c_void_p
    libgdal.OGR_DS_ReleaseResultSet.argtypes = [c_void_p, c_void_p]

    libgdal.OGR_L_ResetReading.argtypes = [c_void_p]
    libgdal.OGR_L_GetNextFeature.argtypes = [c_void_p]
    libgdal.OGR_L_GetNextFeature.restype = c_void_p

    libgdal.OGR_F_Destroy.argtypes = [c_void_p]

    libgdal.OGR_F_GetGeometryRef.argtypes = [c_void_p]
    libgdal.OGR_F_GetGeometryRef.restype = c_void_p

    libgdal.OGR_G_ExportToWkt.argtypes = [c_void_p, ctypes.POINTER(c_char_p)]
    libgdal.OGR_G_ExportToWkt.restype = c_void_p

    # The WKT buffer from OGR_G_ExportToWkt must be released with VSIFree.
    libgdal.VSIFree.argtypes = [c_void_p]

    # Register all OGR drivers once at load time.
    libgdal.OGRRegisterAll()
    
    return libgdal
+
# Module-level handle; None when no GDAL shared library could be loaded.
libgdal = init_libgdal()
+
class OGRShapeReaderError(Exception):
    """Raised when an OGR datasource cannot be opened or queried."""
    pass
+
class OGRShapeReader(object):
    """
    ctypes-based reader that yields WKT strings for the features of an
    OGR datasource (e.g. a shapefile).

    Requires the module-level `libgdal` handle; using this class when
    `libgdal` is None will fail. Instances release the datasource via
    close() or on garbage collection.
    """
    def __init__(self, datasource):
        self.datasource = datasource
        self.opened = False
        self._ds = None

    def open(self):
        """Open the datasource; no-op when already open. Raises OGRShapeReaderError on failure."""
        if self.opened: return
        self._ds = libgdal.OGROpen(self.datasource, False, None)
        if self._ds is None:
            msg = libgdal.CPLGetLastErrorMsg()
            if not msg:
                msg = 'failed to open %s' % self.datasource
            raise OGRShapeReaderError(msg)
        # BUG FIX: the original never set this flag, so close() never
        # destroyed the datasource and every wkts() call reopened (and
        # leaked) a fresh handle.
        self.opened = True

    def wkts(self, where=None):
        """
        Yield the WKT of each feature geometry.

        `where` may be a bare SQL where-clause (applied to the first
        layer) or a complete select statement; without it the first
        layer is read in full.
        """
        if not self.opened: self.open()

        if where:
            if not where.lower().startswith('select'):
                layer = libgdal.OGR_DS_GetLayer(self._ds, 0)
                layer_def = libgdal.OGR_L_GetLayerDefn(layer)
                name = libgdal.OGR_FD_GetName(layer_def)
                where = 'select * from %s where %s' % (name, where)
            layer = libgdal.OGR_DS_ExecuteSQL(self._ds, where, None, None)
        else:
            layer = libgdal.OGR_DS_GetLayer(self._ds, 0)
        if layer is None:
            msg = libgdal.CPLGetLastErrorMsg()
            raise OGRShapeReaderError(msg)

        try:
            libgdal.OGR_L_ResetReading(layer)
            while True:
                feature = libgdal.OGR_L_GetNextFeature(layer)
                if feature is None:
                    break
                geom = libgdal.OGR_F_GetGeometryRef(feature)
                res = c_char_p()
                libgdal.OGR_G_ExportToWkt(geom, ctypes.byref(res))
                yield res.value
                libgdal.VSIFree(res)
                libgdal.OGR_F_Destroy(feature)
        finally:
            # Release result sets even when the consumer abandons the
            # generator early (the original leaked them in that case).
            if where:
                libgdal.OGR_DS_ReleaseResultSet(self._ds, layer)

    def close(self):
        """Destroy the datasource handle if it was opened."""
        if self.opened:
            libgdal.OGR_DS_Destroy(self._ds)
            self._ds = None
            self.opened = False

    def __del__(self):
        self.close()
+        
# Manual smoke test: python imposm/util/ogr.py <datasource> [where]
if __name__ == '__main__':
    import sys
    reader = OGRShapeReader(sys.argv[1])
    where = None
    if len(sys.argv) == 3:
        where = sys.argv[2]
    for wkt in reader.wkts(where):
        print wkt
\ No newline at end of file
diff --git a/imposm/version.py b/imposm/version.py
index 9ef161f..ed295c8 100644
--- a/imposm/version.py
+++ b/imposm/version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = '2.4.0'
+__version__ = '2.5.0'
diff --git a/imposm/writer.py b/imposm/writer.py
index 5b9067e..9fb9db1 100644
--- a/imposm/writer.py
+++ b/imposm/writer.py
@@ -14,9 +14,23 @@
 
 from multiprocessing import Process, JoinableQueue
 
-from imposm.dbimporter import NodeProcess, WayProcess, RelationProcess
+from imposm.dbimporter import NodeProcessTuple, WayProcessTuple, RelationProcessTuple
+from imposm.dbimporter import NodeProcessDict, WayProcessDict, RelationProcessDict
 from imposm.util import create_pool, shutdown_pool
 
# Dispatch table mapping the database's insert_data_format ('tuple' or
# 'dict') and the OSM element type to the importer process class that
# ImposmWriter spawns for that element.
import_processes = {
    'tuple': {
        'node': NodeProcessTuple,
        'way': WayProcessTuple,
        'relation': RelationProcessTuple,
    },
    'dict': {
        'node': NodeProcessDict,
        'way': WayProcessDict,
        'relation': RelationProcessDict,
    }
}
+
 class ImposmWriter(object):
     def __init__(self, mapping, db, cache, pool_size=2, logger=None, dry_run=False):
         self.mapping = mapping
@@ -55,7 +69,8 @@ class ImposmWriter(object):
         way_marker = WayMarkerProcess(inserted_way_queue, self.cache, self.logger)
         way_marker.start()
 
-        self._write_elem(RelationProcess, cache, log, self.pool_size, [inserted_way_queue])
+        self._write_elem(import_processes[self.db.insert_data_format]['relation'],
+            cache, log, self.pool_size, [inserted_way_queue])
 
         inserted_way_queue.put(None)
         way_marker.join()
@@ -63,13 +78,15 @@ class ImposmWriter(object):
     def ways(self):
         cache = self.cache.ways_cache()
         log = self.logger('ways', len(cache))
-        self._write_elem(WayProcess, cache, log, self.pool_size)
+        self._write_elem(import_processes[self.db.insert_data_format]['way'],
+            cache, log, self.pool_size)
         self.cache.remove_inserted_way_cache()
 
     def nodes(self):
         cache = self.cache.nodes_cache()
         log = self.logger('nodes', len(cache))
-        self._write_elem(NodeProcess, cache, log, self.pool_size)
+        self._write_elem(import_processes[self.db.insert_data_format]['node'],
+            cache, log, self.pool_size)
 
 
 class WayMarkerProcess(Process):
diff --git a/setup.py b/setup.py
index e9f7d04..84386da 100644
--- a/setup.py
+++ b/setup.py
@@ -84,7 +84,7 @@ setup(
     name = "imposm",
     version = version,
     description='OpenStreetMap importer for PostGIS.',
-    long_description=open('README').read() + open('CHANGES').read(),
+    long_description=open('README.rst').read() + open('CHANGES').read(),
     author = "Oliver Tonnhofer",
     author_email = "olt at omniscale.de",
     url='http://imposm.org/',

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/imposm.git



More information about the Pkg-grass-devel mailing list