[med-svn] [mypy] 01/01: New upstream version 0.4.5

Michael Crusoe misterc-guest@moszumanska.debian.org
Sat Oct 8 09:13:19 UTC 2016


This is an automated email from the git hooks/post-receive script.

misterc-guest pushed a commit to annotated tag upstream/0.4.5
in repository mypy.

commit f3da096533c29fbb75643aefd35ad8056c6f6b60
Author: Michael R. Crusoe <michael.crusoe@gmail.com>
Date:   Sat Oct 8 02:03:51 2016 -0700

    New upstream version 0.4.5
---
 PKG-INFO                                          |   4 +-
 mypy/binder.py                                    |  29 +-
 mypy/build.py                                     | 234 ++++----
 mypy/checker.py                                   | 258 ++++----
 mypy/checkexpr.py                                 | 177 ++++--
 mypy/checkmember.py                               |  42 +-
 mypy/checkstrformat.py                            |  23 +-
 mypy/defaults.py                                  |   3 +-
 mypy/errors.py                                    | 100 ++--
 mypy/expandtype.py                                |   9 +-
 mypy/exprtotype.py                                |   7 +-
 mypy/fastparse.py                                 | 264 ++++----
 mypy/{fastparse.py => fastparse2.py}              | 695 +++++++++++-----------
 mypy/fixup.py                                     |   5 +-
 mypy/indirection.py                               | 103 ++++
 mypy/join.py                                      |   7 +-
 mypy/lex.py                                       |  20 +-
 mypy/main.py                                      | 212 +++++--
 mypy/messages.py                                  |   9 +-
 mypy/nodes.py                                     | 220 ++++---
 mypy/options.py                                   |  64 +-
 mypy/parse.py                                     | 275 +++++----
 mypy/report.py                                    | 112 +++-
 mypy/semanal.py                                   | 366 +++++++-----
 mypy/stats.py                                     |   7 +-
 mypy/strconv.py                                   | 187 +++---
 mypy/stubgen.py                                   | 131 ++--
 mypy/subtypes.py                                  |  21 +-
 mypy/treetransform.py                             | 345 ++++++-----
 mypy/typeanal.py                                  |  23 +-
 mypy/typefixture.py                               |   9 +-
 mypy/types.py                                     | 119 ++--
 mypy/util.py                                      |  13 +
 mypy/version.py                                   |   2 +-
 setup.py                                          |   2 +-
 typeshed/stdlib/2.7/HTMLParser.pyi                |   7 +-
 typeshed/stdlib/2.7/UserDict.pyi                  |  31 +-
 typeshed/stdlib/2.7/__builtin__.pyi               |  25 +-
 typeshed/stdlib/2.7/_weakrefset.pyi               |  17 +-
 typeshed/stdlib/2.7/builtins.pyi                  |  25 +-
 typeshed/stdlib/2.7/calendar.pyi                  |  11 +
 typeshed/stdlib/2.7/collections.pyi               |   5 +-
 typeshed/stdlib/2.7/datetime.pyi                  |  15 +-
 typeshed/stdlib/2.7/decimal.pyi                   | 246 ++++----
 typeshed/stdlib/2.7/difflib.pyi                   |   4 +-
 typeshed/stdlib/2.7/fileinput.pyi                 |  46 ++
 typeshed/stdlib/2.7/httplib.pyi                   |  69 ++-
 typeshed/stdlib/2.7/inspect.pyi                   |   2 +-
 typeshed/stdlib/2.7/io.pyi                        |   3 +
 typeshed/stdlib/2.7/numbers.pyi                   |  77 ---
 typeshed/stdlib/2.7/os/__init__.pyi               |  18 +-
 typeshed/stdlib/2.7/socket.pyi                    |   4 +-
 typeshed/stdlib/2.7/string.pyi                    |   4 +-
 typeshed/stdlib/2.7/subprocess.pyi                |   3 +-
 typeshed/stdlib/2.7/tarfile.pyi                   | 239 --------
 typeshed/stdlib/2.7/typing.pyi                    |  40 +-
 typeshed/stdlib/2.7/unittest.pyi                  |  41 +-
 typeshed/stdlib/2.7/weakref.pyi                   |  99 ++-
 typeshed/stdlib/2and3/argparse.pyi                |   1 +
 typeshed/stdlib/2and3/asynchat.pyi                |  41 ++
 typeshed/stdlib/2and3/asyncore.pyi                | 127 ++++
 typeshed/stdlib/2and3/fractions.pyi               |  94 +++
 typeshed/stdlib/2and3/locale.pyi                  |  10 +-
 typeshed/stdlib/2and3/logging/__init__.pyi        |  11 +-
 typeshed/stdlib/2and3/mmap.pyi                    |   8 +-
 typeshed/stdlib/2and3/numbers.pyi                 | 140 +++++
 typeshed/stdlib/2and3/operator.pyi                |   4 +-
 typeshed/stdlib/2and3/plistlib.pyi                |   2 +-
 typeshed/stdlib/2and3/tarfile.pyi                 | 178 ++++++
 typeshed/stdlib/3.4/asyncio/tasks.pyi             |  10 +-
 typeshed/stdlib/3.4/enum.pyi                      |   3 +-
 typeshed/stdlib/3.5/pathlib.pyi                   |  36 +-
 typeshed/stdlib/3/atexit.pyi                      |   2 +-
 typeshed/stdlib/3/binascii.pyi                    |  14 +-
 typeshed/stdlib/3/builtins.pyi                    |  51 +-
 typeshed/stdlib/3/calendar.pyi                    |  11 +
 typeshed/stdlib/3/collections/__init__.pyi        |  16 +-
 typeshed/stdlib/3/concurrent/futures/__init__.pyi |   4 -
 typeshed/stdlib/3/concurrent/futures/_base.pyi    |  51 +-
 typeshed/stdlib/3/concurrent/futures/process.pyi  |  49 +-
 typeshed/stdlib/3/concurrent/futures/thread.pyi   |  24 +-
 typeshed/stdlib/3/configparser.pyi                |  37 +-
 typeshed/stdlib/3/datetime.pyi                    |  10 +-
 typeshed/stdlib/3/decimal.pyi                     |   9 +-
 typeshed/stdlib/3/difflib.pyi                     |   4 +-
 typeshed/stdlib/3/http/__init__.pyi               |  68 +++
 typeshed/stdlib/3/inspect.pyi                     |   2 +-
 typeshed/stdlib/3/numbers.pyi                     |  80 ---
 typeshed/stdlib/3/signal.pyi                      | 159 +++--
 typeshed/stdlib/3/socket.pyi                      |   4 +-
 typeshed/stdlib/3/stat.pyi                        |   2 +-
 typeshed/stdlib/3/string.pyi                      |   4 +-
 typeshed/stdlib/3/subprocess.pyi                  |   7 +-
 typeshed/stdlib/3/sys.pyi                         |   2 +-
 typeshed/stdlib/3/tarfile.pyi                     |  33 -
 typeshed/stdlib/3/tempfile.pyi                    |   9 +-
 typeshed/stdlib/3/time.pyi                        |  37 +-
 typeshed/stdlib/3/typing.pyi                      |  38 +-
 typeshed/stdlib/3/unittest.pyi                    |   3 +-
 typeshed/tests/mypy_test.py                       |  18 +-
 typeshed/third_party/2.7/itsdangerous.pyi         | 153 +++++
 typeshed/third_party/2.7/requests/sessions.pyi    |  50 +-
 typeshed/third_party/2.7/six/__init__.pyi         |   2 +-
 typeshed/third_party/2and3/ujson.pyi              |   8 +-
 typeshed/third_party/3/enum.pyi                   |   3 +-
 typeshed/third_party/3/itsdangerous.pyi           | 156 +++++
 typeshed/third_party/3/requests/sessions.pyi      |  48 +-
 typeshed/third_party/3/six/__init__.pyi           |   2 +-
 typeshed/third_party/3/typed_ast/ast27.pyi        |   1 +
 typeshed/third_party/3/typed_ast/ast35.pyi        |   3 +-
 110 files changed, 4329 insertions(+), 2638 deletions(-)

diff --git a/PKG-INFO b/PKG-INFO
index dd5bf99..3c8fe23 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: mypy-lang
-Version: 0.4.4
+Version: 0.4.5
 Summary: Optional static typing for Python
 Home-page: http://www.mypy-lang.org/
 Author: Jukka Lehtosalo
@@ -22,7 +22,7 @@ Classifier: Environment :: Console
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: POSIX
-Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
diff --git a/mypy/binder.py b/mypy/binder.py
index 2a98751..bc633e5 100644
--- a/mypy/binder.py
+++ b/mypy/binder.py
@@ -1,8 +1,8 @@
-from typing import (Any, Dict, List, Set, Iterator)
+from typing import (Any, Dict, List, Set, Iterator, Union)
 from contextlib import contextmanager
 
 from mypy.types import Type, AnyType, PartialType
-from mypy.nodes import (Node, Var)
+from mypy.nodes import (Node, Expression, Var, RefExpr, SymbolTableNode)
 
 from mypy.subtypes import is_subtype
 from mypy.join import join_simple
@@ -96,19 +96,19 @@ class ConditionalTypeBinder:
                 return self.frames[i][key]
         return None
 
-    def push(self, expr: Node, typ: Type) -> None:
-        if not expr.literal:
+    def push(self, node: Node, typ: Type) -> None:
+        if not node.literal:
             return
-        key = expr.literal_hash
+        key = node.literal_hash
         if key not in self.declarations:
-            self.declarations[key] = self.get_declaration(expr)
+            self.declarations[key] = self.get_declaration(node)
             self._add_dependencies(key)
         self._push(key, typ)
 
-    def get(self, expr: Node) -> Type:
+    def get(self, expr: Union[Expression, Var]) -> Type:
         return self._get(expr.literal_hash)
 
-    def cleanse(self, expr: Node) -> None:
+    def cleanse(self, expr: Expression) -> None:
         """Remove all references to a Node from the binder."""
         self._cleanse_key(expr.literal_hash)
 
@@ -165,16 +165,16 @@ class ConditionalTypeBinder:
 
         return result
 
-    def get_declaration(self, expr: Any) -> Type:
-        if hasattr(expr, 'node') and isinstance(expr.node, Var):
-            type = expr.node.type
+    def get_declaration(self, node: Node) -> Type:
+        if isinstance(node, (RefExpr, SymbolTableNode)) and isinstance(node.node, Var):
+            type = node.node.type
             if isinstance(type, PartialType):
                 return None
             return type
         else:
             return None
 
-    def assign_type(self, expr: Node,
+    def assign_type(self, expr: Expression,
                     type: Type,
                     declared_type: Type,
                     restrict_any: bool = False) -> None:
@@ -197,7 +197,6 @@ class ConditionalTypeBinder:
 
         # If x is Any and y is int, after x = y we do not infer that x is int.
         # This could be changed.
-        # Eric: I'm changing it in weak typing mode, since Any is so common.
 
         if (isinstance(self.most_recent_enclosing_type(expr, type), AnyType)
                 and not restrict_any):
@@ -212,7 +211,7 @@ class ConditionalTypeBinder:
             # just copy this variable into a single stored frame.
             self.allow_jump(i)
 
-    def invalidate_dependencies(self, expr: Node) -> None:
+    def invalidate_dependencies(self, expr: Expression) -> None:
         """Invalidate knowledge of types that include expr, but not expr itself.
 
         For example, when expr is foo.bar, invalidate foo.bar.baz.
@@ -223,7 +222,7 @@ class ConditionalTypeBinder:
         for dep in self.dependencies.get(expr.literal_hash, set()):
             self._cleanse_key(dep)
 
-    def most_recent_enclosing_type(self, expr: Node, type: Type) -> Type:
+    def most_recent_enclosing_type(self, expr: Expression, type: Type) -> Type:
         if isinstance(type, AnyType):
             return self.get_declaration(expr)
         key = expr.literal_hash
diff --git a/mypy/build.py b/mypy/build.py
index fe40e3d..b7ee2d0 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -22,17 +22,14 @@ import time
 from os.path import dirname, basename
 
 from typing import (AbstractSet, Dict, Iterable, Iterator, List,
-                    NamedTuple, Optional, Set, Tuple, Union, Mapping)
+                    NamedTuple, Optional, Set, Tuple, Union)
 
-from mypy.types import Type
-from mypy.nodes import (MypyFile, Node, Import, ImportFrom, ImportAll,
-                        SymbolTableNode, MODULE_REF)
+from mypy.nodes import (MypyFile, Import, ImportFrom, ImportAll)
 from mypy.semanal import FirstPass, SemanticAnalyzer, ThirdPass
 from mypy.checker import TypeChecker
+from mypy.indirection import TypeIndirectionVisitor
 from mypy.errors import Errors, CompileError, DecodeError, report_internal_error
-from mypy import fixup
 from mypy.report import Reports
-from mypy import defaults
 from mypy import moduleinfo
 from mypy import util
 from mypy.fixup import fixup_module_pass_one, fixup_module_pass_two
@@ -184,8 +181,9 @@ def build(sources: List[BuildSource],
         dispatch(sources, manager)
         return BuildResult(manager)
     finally:
-        manager.log("Build finished with %d modules, %d types, and %d errors" %
-                    (len(manager.modules),
+        manager.log("Build finished in %.3f seconds with %d modules, %d types, and %d errors" %
+                    (time.time() - manager.start_time,
+                     len(manager.modules),
                      len(manager.type_checker.type_map),
                      manager.errors.num_messages()))
         # Finish the HTML or XML reports even if CompileError was raised.
@@ -305,6 +303,7 @@ CacheMeta = NamedTuple('CacheMeta',
 PRI_HIGH = 5  # top-level "from X import blah"
 PRI_MED = 10  # top-level "import X"
 PRI_LOW = 20  # either form inside a function
+PRI_INDIRECT = 30  # an indirect dependency
 PRI_ALL = 99  # include all priorities
 
 
@@ -340,17 +339,18 @@ class BuildManager:
                  version_id: str) -> None:
         self.start_time = time.time()
         self.data_dir = data_dir
-        self.errors = Errors(options.suppress_error_context)
+        self.errors = Errors(options.hide_error_context, options.show_column_numbers)
         self.errors.set_ignore_prefix(ignore_prefix)
         self.lib_path = tuple(lib_path)
         self.source_set = source_set
         self.reports = reports
         self.options = options
         self.version_id = version_id
-        self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors, options=options)
+        self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors)
         self.modules = self.semantic_analyzer.modules
         self.semantic_analyzer_pass3 = ThirdPass(self.modules, self.errors)
-        self.type_checker = TypeChecker(self.errors, self.modules, options=options)
+        self.type_checker = TypeChecker(self.errors, self.modules)
+        self.indirection_detector = TypeIndirectionVisitor()
         self.missing_modules = set()  # type: Set[str]
         self.stale_modules = set()  # type: Set[str]
         self.rechecked_modules = set()  # type: Set[str]
@@ -450,15 +450,15 @@ class BuildManager:
         if ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
                 (self.options.python_version[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
             self.errors.report(
-                line, "No library stub file for standard library module '{}'".format(id))
-            self.errors.report(line, stub_msg, severity='note', only_once=True)
+                line, 0, "No library stub file for standard library module '{}'".format(id))
+            self.errors.report(line, 0, stub_msg, severity='note', only_once=True)
         elif moduleinfo.is_third_party_module(id):
-            self.errors.report(line, "No library stub file for module '{}'".format(id))
-            self.errors.report(line, stub_msg, severity='note', only_once=True)
+            self.errors.report(line, 0, "No library stub file for module '{}'".format(id))
+            self.errors.report(line, 0, stub_msg, severity='note', only_once=True)
         else:
-            self.errors.report(line, "Cannot find module named '{}'".format(id))
-            self.errors.report(line, '(Perhaps setting MYPYPATH '
-                                     'or using the "--silent-imports" flag would help)',
+            self.errors.report(line, 0, "Cannot find module named '{}'".format(id))
+            self.errors.report(line, 0, '(Perhaps setting MYPYPATH '
+                               'or using the "--silent-imports" flag would help)',
                                severity='note', only_once=True)
 
     def report_file(self, file: MypyFile) -> None:
@@ -467,12 +467,12 @@ class BuildManager:
 
     def log(self, *message: str) -> None:
         if self.options.verbosity >= 1:
-            print('%.3f:LOG: ' % (time.time() - self.start_time), *message, file=sys.stderr)
+            print('LOG: ', *message, file=sys.stderr)
             sys.stderr.flush()
 
     def trace(self, *message: str) -> None:
         if self.options.verbosity >= 2:
-            print('%.3f:TRACE:' % (time.time() - self.start_time), *message, file=sys.stderr)
+            print('TRACE:', *message, file=sys.stderr)
             sys.stderr.flush()
 
 
@@ -758,7 +758,7 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
 
     # Ignore cache if (relevant) options aren't the same.
     cached_options = m.options
-    current_options = select_options_affecting_cache(manager.options)
+    current_options = manager.options.select_options_affecting_cache()
     if cached_options != current_options:
         manager.trace('Metadata abandoned for {}: options differ'.format(id))
         return None
@@ -766,7 +766,7 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
     return m
 
 
-def is_meta_fresh(meta: CacheMeta, id: str, path: str, manager: BuildManager) -> bool:
+def is_meta_fresh(meta: Optional[CacheMeta], id: str, path: str, manager: BuildManager) -> bool:
     if meta is None:
         return False
 
@@ -786,19 +786,6 @@ def is_meta_fresh(meta: CacheMeta, id: str, path: str, manager: BuildManager) ->
     return True
 
 
-def select_options_affecting_cache(options: Options) -> Mapping[str, bool]:
-    return {opt: getattr(options, opt) for opt in OPTIONS_AFFECTING_CACHE}
-
-
-OPTIONS_AFFECTING_CACHE = [
-    "silent_imports",
-    "almost_silent",
-    "disallow_untyped_calls",
-    "disallow_untyped_defs",
-    "check_untyped_defs",
-]
-
-
 def random_string() -> str:
     return binascii.hexlify(os.urandom(8)).decode('ascii')
 
@@ -848,7 +835,10 @@ def write_cache(id: str, path: str, tree: MypyFile,
 
     # Serialize data and analyze interface
     data = tree.serialize()
-    data_str = json.dumps(data, indent=2, sort_keys=True)
+    if manager.options.debug_cache:
+        data_str = json.dumps(data, indent=2, sort_keys=True)
+    else:
+        data_str = json.dumps(data, sort_keys=True)
     interface_hash = compute_hash(data_str)
 
     # Write data cache file, if applicable
@@ -869,6 +859,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
     st = manager.get_stat(path)  # TODO: Handle errors
     mtime = st.st_mtime
     size = st.st_size
+    options = manager.options.clone_for_file(path)
     meta = {'id': id,
             'path': path,
             'mtime': mtime,
@@ -877,7 +868,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
             'dependencies': dependencies,
             'suppressed': suppressed,
             'child_modules': child_modules,
-            'options': select_options_affecting_cache(manager.options),
+            'options': options.select_options_affecting_cache(),
             'dep_prios': dep_prios,
             'interface_hash': interface_hash,
             'version_id': manager.version_id,
@@ -885,8 +876,10 @@ def write_cache(id: str, path: str, tree: MypyFile,
 
     # Write meta cache file
     with open(meta_json_tmp, 'w') as f:
-        json.dump(meta, f, sort_keys=True)
-        f.write('\n')
+        if manager.options.debug_cache:
+            json.dump(meta, f, indent=2, sort_keys=True)
+        else:
+            json.dump(meta, f)
     os.replace(meta_json_tmp, meta_json)
 
     return interface_hash
@@ -1082,6 +1075,9 @@ class State:
     # Contains a hash of the public interface in incremental mode
     interface_hash = ""  # type: str
 
+    # Options, specialized for this file
+    options = None  # type: Options
+
     def __init__(self,
                  id: Optional[str],
                  path: Optional[str],
@@ -1103,9 +1099,10 @@ class State:
         else:
             self.import_context = []
         self.id = id or '__main__'
+        self.options = manager.options.clone_for_file(path or '')
         if not path and source is None:
             file_id = id
-            if id == 'builtins' and manager.options.python_version[0] == 2:
+            if id == 'builtins' and self.options.python_version[0] == 2:
                 # The __builtin__ module is called internally by mypy
                 # 'builtins' in Python 2 mode (similar to Python 3),
                 # but the stub file is __builtin__.pyi.  The reason is
@@ -1118,7 +1115,7 @@ class State:
             path = find_module(file_id, manager.lib_path)
             if path:
                 # In silent mode, don't import .py files, except from stubs.
-                if (manager.options.silent_imports and
+                if (self.options.silent_imports and
                         path.endswith('.py') and (caller_state or ancestor_for)):
                     # (Never silence builtins, even if it's a .py file;
                     # this can happen in tests!)
@@ -1126,7 +1123,7 @@ class State:
                         not ((caller_state and
                               caller_state.tree and
                               caller_state.tree.is_stub))):
-                        if manager.options.almost_silent:
+                        if self.options.almost_silent:
                             if ancestor_for:
                                 self.skipping_ancestor(id, path, ancestor_for)
                             else:
@@ -1139,10 +1136,8 @@ class State:
                 # misspelled module name, missing stub, module not in
                 # search path or the module has not been installed.
                 if caller_state:
-                    suppress_message = ((manager.options.silent_imports and
-                                        not manager.options.almost_silent) or
-                                        (caller_state.tree is not None and
-                                         'import' in caller_state.tree.weak_opts))
+                    suppress_message = (self.options.silent_imports
+                                        and not self.options.almost_silent)
                     if not suppress_message:
                         save_import_context = manager.errors.import_context()
                         manager.errors.set_import_context(caller_state.import_context)
@@ -1158,7 +1153,7 @@ class State:
         self.path = path
         self.xpath = path or '<string>'
         self.source = source
-        if path and source is None and manager.options.incremental:
+        if path and source is None and self.options.incremental:
             self.meta = find_cache_meta(self.id, self.path, manager)
             # TODO: Get mtime if not cached.
             if self.meta is not None:
@@ -1189,11 +1184,11 @@ class State:
         manager = self.manager
         manager.errors.set_import_context([])
         manager.errors.set_file(ancestor_for.xpath)
-        manager.errors.report(-1, "Ancestor package '%s' silently ignored" % (id,),
+        manager.errors.report(-1, -1, "Ancestor package '%s' silently ignored" % (id,),
                               severity='note', only_once=True)
-        manager.errors.report(-1, "(Using --silent-imports, submodule passed on command line)",
+        manager.errors.report(-1, -1, "(Using --silent-imports, submodule passed on command line)",
                               severity='note', only_once=True)
-        manager.errors.report(-1, "(This note brought to you by --almost-silent)",
+        manager.errors.report(-1, -1, "(This note brought to you by --almost-silent)",
                               severity='note', only_once=True)
 
     def skipping_module(self, id: str, path: str) -> None:
@@ -1203,11 +1198,13 @@ class State:
         manager.errors.set_import_context(self.caller_state.import_context)
         manager.errors.set_file(self.caller_state.xpath)
         line = self.caller_line
-        manager.errors.report(line, "Import of '%s' silently ignored" % (id,),
+        manager.errors.report(line, 0,
+                              "Import of '%s' silently ignored" % (id,),
                               severity='note')
-        manager.errors.report(line, "(Using --silent-imports, module not passed on command line)",
+        manager.errors.report(line, 0,
+                              "(Using --silent-imports, module not passed on command line)",
                               severity='note', only_once=True)
-        manager.errors.report(line, "(This note courtesy of --almost-silent)",
+        manager.errors.report(line, 0, "(This note courtesy of --almost-silent)",
                               severity='note', only_once=True)
         manager.errors.set_import_context(save_import_context)
 
@@ -1263,7 +1260,7 @@ class State:
         except CompileError:
             raise
         except Exception as err:
-            report_internal_error(err, self.path, 0, self.manager.errors)
+            report_internal_error(err, self.path, 0, self.manager.errors, self.options)
         self.manager.errors.set_import_context(save_import_context)
         self.check_blockers()
 
@@ -1300,7 +1297,7 @@ class State:
         """
         # TODO: See if it's possible to move this check directly into parse_file in some way.
         # TODO: Find a way to write a test case for this fix.
-        silent_mode = self.manager.options.silent_imports or self.manager.options.almost_silent
+        silent_mode = self.options.silent_imports or self.options.almost_silent
         if not silent_mode:
             return
 
@@ -1333,7 +1330,7 @@ class State:
             if self.path and source is None:
                 try:
                     path = manager.maybe_swap_for_shadow_path(self.path)
-                    source = read_with_python_encoding(path, manager.options.python_version)
+                    source = read_with_python_encoding(path, self.options.python_version)
                 except IOError as ioerr:
                     raise CompileError([
                         "mypy: can't read file '{}': {}".format(self.path, ioerr.strerror)])
@@ -1349,7 +1346,7 @@ class State:
         # this before processing imports, since this may mark some
         # import statements as unreachable.
         first = FirstPass(manager.semantic_analyzer)
-        first.analyze(self.tree, self.xpath, self.id)
+        first.visit_file(self.tree, self.xpath, self.id, self.options)
 
         # Initialize module symbol table, which was populated by the
         # semantic analyzer.
@@ -1377,7 +1374,8 @@ class State:
             if id == '':
                 # Must be from a relative import.
                 manager.errors.set_file(self.xpath)
-                manager.errors.report(line, "No parent module -- cannot perform relative import",
+                manager.errors.report(line, 0,
+                                      "No parent module -- cannot perform relative import",
                                       blocker=True)
                 continue
             if id not in dep_line_map:
@@ -1399,42 +1397,58 @@ class State:
         self.dep_line_map = dep_line_map
         self.check_blockers()
 
-    def patch_parent(self) -> None:
-        # Include module in the symbol table of the enclosing package.
-        if '.' not in self.id:
-            return
-        manager = self.manager
-        modules = manager.modules
-        parent, child = self.id.rsplit('.', 1)
-        if parent in modules:
-            manager.trace("Added %s.%s" % (parent, child))
-            modules[parent].names[child] = SymbolTableNode(MODULE_REF, self.tree, parent)
-        else:
-            manager.log("Hm... couldn't add %s.%s" % (parent, child))
-
     def semantic_analysis(self) -> None:
         with self.wrap_context():
-            self.manager.semantic_analyzer.visit_file(self.tree, self.xpath)
+            self.manager.semantic_analyzer.visit_file(self.tree, self.xpath, self.options)
 
     def semantic_analysis_pass_three(self) -> None:
         with self.wrap_context():
-            self.manager.semantic_analyzer_pass3.visit_file(self.tree, self.xpath)
-            if self.manager.options.dump_type_stats:
+            self.manager.semantic_analyzer_pass3.visit_file(self.tree, self.xpath, self.options)
+            if self.options.dump_type_stats:
                 dump_type_stats(self.tree, self.xpath)
 
     def type_check(self) -> None:
         manager = self.manager
-        if manager.options.semantic_analysis_only:
+        if self.options.semantic_analysis_only:
             return
         with self.wrap_context():
-            manager.type_checker.visit_file(self.tree, self.xpath)
-            if manager.options.dump_inference_stats:
+            manager.type_checker.visit_file(self.tree, self.xpath, self.options)
+
+            if self.options.incremental:
+                self._patch_indirect_dependencies(manager.type_checker.module_refs)
+
+            if self.options.dump_inference_stats:
                 dump_type_stats(self.tree, self.xpath, inferred=True,
                                 typemap=manager.type_checker.type_map)
             manager.report_file(self.tree)
 
+    def _patch_indirect_dependencies(self, module_refs: Set[str]) -> None:
+        types = self.manager.type_checker.module_type_map.values()
+        valid = self.valid_references()
+
+        encountered = self.manager.indirection_detector.find_modules(types) | module_refs
+        extra = encountered - valid
+
+        for dep in sorted(extra):
+            if dep not in self.manager.modules:
+                continue
+            if dep not in self.suppressed and dep not in self.manager.missing_modules:
+                self.dependencies.append(dep)
+                self.priorities[dep] = PRI_INDIRECT
+            elif dep not in self.suppressed and dep in self.manager.missing_modules:
+                self.suppressed.append(dep)
+
+    def valid_references(self) -> Set[str]:
+        valid_refs = set(self.dependencies + self.suppressed + self.ancestors)
+        valid_refs .add(self.id)
+
+        if "os" in valid_refs:
+            valid_refs.add("os.path")
+
+        return valid_refs
+
     def write_cache(self) -> None:
-        if self.path and self.manager.options.incremental and not self.manager.errors.is_errors():
+        if self.path and self.options.incremental and not self.manager.errors.is_errors():
             dep_prios = [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies]
             new_interface_hash = write_cache(
                 self.id, self.path, self.tree,
@@ -1455,6 +1469,7 @@ def dispatch(sources: List[BuildSource], manager: BuildManager) -> None:
     manager.log("Loaded graph with %d nodes" % len(graph))
     process_graph(graph, manager)
     if manager.options.warn_unused_ignores:
+        # TODO: This could also be a per-file option.
         manager.errors.generate_unused_ignore_notes()
 
 
@@ -1474,7 +1489,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
             continue
         if st.id in graph:
             manager.errors.set_file(st.xpath)
-            manager.errors.report(-1, "Duplicate module named '%s'" % st.id)
+            manager.errors.report(-1, -1, "Duplicate module named '%s'" % st.id)
             manager.errors.raise_error()
         graph[st.id] = st
         new.append(st)
@@ -1515,6 +1530,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
     for id, g in graph.items():
         if g.has_new_submodules():
             g.parse_file()
+            g.fix_suppressed_dependencies(graph)
             g.mark_interface_stale()
     return graph
 
@@ -1524,6 +1540,9 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
     sccs = sorted_components(graph)
     manager.log("Found %d SCCs; largest has %d nodes" %
                 (len(sccs), max(len(scc) for scc in sccs)))
+
+    fresh_scc_queue = []  # type: List[List[str]]
+
     # We're processing SCCs from leaves (those without further
     # dependencies) to roots (those from which everything else can be
     # reached).
@@ -1596,39 +1615,42 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
         elif undeps:
             fresh_msg = "stale due to changed suppression (%s)" % " ".join(sorted(undeps))
         elif stale_scc:
-            fresh_msg = "inherently stale (%s)" % " ".join(sorted(stale_scc))
+            fresh_msg = "inherently stale"
+            if stale_scc != ascc:
+                fresh_msg += " (%s)" % " ".join(sorted(stale_scc))
             if stale_deps:
                 fresh_msg += " with stale deps (%s)" % " ".join(sorted(stale_deps))
         else:
             fresh_msg = "stale due to deps (%s)" % " ".join(sorted(stale_deps))
-        if len(scc) == 1:
-            manager.log("Processing SCC singleton (%s) as %s" % (" ".join(scc), fresh_msg))
-        else:
-            manager.log("Processing SCC of size %d (%s) as %s" %
-                        (len(scc), " ".join(scc), fresh_msg))
+
+        scc_str = " ".join(scc)
         if fresh:
-            process_fresh_scc(graph, scc)
+            manager.log("Queuing fresh SCC (%s)" % scc_str)
+            fresh_scc_queue.append(scc)
         else:
+            if len(fresh_scc_queue) > 0:
+                manager.log("Processing the last {} queued SCCs".format(len(fresh_scc_queue)))
+                # Defer processing fresh SCCs until we actually run into a stale SCC
+                # and need the earlier modules to be loaded.
+                #
+                # Note that `process_graph` may end with us not having processed every
+                # single fresh SCC. This is intentional -- we don't need those modules
+                # loaded if there are no more stale SCCs to be rechecked.
+                #
+                # TODO: see if it's possible to determine if we need to process only a
+                # _subset_ of the past SCCs instead of having to process them all.
+                for prev_scc in fresh_scc_queue:
+                    process_fresh_scc(graph, prev_scc)
+                fresh_scc_queue = []
+            size = len(scc)
+            if size == 1:
+                manager.log("Processing SCC singleton (%s) as %s" % (scc_str, fresh_msg))
+            else:
+                manager.log("Processing SCC of size %d (%s) as %s" % (size, scc_str, fresh_msg))
             process_stale_scc(graph, scc)
 
-        # TODO: This is a workaround to get around the "chaining imports" problem
-        # with the interface checks.
-        #
-        # That is, if we have a file named `module_a.py` which does:
-        #
-        #     import module_b
-        #     module_b.module_c.foo(3)
-        #
-        # ...and if the type signature of `module_c.foo(...)` were to change,
-        # module_a_ would not be rechecked since the interface of `module_b`
-        # would not be considered changed.
-        #
-        # As a workaround, this check will force a module's interface to be
-        # considered stale if anything it imports has a stale interface,
-        # which ensures these changes are caught and propagated.
-        if len(stale_deps) > 0:
-            for id in scc:
-                graph[id].mark_interface_stale()
+    sccs_left = len(fresh_scc_queue)
+    manager.log("{} fresh SCCs left in queue (and will remain unprocessed)".format(sccs_left))
 
 
 def order_ascc(graph: Graph, ascc: AbstractSet[str], pri_max: int = PRI_ALL) -> List[str]:
@@ -1683,8 +1705,6 @@ def process_fresh_scc(graph: Graph, scc: List[str]) -> None:
     for id in scc:
         graph[id].load_tree()
     for id in scc:
-        graph[id].patch_parent()
-    for id in scc:
         graph[id].fix_cross_refs()
     for id in scc:
         graph[id].calculate_mros()
@@ -1698,8 +1718,6 @@ def process_stale_scc(graph: Graph, scc: List[str]) -> None:
         graph[id].parse_file()
         graph[id].fix_suppressed_dependencies(graph)
     for id in scc:
-        graph[id].patch_parent()
-    for id in scc:
         graph[id].semantic_analysis()
     for id in scc:
         graph[id].semantic_analysis_pass_three()
diff --git a/mypy/checker.py b/mypy/checker.py
index 6a5b0d9..b5669f4 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -1,17 +1,15 @@
 """Mypy type checker."""
 
 import itertools
-import contextlib
-import os
-import os.path
+import fnmatch
 
 from typing import (
-    Any, Dict, Set, List, cast, Tuple, TypeVar, Union, Optional, NamedTuple
+    Dict, Set, List, cast, Tuple, TypeVar, Union, Optional, NamedTuple
 )
 
 from mypy.errors import Errors, report_internal_error
 from mypy.nodes import (
-    SymbolTable, Node, MypyFile, Var, Expression,
+    SymbolTable, Node, MypyFile, Var, Expression, Lvalue,
     OverloadedFuncDef, FuncDef, FuncItem, FuncBase, TypeInfo,
     ClassDef, GDEF, Block, AssignmentStmt, NameExpr, MemberExpr, IndexExpr,
     TupleExpr, ListExpr, ExpressionStmt, ReturnStmt, IfStmt,
@@ -40,7 +38,6 @@ from mypy.sametypes import is_same_type
 from mypy.messages import MessageBuilder
 import mypy.checkexpr
 from mypy.checkmember import map_type_from_supertype
-from mypy import defaults
 from mypy import messages
 from mypy.subtypes import (
     is_subtype, is_equivalent, is_proper_subtype,
@@ -49,7 +46,7 @@ from mypy.subtypes import (
 from mypy.maptype import map_instance_to_supertype
 from mypy.semanal import self_type, set_callable_name, refers_to_fullname
 from mypy.erasetype import erase_typevars
-from mypy.expandtype import expand_type_by_instance, expand_type
+from mypy.expandtype import expand_type
 from mypy.visitor import NodeVisitor
 from mypy.join import join_types
 from mypy.treetransform import TransformVisitor
@@ -86,6 +83,8 @@ class TypeChecker(NodeVisitor[Type]):
     msg = None  # type: MessageBuilder
     # Types of type checked nodes
     type_map = None  # type: Dict[Node, Type]
+    # Types of type checked nodes within this specific module
+    module_type_map = None  # type: Dict[Node, Type]
 
     # Helper for managing conditional types
     binder = None  # type: ConditionalTypeBinder
@@ -100,8 +99,6 @@ class TypeChecker(NodeVisitor[Type]):
     dynamic_funcs = None  # type: List[bool]
     # Stack of functions being type checked
     function_stack = None  # type: List[FuncItem]
-    # Do weak type checking in this file
-    weak_opts = set()        # type: Set[str]
     # Stack of collections of variables with partial types
     partial_types = None  # type: List[Dict[Var, Context]]
     globals = None  # type: SymbolTable
@@ -114,40 +111,55 @@ class TypeChecker(NodeVisitor[Type]):
     # Have we deferred the current function? If yes, don't infer additional
     # types during this pass within the function.
     current_node_deferred = False
+    # Is this file a typeshed stub?
     is_typeshed_stub = False
+    # Should strict Optional-related errors be suppressed in this file?
+    suppress_none_errors = False  # TODO: Get it from options instead
     options = None  # type: Options
 
-    def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Options) -> None:
+    # The set of all dependencies (suppressed or not) that this module accesses, either
+    # directly or indirectly.
+    module_refs = None  # type: Set[str]
+
+    def __init__(self, errors: Errors, modules: Dict[str, MypyFile]) -> None:
         """Construct a type checker.
 
         Use errors to report type check errors.
         """
         self.errors = errors
         self.modules = modules
-        self.options = options
         self.msg = MessageBuilder(errors, modules)
         self.type_map = {}
+        self.module_type_map = {}
         self.binder = ConditionalTypeBinder()
         self.expr_checker = mypy.checkexpr.ExpressionChecker(self, self.msg)
         self.return_types = []
         self.type_context = []
         self.dynamic_funcs = []
         self.function_stack = []
-        self.weak_opts = set()  # type: Set[str]
         self.partial_types = []
         self.deferred_nodes = []
         self.pass_num = 0
         self.current_node_deferred = False
+        self.module_refs = set()
 
-    def visit_file(self, file_node: MypyFile, path: str) -> None:
+    def visit_file(self, file_node: MypyFile, path: str, options: Options) -> None:
         """Type check a mypy file with the given path."""
+        self.options = options
         self.pass_num = 0
         self.is_stub = file_node.is_stub
         self.errors.set_file(path)
         self.globals = file_node.names
-        self.weak_opts = file_node.weak_opts
         self.enter_partial_types()
         self.is_typeshed_stub = self.errors.is_typeshed_file(path)
+        self.module_type_map = {}
+        self.module_refs = set()
+        if self.options.strict_optional_whitelist is None:
+            self.suppress_none_errors = not self.options.show_none_errors
+        else:
+            self.suppress_none_errors = not any(fnmatch.fnmatch(path, pattern)
+                                                for pattern
+                                                in self.options.strict_optional_whitelist)
 
         for d in file_node.defs:
             self.accept(d)
@@ -168,6 +180,8 @@ class TypeChecker(NodeVisitor[Type]):
                 self.fail(messages.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s),
                           all_.node)
 
+        del self.options
+
     def check_second_pass(self) -> None:
         """Run second pass of type checking which goes through deferred nodes."""
         self.pass_num = 1
@@ -201,15 +215,15 @@ class TypeChecker(NodeVisitor[Type]):
         try:
             typ = node.accept(self)
         except Exception as err:
-            report_internal_error(err, self.errors.file, node.line, self.errors)
+            report_internal_error(err, self.errors.file, node.line, self.errors, self.options)
         self.type_context.pop()
         self.store_type(node, typ)
-        if self.typing_mode_none():
+        if not self.in_checked_function():
             return AnyType()
         else:
             return typ
 
-    def accept_loop(self, body: Node, else_body: Node = None) -> Type:
+    def accept_loop(self, body: Union[IfStmt, Block], else_body: Block = None) -> Type:
         """Repeatedly type check a loop body until the frame doesn't change.
 
         Then check the else_body.
@@ -904,7 +918,9 @@ class TypeChecker(NodeVisitor[Type]):
         with self.binder.frame_context():
             self.accept(defn.defs)
         self.binder = old_binder
-        self.check_multiple_inheritance(typ)
+        if not defn.has_incompatible_baseclass:
+            # Otherwise we've already found errors; more errors are not useful
+            self.check_multiple_inheritance(typ)
         self.leave_partial_types()
         self.errors.pop_type()
 
@@ -1006,7 +1022,7 @@ class TypeChecker(NodeVisitor[Type]):
 
         Handle all kinds of assignment statements (simple, indexed, multiple).
         """
-        self.check_assignment(s.lvalues[-1], s.rvalue, s.type is None)
+        self.check_assignment(s.lvalues[-1], s.rvalue, s.type is None, s.new_syntax)
 
         if len(s.lvalues) > 1:
             # Chained assignment (e.g. x = y = ...).
@@ -1017,7 +1033,8 @@ class TypeChecker(NodeVisitor[Type]):
             for lv in s.lvalues[:-1]:
                 self.check_assignment(lv, rvalue, s.type is None)
 
-    def check_assignment(self, lvalue: Node, rvalue: Node, infer_lvalue_type: bool = True) -> None:
+    def check_assignment(self, lvalue: Lvalue, rvalue: Expression, infer_lvalue_type: bool = True,
+                         new_syntax: bool = False) -> None:
         """Type check a single assignment: lvalue = rvalue."""
         if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr):
             self.check_assignment_to_multiple_lvalues(lvalue.items, rvalue, lvalue,
@@ -1054,7 +1071,8 @@ class TypeChecker(NodeVisitor[Type]):
                 elif (is_literal_none(rvalue) and
                         isinstance(lvalue, NameExpr) and
                         isinstance(lvalue.node, Var) and
-                        lvalue.node.is_initialized_in_class):
+                        lvalue.node.is_initialized_in_class and
+                        not new_syntax):
                     # Allow None's to be assigned to class variables with non-Optional types.
                     rvalue_type = lvalue_type
                 else:
@@ -1064,7 +1082,7 @@ class TypeChecker(NodeVisitor[Type]):
                     self.binder.assign_type(lvalue,
                                             rvalue_type,
                                             lvalue_type,
-                                            self.typing_mode_weak())
+                                            False)
             elif index_lvalue:
                 self.check_indexed_assignment(index_lvalue, rvalue, rvalue)
 
@@ -1072,7 +1090,7 @@ class TypeChecker(NodeVisitor[Type]):
                 self.infer_variable_type(inferred, lvalue, self.accept(rvalue),
                                          rvalue)
 
-    def check_assignment_to_multiple_lvalues(self, lvalues: List[Node], rvalue: Node,
+    def check_assignment_to_multiple_lvalues(self, lvalues: List[Lvalue], rvalue: Expression,
                                              context: Context,
                                              infer_lvalue_type: bool = True) -> None:
         if isinstance(rvalue, TupleExpr) or isinstance(rvalue, ListExpr):
@@ -1107,7 +1125,7 @@ class TypeChecker(NodeVisitor[Type]):
         else:
             self.check_multi_assignment(lvalues, rvalue, context, infer_lvalue_type)
 
-    def check_rvalue_count_in_assignment(self, lvalues: List[Node], rvalue_count: int,
+    def check_rvalue_count_in_assignment(self, lvalues: List[Lvalue], rvalue_count: int,
                                          context: Context) -> bool:
         if any(isinstance(lvalue, StarExpr) for lvalue in lvalues):
             if len(lvalues) - 1 > rvalue_count:
@@ -1120,8 +1138,8 @@ class TypeChecker(NodeVisitor[Type]):
             return False
         return True
 
-    def check_multi_assignment(self, lvalues: List[Node],
-                               rvalue: Node,
+    def check_multi_assignment(self, lvalues: List[Lvalue],
+                               rvalue: Expression,
                                context: Context,
                                infer_lvalue_type: bool = True,
                                msg: str = None) -> None:
@@ -1143,7 +1161,7 @@ class TypeChecker(NodeVisitor[Type]):
             self.check_multi_assignment_from_iterable(lvalues, rvalue_type,
                                                      context, infer_lvalue_type)
 
-    def check_multi_assignment_from_tuple(self, lvalues: List[Node], rvalue: Node,
+    def check_multi_assignment_from_tuple(self, lvalues: List[Lvalue], rvalue: Expression,
                                           rvalue_type: TupleType, context: Context,
                                           undefined_rvalue: bool,
                                           infer_lvalue_type: bool = True) -> None:
@@ -1166,14 +1184,14 @@ class TypeChecker(NodeVisitor[Type]):
             for lv, rv_type in zip(left_lvs, left_rv_types):
                 self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type)
             if star_lv:
-                nodes = [self.temp_node(rv_type, context) for rv_type in star_rv_types]
-                list_expr = ListExpr(nodes)
+                list_expr = ListExpr([self.temp_node(rv_type, context)
+                                      for rv_type in star_rv_types])
                 list_expr.set_line(context.get_line())
                 self.check_assignment(star_lv.expr, list_expr, infer_lvalue_type)
             for lv, rv_type in zip(right_lvs, right_rv_types):
                 self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type)
 
-    def lvalue_type_for_inference(self, lvalues: List[Node], rvalue_type: TupleType) -> Type:
+    def lvalue_type_for_inference(self, lvalues: List[Lvalue], rvalue_type: TupleType) -> Type:
         star_index = next((i for i, lv in enumerate(lvalues)
                            if isinstance(lv, StarExpr)), len(lvalues))
         left_lvs = lvalues[:star_index]
@@ -1184,7 +1202,7 @@ class TypeChecker(NodeVisitor[Type]):
 
         type_parameters = []  # type: List[Type]
 
-        def append_types_for_inference(lvs: List[Node], rv_types: List[Type]) -> None:
+        def append_types_for_inference(lvs: List[Expression], rv_types: List[Type]) -> None:
             for lv, rv_type in zip(lvs, rv_types):
                 sub_lvalue_type, index_expr, inferred = self.check_lvalue(lv)
                 if sub_lvalue_type:
@@ -1229,7 +1247,7 @@ class TypeChecker(NodeVisitor[Type]):
                                                         [AnyType()])) and
                 isinstance(type, Instance))
 
-    def check_multi_assignment_from_iterable(self, lvalues: List[Node], rvalue_type: Type,
+    def check_multi_assignment_from_iterable(self, lvalues: List[Lvalue], rvalue_type: Type,
                                              context: Context,
                                              infer_lvalue_type: bool = True) -> None:
         if self.type_is_iterable(rvalue_type):
@@ -1244,7 +1262,7 @@ class TypeChecker(NodeVisitor[Type]):
         else:
             self.msg.type_not_iterable(rvalue_type, context)
 
-    def check_lvalue(self, lvalue: Node) -> Tuple[Type, IndexExpr, Var]:
+    def check_lvalue(self, lvalue: Lvalue) -> Tuple[Type, IndexExpr, Var]:
         lvalue_type = None  # type: Type
         index_lvalue = None  # type: IndexExpr
         inferred = None  # type: Var
@@ -1274,7 +1292,7 @@ class TypeChecker(NodeVisitor[Type]):
 
         return lvalue_type, index_lvalue, inferred
 
-    def is_definition(self, s: Node) -> bool:
+    def is_definition(self, s: Lvalue) -> bool:
         if isinstance(s, NameExpr):
             if s.is_def:
                 return True
@@ -1290,13 +1308,10 @@ class TypeChecker(NodeVisitor[Type]):
             return s.is_def
         return False
 
-    def infer_variable_type(self, name: Var, lvalue: Node,
+    def infer_variable_type(self, name: Var, lvalue: Lvalue,
                             init_type: Type, context: Context) -> None:
         """Infer the type of initialized variables from initializer type."""
-        if self.typing_mode_weak():
-            self.set_inferred_type(name, lvalue, AnyType())
-            self.binder.assign_type(lvalue, init_type, self.binder.get_declaration(lvalue), True)
-        elif self.is_unusable_type(init_type):
+        if self.is_unusable_type(init_type):
             self.check_usable_type(init_type, context)
             self.set_inference_error_fallback_type(name, lvalue, init_type, context)
         elif isinstance(init_type, DeletedType):
@@ -1317,7 +1332,7 @@ class TypeChecker(NodeVisitor[Type]):
 
             self.set_inferred_type(name, lvalue, init_type)
 
-    def infer_partial_type(self, name: Var, lvalue: Node, init_type: Type) -> bool:
+    def infer_partial_type(self, name: Var, lvalue: Lvalue, init_type: Type) -> bool:
         if isinstance(init_type, (NoneTyp, UninhabitedType)):
             partial_type = PartialType(None, name, [init_type])
         elif isinstance(init_type, Instance):
@@ -1336,7 +1351,7 @@ class TypeChecker(NodeVisitor[Type]):
         self.partial_types[-1][name] = lvalue
         return True
 
-    def set_inferred_type(self, var: Var, lvalue: Node, type: Type) -> None:
+    def set_inferred_type(self, var: Var, lvalue: Lvalue, type: Type) -> None:
         """Store inferred variable type.
 
         Store the type to both the variable node and the expression node that
@@ -1346,7 +1361,7 @@ class TypeChecker(NodeVisitor[Type]):
             var.type = type
             self.store_type(lvalue, type)
 
-    def set_inference_error_fallback_type(self, var: Var, lvalue: Node, type: Type,
+    def set_inference_error_fallback_type(self, var: Var, lvalue: Lvalue, type: Type,
                                           context: Context) -> None:
         """If errors on context line are ignored, store dummy type for variable.
 
@@ -1361,7 +1376,7 @@ class TypeChecker(NodeVisitor[Type]):
         if context.get_line() in self.errors.ignored_lines[self.errors.file]:
             self.set_inferred_type(var, lvalue, AnyType())
 
-    def narrow_type_from_binder(self, expr: Node, known_type: Type) -> Type:
+    def narrow_type_from_binder(self, expr: Expression, known_type: Type) -> Type:
         if expr.literal >= LITERAL_TYPE:
             restriction = self.binder.get(expr)
             if restriction:
@@ -1369,8 +1384,8 @@ class TypeChecker(NodeVisitor[Type]):
                 return ans
         return known_type
 
-    def check_simple_assignment(self, lvalue_type: Type, rvalue: Node,
-                                context: Node,
+    def check_simple_assignment(self, lvalue_type: Type, rvalue: Expression,
+                                context: Context,
                                 msg: str = messages.INCOMPATIBLE_TYPES_IN_ASSIGNMENT,
                                 lvalue_name: str = 'variable',
                                 rvalue_name: str = 'expression') -> Type:
@@ -1381,8 +1396,6 @@ class TypeChecker(NodeVisitor[Type]):
             rvalue_type = self.accept(rvalue, lvalue_type)
             if isinstance(rvalue_type, DeletedType):
                 self.msg.deleted_as_rvalue(rvalue_type, context)
-            if self.typing_mode_weak():
-                return rvalue_type
             if isinstance(lvalue_type, DeletedType):
                 self.msg.deleted_as_lvalue(lvalue_type, context)
             else:
@@ -1392,7 +1405,7 @@ class TypeChecker(NodeVisitor[Type]):
             return rvalue_type
 
     def check_indexed_assignment(self, lvalue: IndexExpr,
-                                 rvalue: Node, context: Context) -> None:
+                                 rvalue: Expression, context: Context) -> None:
         """Type check indexed assignment base[index] = rvalue.
 
         The lvalue argument is the base[index] expression.
@@ -1407,7 +1420,7 @@ class TypeChecker(NodeVisitor[Type]):
                                      context)
 
     def try_infer_partial_type_from_indexed_assignment(
-            self, lvalue: IndexExpr, rvalue: Node) -> None:
+            self, lvalue: IndexExpr, rvalue: Expression) -> None:
         # TODO: Should we share some of this with try_infer_partial_type?
         if isinstance(lvalue.base, RefExpr) and isinstance(lvalue.base.node, Var):
             var = lvalue.base.node
@@ -1477,7 +1490,7 @@ class TypeChecker(NodeVisitor[Type]):
                 if isinstance(return_type, (Void, NoneTyp, AnyType)):
                     return None
 
-                if self.typing_mode_full():
+                if self.in_checked_function():
                     self.fail(messages.RETURN_VALUE_EXPECTED, s)
 
     def wrap_generic_type(self, typ: Instance, rtyp: Instance, check_type:
@@ -1512,10 +1525,7 @@ class TypeChecker(NodeVisitor[Type]):
             for e, b in zip(s.expr, s.body):
                 t = self.accept(e)
                 self.check_usable_type(t, e)
-                if_map, else_map = find_isinstance_check(
-                    e, self.type_map,
-                    self.typing_mode_weak()
-                )
+                if_map, else_map = find_isinstance_check(e, self.type_map)
                 if if_map is None:
                     # The condition is always false
                     # XXX should issue a warning?
@@ -1571,10 +1581,7 @@ class TypeChecker(NodeVisitor[Type]):
         self.accept(s.expr)
 
         # If this is asserting some isinstance check, bind that type in the following code
-        true_map, _ = find_isinstance_check(
-            s.expr, self.type_map,
-            self.typing_mode_weak()
-        )
+        true_map, _ = find_isinstance_check(s.expr, self.type_map)
 
         if true_map:
             for var, type in true_map.items():
@@ -1588,7 +1595,7 @@ class TypeChecker(NodeVisitor[Type]):
         if s.from_expr:
             self.type_check_raise(s.from_expr, s)
 
-    def type_check_raise(self, e: Node, s: RaiseStmt) -> None:
+    def type_check_raise(self, e: Expression, s: RaiseStmt) -> None:
         typ = self.accept(e)
         if isinstance(typ, FunctionLike):
             if typ.is_type_obj():
@@ -1679,7 +1686,7 @@ class TypeChecker(NodeVisitor[Type]):
                 breaking_out = breaking_out and self.binder.last_pop_breaking_out
         return breaking_out
 
-    def visit_except_handler_test(self, n: Node) -> Type:
+    def visit_except_handler_test(self, n: Expression) -> Type:
         """Type check an exception handler test clause."""
         type = self.accept(n)
 
@@ -1715,7 +1722,7 @@ class TypeChecker(NodeVisitor[Type]):
         self.analyze_index_variables(s.index, item_type, s)
         self.accept_loop(s.body, s.else_body)
 
-    def analyze_async_iterable_item_type(self, expr: Node) -> Type:
+    def analyze_async_iterable_item_type(self, expr: Expression) -> Type:
         """Analyse async iterable expression and return iterator item type."""
         iterable = self.accept(expr)
 
@@ -1734,7 +1741,7 @@ class TypeChecker(NodeVisitor[Type]):
         return self.check_awaitable_expr(awaitable, expr,
                                          messages.INCOMPATIBLE_TYPES_IN_ASYNC_FOR)
 
-    def analyze_iterable_item_type(self, expr: Node) -> Type:
+    def analyze_iterable_item_type(self, expr: Expression) -> Type:
         """Analyse iterable expression and return iterator item type."""
         iterable = self.accept(expr)
 
@@ -1769,7 +1776,7 @@ class TypeChecker(NodeVisitor[Type]):
                                                          expr)
             return echk.check_call(method, [], [], expr)[0]
 
-    def analyze_index_variables(self, index: Node, item_type: Type,
+    def analyze_index_variables(self, index: Expression, item_type: Type,
                                 context: Context) -> None:
         """Type check or infer for loop or list comprehension index vars."""
         self.check_assignment(index, self.temp_node(item_type, context))
@@ -1783,7 +1790,7 @@ class TypeChecker(NodeVisitor[Type]):
             c.line = s.line
             return c.accept(self)
         else:
-            def flatten(t: Node) -> List[Node]:
+            def flatten(t: Expression) -> List[Expression]:
                 """Flatten a nested sequence of tuples/lists into one list of nodes."""
                 if isinstance(t, TupleExpr) or isinstance(t, ListExpr):
                     return [b for a in t.items for b in flatten(a)]
@@ -1796,7 +1803,7 @@ class TypeChecker(NodeVisitor[Type]):
                     self.binder.assign_type(elt,
                                             DeletedType(source=elt.name),
                                             self.binder.get_declaration(elt),
-                                            self.typing_mode_weak())
+                                            False)
             return None
 
     def visit_decorator(self, e: Decorator) -> Type:
@@ -2091,7 +2098,7 @@ class TypeChecker(NodeVisitor[Type]):
         expected_item_type = self.get_generator_yield_type(return_type, False)
         if e.expr is None:
             if (not isinstance(expected_item_type, (Void, NoneTyp, AnyType))
-                    and self.typing_mode_full()):
+                    and self.in_checked_function()):
                 self.fail(messages.YIELD_VALUE_EXPECTED, e)
         else:
             actual_item_type = self.accept(e.expr, expected_item_type)
@@ -2125,6 +2132,8 @@ class TypeChecker(NodeVisitor[Type]):
             if self.is_unusable_type(subtype):
                 self.msg.does_not_return_value(subtype, context)
             else:
+                if self.should_suppress_optional_error([subtype]):
+                    return False
                 extra_info = []  # type: List[str]
                 if subtype_label is not None or supertype_label is not None:
                     subtype_str, supertype_str = self.msg.format_distinctly(subtype, supertype)
@@ -2137,6 +2146,17 @@ class TypeChecker(NodeVisitor[Type]):
                 self.fail(msg, context)
             return False
 
+    def contains_none(self, t: Type) -> bool:
+        return (
+            isinstance(t, NoneTyp) or
+            (isinstance(t, UnionType) and any(self.contains_none(ut) for ut in t.items)) or
+            (isinstance(t, TupleType) and any(self.contains_none(tt) for tt in t.items)) or
+            (isinstance(t, Instance) and t.args and any(self.contains_none(it) for it in t.args))
+        )
+
+    def should_suppress_optional_error(self, related_types: List[Type]) -> bool:
+        return self.suppress_none_errors and any(self.contains_none(t) for t in related_types)
+
     def named_type(self, name: str) -> Instance:
         """Return an instance type with type given by the name and no
         type arguments. For example, named_type('builtins.object')
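
The new contains_none/should_suppress_optional_error pair above lets the checker
swallow an error whenever suppress_none_errors is set and any related type involves
None. A minimal, self-contained sketch of the recursion, using simplified stand-ins
for mypy's type objects (the toy classes below are illustrative, not the real mypy types):

    class NoneTyp: pass

    class UnionType:
        def __init__(self, items): self.items = items

    class TupleType:
        def __init__(self, items): self.items = items

    class Instance:
        def __init__(self, args=()): self.args = args

    def contains_none(t):
        # Mirrors the method above: True if None occurs anywhere inside the type.
        return bool(
            isinstance(t, NoneTyp) or
            (isinstance(t, UnionType) and any(contains_none(ut) for ut in t.items)) or
            (isinstance(t, TupleType) and any(contains_none(tt) for tt in t.items)) or
            (isinstance(t, Instance) and t.args and any(contains_none(it) for it in t.args))
        )

    assert contains_none(UnionType([Instance(), NoneTyp()]))       # e.g. Optional[X]
    assert contains_none(Instance([NoneTyp()]))                     # e.g. List[None]
    assert not contains_none(TupleType([Instance(), Instance()]))
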
@@ -2186,33 +2206,20 @@ class TypeChecker(NodeVisitor[Type]):
     def store_type(self, node: Node, typ: Type) -> None:
         """Store the type of a node in the type map."""
         self.type_map[node] = typ
+        if typ is not None:
+            self.module_type_map[node] = typ
 
-    def typing_mode_none(self) -> bool:
-        if self.is_dynamic_function() and not self.options.check_untyped_defs:
-            return not self.weak_opts
-        elif self.function_stack:
-            return False
-        else:
-            return False
+    def in_checked_function(self) -> bool:
+        """Should we type-check the current function?
 
-    def typing_mode_weak(self) -> bool:
-        if self.is_dynamic_function() and not self.options.check_untyped_defs:
-            return bool(self.weak_opts)
-        elif self.function_stack:
-            return False
-        else:
-            return 'global' in self.weak_opts
-
-    def typing_mode_full(self) -> bool:
-        if self.is_dynamic_function() and not self.options.check_untyped_defs:
-            return False
-        elif self.function_stack:
-            return True
-        else:
-            return 'global' not in self.weak_opts
-
-    def is_dynamic_function(self) -> bool:
-        return len(self.dynamic_funcs) > 0 and self.dynamic_funcs[-1]
+        - Yes if --check-untyped-defs is set.
+        - Yes outside functions.
+        - Yes in annotated functions.
+        - No otherwise.
+        """
+        return (self.options.check_untyped_defs
+                or not self.dynamic_funcs
+                or not self.dynamic_funcs[-1])
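
The new in_checked_function above collapses the old typing_mode_none/weak/full trio
into a single rule. A minimal sketch of just that boolean logic (a plain function, not
the real TypeChecker; dynamic_funcs[-1] is True while inside an unannotated function):

    def in_checked_function(check_untyped_defs, dynamic_funcs):
        # type: (bool, list) -> bool
        return check_untyped_defs or not dynamic_funcs or not dynamic_funcs[-1]

    assert in_checked_function(False, [])           # module top level: always checked
    assert in_checked_function(False, [False])      # annotated function: checked
    assert not in_checked_function(False, [True])   # unannotated function: skipped
    assert in_checked_function(True, [True])        # --check-untyped-defs: checked anyway
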
 
     def lookup(self, name: str, kind: int) -> SymbolTableNode:
         """Look up a definition from the symbol table with the given name.
@@ -2236,7 +2243,16 @@ class TypeChecker(NodeVisitor[Type]):
             n = self.modules[parts[0]]
             for i in range(1, len(parts) - 1):
                 n = cast(MypyFile, n.names.get(parts[i], None).node)
-            return n.names[parts[-1]]
+            last = parts[-1]
+            if last in n.names:
+                return n.names[last]
+            elif len(parts) == 2 and parts[0] == 'builtins':
+                raise KeyError("Could not find builtin symbol '{}'. (Are you running a "
+                               "test case? If so, make sure to include a fixture that "
+                               "defines this symbol.)".format(last))
+            else:
+                msg = "Failed qualified lookup: '{}' (fullname = '{}')."
+                raise KeyError(msg.format(last, name))
 
     def enter_partial_types(self) -> None:
         """Push a new scope for collecting partial types."""
@@ -2284,7 +2300,7 @@ class TypeChecker(NodeVisitor[Type]):
         if self.is_unusable_type(typ):
             self.msg.does_not_return_value(typ, context)
 
-    def temp_node(self, t: Type, context: Context = None) -> Node:
+    def temp_node(self, t: Type, context: Context = None) -> TempNode:
         """Create a temporary node with the given, fixed type."""
         temp = TempNode(t)
         if context:
@@ -2322,16 +2338,12 @@ class TypeChecker(NodeVisitor[Type]):
 # probably be better to have the dict keyed by the nodes' literal_hash
 # field instead.
 
-# NB: This should be `TypeMap = Optional[Dict[Node, Type]]`!
-# But see https://github.com/python/mypy/issues/1637
-TypeMap = Dict[Node, Type]
+TypeMap = Optional[Dict[Node, Type]]
 
 
-def conditional_type_map(expr: Node,
+def conditional_type_map(expr: Expression,
                          current_type: Optional[Type],
                          proposed_type: Optional[Type],
-                         *,
-                         weak: bool = False
                          ) -> Tuple[TypeMap, TypeMap]:
     """Takes in an expression, the current type of the expression, and a
     proposed type of that expression.
@@ -2353,13 +2365,10 @@ def conditional_type_map(expr: Node,
             return {expr: proposed_type}, {}
     else:
         # An isinstance check, but we don't understand the type
-        if weak:
-            return {expr: AnyType()}, {expr: current_type}
-        else:
-            return {}, {}
+        return {}, {}
 
 
-def is_literal_none(n: Node) -> bool:
+def is_literal_none(n: Expression) -> bool:
     return isinstance(n, NameExpr) and n.fullname == 'builtins.None'
 
 
@@ -2407,9 +2416,8 @@ def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
     return result
 
 
-def find_isinstance_check(node: Node,
+def find_isinstance_check(node: Expression,
                           type_map: Dict[Node, Type],
-                          weak: bool=False
                           ) -> Tuple[TypeMap, TypeMap]:
     """Find any isinstance checks (within a chain of ands).  Includes
     implicit and explicit checks for None.
@@ -2428,7 +2436,7 @@ def find_isinstance_check(node: Node,
             if expr.literal == LITERAL_TYPE:
                 vartype = type_map[expr]
                 type = get_isinstance_type(node.args[1], type_map)
-                return conditional_type_map(expr, vartype, type, weak=weak)
+                return conditional_type_map(expr, vartype, type)
     elif (isinstance(node, ComparisonExpr) and any(is_literal_none(n) for n in node.operands) and
           experiments.STRICT_OPTIONAL):
         # Check for `x is None` and `x is not None`.
@@ -2442,7 +2450,7 @@ def find_isinstance_check(node: Node,
                     # two elements in node.operands, and at least one of them
                     # should represent a None.
                     vartype = type_map[expr]
-                    if_vars, else_vars = conditional_type_map(expr, vartype, NoneTyp(), weak=weak)
+                    if_vars, else_vars = conditional_type_map(expr, vartype, NoneTyp())
                     break
 
             if is_not:
@@ -2459,49 +2467,31 @@ def find_isinstance_check(node: Node,
         else_map = {ref: else_type} if not isinstance(else_type, UninhabitedType) else None
         return if_map, else_map
     elif isinstance(node, OpExpr) and node.op == 'and':
-        left_if_vars, left_else_vars = find_isinstance_check(
-            node.left,
-            type_map,
-            weak,
-        )
-
-        right_if_vars, right_else_vars = find_isinstance_check(
-            node.right,
-            type_map,
-            weak,
-        )
+        left_if_vars, left_else_vars = find_isinstance_check(node.left, type_map)
+        right_if_vars, right_else_vars = find_isinstance_check(node.right, type_map)
 
         # (e1 and e2) is true if both e1 and e2 are true,
         # and false if at least one of e1 and e2 is false.
         return (and_conditional_maps(left_if_vars, right_if_vars),
                 or_conditional_maps(left_else_vars, right_else_vars))
     elif isinstance(node, OpExpr) and node.op == 'or':
-        left_if_vars, left_else_vars = find_isinstance_check(
-            node.left,
-            type_map,
-            weak,
-        )
-
-        right_if_vars, right_else_vars = find_isinstance_check(
-            node.right,
-            type_map,
-            weak,
-        )
+        left_if_vars, left_else_vars = find_isinstance_check(node.left, type_map)
+        right_if_vars, right_else_vars = find_isinstance_check(node.right, type_map)
 
         # (e1 or e2) is true if at least one of e1 or e2 is true,
         # and false if both e1 and e2 are false.
         return (or_conditional_maps(left_if_vars, right_if_vars),
                 and_conditional_maps(left_else_vars, right_else_vars))
     elif isinstance(node, UnaryExpr) and node.op == 'not':
-        left, right = find_isinstance_check(node.expr, type_map, weak)
+        left, right = find_isinstance_check(node.expr, type_map)
         return right, left
 
     # Not a supported isinstance check
     return {}, {}
 
 
-def get_isinstance_type(node: Node, type_map: Dict[Node, Type]) -> Type:
-    type = type_map[node]
+def get_isinstance_type(expr: Expression, type_map: Dict[Node, Type]) -> Type:
+    type = type_map[expr]
 
     if isinstance(type, TupleType):
         all_types = type.items
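
With the weak parameter gone, find_isinstance_check above always combines the operand
maps the same way for 'and', 'or' and 'not'. A small illustration of the narrowing this
drives, written as ordinary user code rather than checker internals:

    from typing import Union

    def describe(x):
        # type: (Union[int, str]) -> str
        if isinstance(x, int) and x > 0:
            return str(x + 1)      # x is narrowed to int in this branch
        elif isinstance(x, str):
            return x.upper()       # and to str here
        return 'non-positive int'
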
@@ -2527,7 +2517,7 @@ def get_isinstance_type(node: Node, type_map: Dict[Node, Type]) -> Type:
         return UnionType(types)
 
 
-def expand_node(defn: Node, map: Dict[TypeVarId, Type]) -> Node:
+def expand_node(defn: FuncItem, map: Dict[TypeVarId, Type]) -> Node:
     visitor = TypeTransformVisitor(map)
     return defn.accept(visitor)
 
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index f5b2013..cc31585 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -1,22 +1,22 @@
 """Expression type checker. This file is conceptually part of TypeChecker."""
 
-from typing import cast, Dict, List, Tuple, Callable, Union, Optional
+from typing import cast, Dict, Set, List, Iterable, Tuple, Callable, Union, Optional
 
 from mypy.types import (
     Type, AnyType, CallableType, Overloaded, NoneTyp, Void, TypeVarDef,
     TupleType, Instance, TypeVarId, TypeVarType, ErasedType, UnionType,
     PartialType, DeletedType, UnboundType, UninhabitedType, TypeType,
-    true_only, false_only
+    true_only, false_only, is_named_instance
 )
 from mypy.nodes import (
     NameExpr, RefExpr, Var, FuncDef, OverloadedFuncDef, TypeInfo, CallExpr,
     Node, MemberExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr,
     OpExpr, UnaryExpr, IndexExpr, CastExpr, RevealTypeExpr, TypeApplication, ListExpr,
-    TupleExpr, DictExpr, FuncExpr, SuperExpr, SliceExpr, Context,
+    TupleExpr, DictExpr, FuncExpr, SuperExpr, SliceExpr, Context, Expression,
     ListComprehension, GeneratorExpr, SetExpr, MypyFile, Decorator,
     ConditionalExpr, ComparisonExpr, TempNode, SetComprehension,
     DictionaryComprehension, ComplexExpr, EllipsisExpr, StarExpr,
-    TypeAliasExpr, BackquoteExpr, ARG_POS, ARG_NAMED, ARG_STAR2
+    TypeAliasExpr, BackquoteExpr, ARG_POS, ARG_NAMED, ARG_STAR2, MODULE_REF,
 )
 from mypy.nodes import function_type
 from mypy import nodes
@@ -36,7 +36,7 @@ from mypy.semanal import self_type
 from mypy.constraints import get_actual_type
 from mypy.checkstrformat import StringFormatterChecker
 from mypy.expandtype import expand_type
-import mypy.checkexpr
+from mypy.util import split_module_names
 
 from mypy import experiments
 
@@ -46,6 +46,38 @@ ArgChecker = Callable[[Type, Type, int, Type, int, int, CallableType, Context, M
                       None]
 
 
+def extract_refexpr_names(expr: RefExpr) -> Set[str]:
+    """Recursively extracts all module references from a reference expression.
+
+    Note that currently, the only two subclasses of RefExpr are NameExpr and
+    MemberExpr."""
+    output = set()  # type: Set[str]
+    while expr.kind == MODULE_REF or expr.fullname is not None:
+        if expr.kind == MODULE_REF and expr.fullname is not None:
+            # If it's None, something's wrong (perhaps due to an
+            # import cycle or a suppressed error).  For now we just
+            # skip it.
+            output.add(expr.fullname)
+
+        if isinstance(expr, NameExpr):
+            is_suppressed_import = isinstance(expr.node, Var) and expr.node.is_suppressed_import
+            if isinstance(expr.node, TypeInfo):
+                # Reference to a class or a nested class
+                output.update(split_module_names(expr.node.module_name))
+            elif expr.fullname is not None and '.' in expr.fullname and not is_suppressed_import:
+                # Everything else (that is not a silenced import within a class)
+                output.add(expr.fullname.rsplit('.', 1)[0])
+            break
+        elif isinstance(expr, MemberExpr):
+            if isinstance(expr.expr, RefExpr):
+                expr = expr.expr
+            else:
+                break
+        else:
+            raise AssertionError("Unknown RefExpr subclass: {}".format(type(expr)))
+    return output
+
+
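
extract_refexpr_names above records the modules a reference depends on; the parent
packages come from the new mypy.util.split_module_names helper. A hedged sketch of the
splitting behaviour that helper is assumed to have (the toy function below is only an
illustration, not mypy's implementation):

    def split_module_names_sketch(mod_name):
        # type: (str) -> list
        # 'a.b.c' -> ['a.b.c', 'a.b', 'a']: a reference into a submodule also
        # records the enclosing packages as dependencies.
        out = [mod_name]
        while '.' in mod_name:
            mod_name = mod_name.rsplit('.', 1)[0]
            out.append(mod_name)
        return out

    assert split_module_names_sketch('collections.abc') == ['collections.abc', 'collections']
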
 class Finished(Exception):
     """Raised if we can terminate overload argument check early (no match)."""
 
@@ -61,7 +93,7 @@ class ExpressionChecker:
     # This is shared with TypeChecker, but stored also here for convenience.
     msg = None  # type: MessageBuilder
 
-    strfrm_checker = None  # type: mypy.checkstrformat.StringFormatterChecker
+    strfrm_checker = None  # type: StringFormatterChecker
 
     def __init__(self,
                  chk: 'mypy.checker.TypeChecker',
@@ -69,13 +101,14 @@ class ExpressionChecker:
         """Construct an expression type checker."""
         self.chk = chk
         self.msg = msg
-        self.strfrm_checker = mypy.checkexpr.StringFormatterChecker(self, self.chk, self.msg)
+        self.strfrm_checker = StringFormatterChecker(self, self.chk, self.msg)
 
     def visit_name_expr(self, e: NameExpr) -> Type:
         """Type check a name expression.
 
         It can be of any kind: local, member or global.
         """
+        self.chk.module_refs.update(extract_refexpr_names(e))
         result = self.analyze_ref_expr(e)
         return self.chk.narrow_type_from_binder(e, result)
 
@@ -118,7 +151,7 @@ class ExpressionChecker:
 
     def analyze_var_ref(self, var: Var, context: Context) -> Type:
         if not var.type:
-            if not var.is_ready and self.chk.typing_mode_full():
+            if not var.is_ready and self.chk.in_checked_function():
                 self.chk.handle_cannot_determine_type(var.name(), context)
             # Implicit 'Any' type.
             return AnyType()
@@ -138,7 +171,7 @@ class ExpressionChecker:
         self.try_infer_partial_type(e)
         callee_type = self.accept(e.callee)
         if (self.chk.options.disallow_untyped_calls and
-                self.chk.typing_mode_full() and
+                self.chk.in_checked_function() and
                 isinstance(callee_type, CallableType)
                 and callee_type.implicit):
             return self.msg.untyped_function_call(callee_type, e)
@@ -204,10 +237,10 @@ class ExpressionChecker:
         return self.check_call(callee_type, e.args, e.arg_kinds, e,
                                e.arg_names, callable_node=e.callee)[0]
 
-    def check_call(self, callee: Type, args: List[Node],
+    def check_call(self, callee: Type, args: List[Expression],
                    arg_kinds: List[int], context: Context,
                    arg_names: List[str] = None,
-                   callable_node: Node = None,
+                   callable_node: Expression = None,
                    arg_messages: MessageBuilder = None) -> Tuple[Type, Type]:
         """Type check a call.
 
@@ -276,7 +309,7 @@ class ExpressionChecker:
                                                messages=arg_messages)
             return self.check_call(target, args, arg_kinds, context, arg_names,
                                    arg_messages=arg_messages)
-        elif isinstance(callee, AnyType) or self.chk.typing_mode_none():
+        elif isinstance(callee, AnyType) or not self.chk.in_checked_function():
             self.infer_arg_types_in_context(None, args)
             return AnyType(), AnyType()
         elif isinstance(callee, UnionType):
@@ -290,7 +323,7 @@ class ExpressionChecker:
         elif isinstance(callee, Instance):
             call_function = analyze_member_access('__call__', callee, context,
                                          False, False, False, self.named_type,
-                                         self.not_ready_callback, self.msg)
+                                         self.not_ready_callback, self.msg, chk=self.chk)
             return self.check_call(call_function, args, arg_kinds, context, arg_names,
                                    callable_node, arg_messages)
         elif isinstance(callee, TypeVarType):
@@ -340,7 +373,7 @@ class ExpressionChecker:
         return AnyType()
 
     def infer_arg_types_in_context(self, callee: Optional[CallableType],
-                                   args: List[Node]) -> List[Type]:
+                                   args: List[Expression]) -> List[Type]:
         """Infer argument expression types using a callable type as context.
 
         For example, if callee argument 2 has type List[int], infer the
@@ -372,7 +405,7 @@ class ExpressionChecker:
         return res
 
     def infer_arg_types_in_context2(
-            self, callee: CallableType, args: List[Node], arg_kinds: List[int],
+            self, callee: CallableType, args: List[Expression], arg_kinds: List[int],
             formal_to_actual: List[List[int]]) -> List[Type]:
         """Infer argument expression types using a callable type as context.
 
@@ -438,7 +471,7 @@ class ExpressionChecker:
                                                            error_context))
 
     def infer_function_type_arguments(self, callee_type: CallableType,
-                                      args: List[Node],
+                                      args: List[Expression],
                                       arg_kinds: List[int],
                                       formal_to_actual: List[List[int]],
                                       context: Context) -> CallableType:
@@ -448,7 +481,7 @@ class ExpressionChecker:
 
         Return a derived callable type that has the arguments applied.
         """
-        if not self.chk.typing_mode_none():
+        if self.chk.in_checked_function():
             # Disable type errors during type inference. There may be errors
             # due to partial available context information at this time, but
             # these errors can be safely ignored as the arguments will be
@@ -472,7 +505,7 @@ class ExpressionChecker:
 
             inferred_args = infer_function_type_arguments(
                 callee_type, pass1_args, arg_kinds, formal_to_actual,
-                strict=self.chk.typing_mode_full())  # type: List[Type]
+                strict=self.chk.in_checked_function())  # type: List[Type]
 
             if 2 in arg_pass_nums:
                 # Second pass of type inference.
@@ -503,7 +536,7 @@ class ExpressionChecker:
 
     def infer_function_type_arguments_pass2(
             self, callee_type: CallableType,
-            args: List[Node],
+            args: List[Expression],
             arg_kinds: List[int],
             formal_to_actual: List[List[int]],
             inferred_args: List[Type],
@@ -634,13 +667,13 @@ class ExpressionChecker:
             elif kind in [nodes.ARG_POS, nodes.ARG_OPT,
                           nodes.ARG_NAMED] and is_duplicate_mapping(
                     formal_to_actual[i], actual_kinds):
-                if (self.chk.typing_mode_full() or
+                if (self.chk.in_checked_function() or
                         isinstance(actual_types[formal_to_actual[i][0]], TupleType)):
                     if messages:
                         messages.duplicate_argument_value(callee, i, context)
                     ok = False
             elif (kind == nodes.ARG_NAMED and formal_to_actual[i] and
-                  actual_kinds[formal_to_actual[i][0]] != nodes.ARG_NAMED):
+                  actual_kinds[formal_to_actual[i][0]] not in [nodes.ARG_NAMED, nodes.ARG_STAR2]):
                 # Positional argument when expecting a keyword argument.
                 if messages:
                     messages.too_many_positional_arguments(callee, context)
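
The ARG_STAR2 case added above stops a **kwargs actual that maps onto a keyword-only
parameter from being misreported as a positional argument. Roughly the calling pattern
affected (illustrative user code):

    def configure(*, verbose: bool = False) -> None:
        print(verbose)

    options = {'verbose': True}
    configure(**options)    # previously reported as "Too many positional arguments"
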
@@ -710,6 +743,8 @@ class ExpressionChecker:
         elif isinstance(caller_type, DeletedType):
             messages.deleted_as_rvalue(caller_type, context)
         elif not is_subtype(caller_type, callee_type):
+            if self.chk.should_suppress_optional_error([caller_type, callee_type]):
+                return
             messages.incompatible_argument(n, m, callee, original_caller_type,
                                            caller_kind, context)
 
@@ -758,7 +793,8 @@ class ExpressionChecker:
                     match.append(typ)
                 best_match = max(best_match, similarity)
         if not match:
-            messages.no_variant_matches_arguments(overload, arg_types, context)
+            if not self.chk.should_suppress_optional_error(arg_types):
+                messages.no_variant_matches_arguments(overload, arg_types, context)
             return AnyType()
         else:
             if len(match) == 1:
@@ -859,6 +895,7 @@ class ExpressionChecker:
 
     def visit_member_expr(self, e: MemberExpr) -> Type:
         """Visit member expression (of form e.id)."""
+        self.chk.module_refs.update(extract_refexpr_names(e))
         result = self.analyze_ordinary_member_access(e, False)
         return self.chk.narrow_type_from_binder(e, result)
 
@@ -872,7 +909,8 @@ class ExpressionChecker:
             # This is a reference to a non-module attribute.
             return analyze_member_access(e.name, self.accept(e.expr), e,
                                          is_lvalue, False, False,
-                                         self.named_type, self.not_ready_callback, self.msg)
+                                         self.named_type, self.not_ready_callback, self.msg,
+                                         chk=self.chk)
 
     def analyze_external_member_access(self, member: str, base_type: Type,
                                        context: Context) -> Type:
@@ -881,7 +919,8 @@ class ExpressionChecker:
         """
         # TODO remove; no private definitions in mypy
         return analyze_member_access(member, base_type, context, False, False, False,
-                                     self.named_type, self.not_ready_callback, self.msg)
+                                     self.named_type, self.not_ready_callback, self.msg,
+                                     chk=self.chk)
 
     def visit_int_expr(self, e: IntExpr) -> Type:
         """Type check an integer literal (trivial)."""
@@ -926,8 +965,16 @@ class ExpressionChecker:
         if e.op == '*' and isinstance(e.left, ListExpr):
             # Expressions of form [...] * e get special type inference.
             return self.check_list_multiply(e)
-        if e.op == '%' and isinstance(e.left, (StrExpr, BytesExpr)):
-            return self.strfrm_checker.check_str_interpolation(cast(StrExpr, e.left), e.right)
+        if e.op == '%':
+            pyversion = self.chk.options.python_version
+            if pyversion[0] == 3:
+                if isinstance(e.left, BytesExpr) and pyversion[1] >= 5:
+                    return self.strfrm_checker.check_str_interpolation(e.left, e.right)
+                if isinstance(e.left, StrExpr):
+                    return self.strfrm_checker.check_str_interpolation(e.left, e.right)
+            elif pyversion[0] <= 2:
+                if isinstance(e.left, (StrExpr, BytesExpr, UnicodeExpr)):
+                    return self.strfrm_checker.check_str_interpolation(e.left, e.right)
         left_type = self.accept(e.left)
 
         if e.op in nodes.op_methods:
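
The branch above makes '%'-interpolation checking depend on the target Python version:
str on any Python 3, bytes only on 3.5 or newer (where bytes gained the % operator), and
str/bytes/unicode under Python 2. Illustrative only:

    text = "count: %d" % (3,)      # checked under any --python-version 3.x
    label = b"count: %d" % (3,)    # checked under 3.5+ only; bytes interpolation is new in 3.5
    # Under --py2, str, bytes and unicode literals on the left are all checked.
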
@@ -1012,7 +1059,7 @@ class ExpressionChecker:
         else:
             return nodes.op_methods[op]
 
-    def _check_op_for_errors(self, method: str, base_type: Type, arg: Node,
+    def _check_op_for_errors(self, method: str, base_type: Type, arg: Expression,
                              context: Context
                              ) -> Tuple[Tuple[Type, Type], MessageBuilder]:
         """Type check a binary operation which maps to a method call.
@@ -1026,18 +1073,19 @@ class ExpressionChecker:
                                      local_errors)
         return result, local_errors
 
-    def check_op_local(self, method: str, base_type: Type, arg: Node,
+    def check_op_local(self, method: str, base_type: Type, arg: Expression,
                        context: Context, local_errors: MessageBuilder) -> Tuple[Type, Type]:
         """Type check a binary operation which maps to a method call.
 
         Return tuple (result type, inferred operator method type).
         """
         method_type = analyze_member_access(method, base_type, context, False, False, True,
-                                            self.named_type, self.not_ready_callback, local_errors)
+                                            self.named_type, self.not_ready_callback, local_errors,
+                                            chk=self.chk)
         return self.check_call(method_type, [arg], [nodes.ARG_POS],
                                context, arg_messages=local_errors)
 
-    def check_op(self, method: str, base_type: Type, arg: Node,
+    def check_op(self, method: str, base_type: Type, arg: Expression,
                  context: Context,
                  allow_reverse: bool = False) -> Tuple[Type, Type]:
         """Type check a binary operation which maps to a method call.
@@ -1058,10 +1106,7 @@ class ExpressionChecker:
                     # If the right operand has type Any, we can't make any
                     # conjectures about the type of the result, since the
                     # operand could have a __r method that returns anything.
-
-                    # However, in weak mode, we do make conjectures.
-                    if not self.chk.typing_mode_weak():
-                        result = AnyType(), result[1]
+                    result = AnyType(), result[1]
             success = not local_errors.is_errors()
         else:
             result = AnyType(), AnyType()
@@ -1144,14 +1189,12 @@ class ExpressionChecker:
 
         if e.op == 'and':
             right_map, left_map = \
-                mypy.checker.find_isinstance_check(e.left, self.chk.type_map,
-                                                   self.chk.typing_mode_weak())
+                mypy.checker.find_isinstance_check(e.left, self.chk.type_map)
             restricted_left_type = false_only(left_type)
             result_is_left = not left_type.can_be_true
         elif e.op == 'or':
             left_map, right_map = \
-                mypy.checker.find_isinstance_check(e.left, self.chk.type_map,
-                                                   self.chk.typing_mode_weak())
+                mypy.checker.find_isinstance_check(e.left, self.chk.type_map)
             restricted_left_type = true_only(left_type)
             result_is_left = not left_type.can_be_false
 
@@ -1237,9 +1280,9 @@ class ExpressionChecker:
             # It's actually a type application.
             return self.accept(e.analyzed)
         left_type = self.accept(e.base)
-        if isinstance(left_type, TupleType) and self.chk.typing_mode_full():
+        if isinstance(left_type, TupleType) and self.chk.in_checked_function():
             # Special case for tuples. They support indexing only by integer
-            # literals.  (Except in weak type checking mode.)
+            # literals.
             index = e.index
             if isinstance(index, SliceExpr):
                 return self.visit_tuple_slice_helper(left_type, index)
@@ -1297,10 +1340,9 @@ class ExpressionChecker:
                     slic.stride)
                 return AnyType()
 
-        return TupleType(left_type.items[begin:end:stride], left_type.fallback,
-                    left_type.line, left_type.implicit)
+        return left_type.slice(begin, stride, end)
 
-    def _get_value(self, index: Node) -> Optional[int]:
+    def _get_value(self, index: Expression) -> Optional[int]:
         if isinstance(index, IntExpr):
             return index.value
         elif isinstance(index, UnaryExpr):
@@ -1347,7 +1389,7 @@ class ExpressionChecker:
     def visit_set_expr(self, e: SetExpr) -> Type:
         return self.check_lst_expr(e.items, 'builtins.set', '<set>', e)
 
-    def check_lst_expr(self, items: List[Node], fullname: str,
+    def check_lst_expr(self, items: List[Expression], fullname: str,
                        tag: str, context: Context) -> Type:
         # Translate into type checking a generic function call.
         # Used for list and set expressions, as well as for tuples
@@ -1372,11 +1414,26 @@ class ExpressionChecker:
 
     def visit_tuple_expr(self, e: TupleExpr) -> Type:
         """Type check a tuple expression."""
-        ctx = None  # type: TupleType
         # Try to determine type context for type inference.
-        if isinstance(self.chk.type_context[-1], TupleType):
-            t = self.chk.type_context[-1]
-            ctx = t
+        type_context = self.chk.type_context[-1]
+        type_context_items = None
+        if isinstance(type_context, UnionType):
+            tuples_in_context = [t for t in type_context.items
+                                 if (isinstance(t, TupleType) and len(t.items) == len(e.items)) or
+                                 is_named_instance(t, 'builtins.tuple')]
+            if len(tuples_in_context) == 1:
+                type_context = tuples_in_context[0]
+            else:
+                # There are either no relevant tuples in the Union, or there is
+                # more than one.  Either way, we can't decide on a context.
+                pass
+
+        if isinstance(type_context, TupleType):
+            type_context_items = type_context.items
+        elif is_named_instance(type_context, 'builtins.tuple'):
+            assert isinstance(type_context, Instance)
+            if type_context.args:
+                type_context_items = [type_context.args[0]] * len(e.items)
         # NOTE: it's possible for the context to have a different
         # number of items than e.  In that case we use those context
         # items that match a position in e, and we'll worry about type
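
The union handling added above lets a tuple literal take its inference context from a
Union that contains exactly one tuple type of matching length (or a plain builtins.tuple).
For example (illustrative):

    from typing import List, Tuple, Union

    pair = (1, "one")  # type: Union[Tuple[int, str], List[int]]
    # The literal is checked against Tuple[int, str]; the List member is ignored.
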
@@ -1385,7 +1442,7 @@ class ExpressionChecker:
         # Infer item types.  Give up if there's a star expression
         # that's not a Tuple.
         items = []  # type: List[Type]
-        j = 0  # Index into ctx.items; irrelevant if ctx is None.
+        j = 0  # Index into type_context_items; irrelevant if type_context_items is None
         for i in range(len(e.items)):
             item = e.items[i]
             tt = None  # type: Type
@@ -1405,10 +1462,10 @@ class ExpressionChecker:
                     # Treat the whole thing as a variable-length tuple.
                     return self.check_lst_expr(e.items, 'builtins.tuple', '<tuple>', e)
             else:
-                if not ctx or j >= len(ctx.items):
+                if not type_context_items or j >= len(type_context_items):
                     tt = self.accept(item)
                 else:
-                    tt = self.accept(item, ctx.items[j])
+                    tt = self.accept(item, type_context_items[j])
                     j += 1
                 self.check_usable_type(tt, e)
                 items.append(tt)
@@ -1421,8 +1478,8 @@ class ExpressionChecker:
         Translate it into a call to dict(), with provisions for **expr.
         """
         # Collect function arguments, watching out for **expr.
-        args = []  # type: List[Node]  # Regular "key: value"
-        stargs = []  # type: List[Node]  # For "**expr"
+        args = []  # type: List[Expression]  # Regular "key: value"
+        stargs = []  # type: List[Expression]  # For "**expr"
         for key, value in e.items:
             if key is None:
                 stargs.append(value)
@@ -1551,12 +1608,12 @@ class ExpressionChecker:
                         # There's an undefined base class, and we're
                         # at the end of the chain.  That's not an error.
                         return AnyType()
-                    if not self.chk.typing_mode_full():
+                    if not self.chk.in_checked_function():
                         return AnyType()
                     return analyze_member_access(e.name, self_type(e.info), e,
                                                  is_lvalue, True, False,
                                                  self.named_type, self.not_ready_callback,
-                                                 self.msg, base)
+                                                 self.msg, base, chk=self.chk)
         else:
             # Invalid super. This has been reported by the semantic analyzer.
             return AnyType()
@@ -1639,10 +1696,7 @@ class ExpressionChecker:
                 self.accept(condition)
 
                 # values are only part of the comprehension when all conditions are true
-                true_map, _ = mypy.checker.find_isinstance_check(
-                    condition, self.chk.type_map,
-                    self.chk.typing_mode_weak()
-                )
+                true_map, _ = mypy.checker.find_isinstance_check(condition, self.chk.type_map)
 
                 if true_map:
                     for var, type in true_map.items():
@@ -1655,10 +1709,7 @@ class ExpressionChecker:
 
         # Gain type information from isinstance if it is there
         # but only for the current expression
-        if_map, else_map = mypy.checker.find_isinstance_check(
-            e.cond,
-            self.chk.type_map,
-            self.chk.typing_mode_weak())
+        if_map, else_map = mypy.checker.find_isinstance_check(e.cond, self.chk.type_map)
 
         if_type = self.analyze_cond_branch(if_map, e.if_expr, context=ctx)
 
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 36768e0..7241b72 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -7,7 +7,7 @@ from mypy.types import (
     Overloaded, TypeVarType, TypeTranslator, UnionType, PartialType,
     DeletedType, NoneTyp, TypeType
 )
-from mypy.nodes import TypeInfo, FuncBase, Var, FuncDef, SymbolNode, Context
+from mypy.nodes import TypeInfo, FuncBase, Var, FuncDef, SymbolNode, Context, MypyFile
 from mypy.nodes import ARG_POS, ARG_STAR, ARG_STAR2, OpExpr, ComparisonExpr
 from mypy.nodes import function_type, Decorator, OverloadedFuncDef
 from mypy.messages import MessageBuilder
@@ -17,6 +17,8 @@ from mypy.nodes import method_type, method_type_with_fallback
 from mypy.semanal import self_type
 from mypy import messages
 from mypy import subtypes
+if False:  # import for forward declaration only
+    import mypy.checker
 
 
 def analyze_member_access(name: str,
@@ -29,7 +31,8 @@ def analyze_member_access(name: str,
                           not_ready_callback: Callable[[str, Context], None],
                           msg: MessageBuilder,
                           override_info: TypeInfo = None,
-                          report_type: Type = None) -> Type:
+                          report_type: Type = None,
+                          chk: 'mypy.checker.TypeChecker' = None) -> Type:
     """Analyse attribute access.
 
     This is a general operation that supports several different variations:
@@ -74,27 +77,30 @@ def analyze_member_access(name: str,
             return analyze_member_var_access(name, typ, info, node,
                                              is_lvalue, is_super, builtin_type,
                                              not_ready_callback, msg,
-                                             report_type=report_type)
+                                             report_type=report_type, chk=chk)
     elif isinstance(typ, AnyType):
         # The base object has dynamic type.
         return AnyType()
     elif isinstance(typ, NoneTyp):
+        if chk and chk.should_suppress_optional_error([typ]):
+            return AnyType()
         # The only attributes NoneType has are those it inherits from object
         return analyze_member_access(name, builtin_type('builtins.object'), node, is_lvalue,
                                      is_super, is_operator, builtin_type, not_ready_callback, msg,
-                                     report_type=report_type)
+                                     report_type=report_type, chk=chk)
     elif isinstance(typ, UnionType):
         # The base object has dynamic type.
         msg.disable_type_names += 1
         results = [analyze_member_access(name, subtype, node, is_lvalue, is_super,
-                                         is_operator, builtin_type, not_ready_callback, msg)
+                                         is_operator, builtin_type, not_ready_callback, msg,
+                                         chk=chk)
                    for subtype in typ.items]
         msg.disable_type_names -= 1
         return UnionType.make_simplified_union(results)
     elif isinstance(typ, TupleType):
         # Actually look up from the fallback instance type.
         return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super,
-                                     is_operator, builtin_type, not_ready_callback, msg)
+                                     is_operator, builtin_type, not_ready_callback, msg, chk=chk)
     elif isinstance(typ, FunctionLike) and typ.is_type_obj():
         # Class attribute.
         # TODO super?
@@ -123,18 +129,18 @@ def analyze_member_access(name: str,
             # Look up from the 'type' type.
             return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super,
                                          is_operator, builtin_type, not_ready_callback, msg,
-                                         report_type=report_type)
+                                         report_type=report_type, chk=chk)
         else:
             assert False, 'Unexpected type {}'.format(repr(ret_type))
     elif isinstance(typ, FunctionLike):
         # Look up from the 'function' type.
         return analyze_member_access(name, typ.fallback, node, is_lvalue, is_super,
                                      is_operator, builtin_type, not_ready_callback, msg,
-                                     report_type=report_type)
+                                     report_type=report_type, chk=chk)
     elif isinstance(typ, TypeVarType):
         return analyze_member_access(name, typ.upper_bound, node, is_lvalue, is_super,
                                      is_operator, builtin_type, not_ready_callback, msg,
-                                     report_type=report_type)
+                                     report_type=report_type, chk=chk)
     elif isinstance(typ, DeletedType):
         msg.deleted_as_rvalue(typ, node)
         return AnyType()
@@ -155,7 +161,10 @@ def analyze_member_access(name: str,
         fallback = builtin_type('builtins.type')
         return analyze_member_access(name, fallback, node, is_lvalue, is_super,
                                      is_operator, builtin_type, not_ready_callback, msg,
-                                     report_type=report_type)
+                                     report_type=report_type, chk=chk)
+
+    if chk and chk.should_suppress_optional_error([typ]):
+        return AnyType()
     return msg.has_no_attr(report_type, name, node)
 
 
@@ -164,7 +173,8 @@ def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
                               builtin_type: Callable[[str], Instance],
                               not_ready_callback: Callable[[str, Context], None],
                               msg: MessageBuilder,
-                              report_type: Type = None) -> Type:
+                              report_type: Type = None,
+                              chk: 'mypy.checker.TypeChecker' = None) -> Type:
     """Analyse attribute access that does not target a method.
 
     This is logically part of analyze_member_access and the arguments are
@@ -177,7 +187,6 @@ def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
     if isinstance(vv, Decorator):
         # The associated Var node of a decorator contains the type.
         v = vv.var
-
     if isinstance(v, Var):
         return analyze_var(name, v, itype, info, node, is_lvalue, msg, not_ready_callback)
     elif isinstance(v, FuncDef):
@@ -200,6 +209,8 @@ def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
         msg.undefined_in_superclass(name, node)
         return AnyType()
     else:
+        if chk and chk.should_suppress_optional_error([itype]):
+            return AnyType()
         return msg.has_no_attr(report_type or itype, name, node)
 
 
@@ -217,6 +228,9 @@ def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Cont
         if isinstance(typ, PartialType):
             return handle_partial_attribute_type(typ, is_lvalue, msg, var)
         t = expand_type_by_instance(typ, itype)
+        if is_lvalue and var.is_property and not var.is_settable_property:
+            # TODO allow setting attributes in subclass (although it is probably an error)
+            msg.read_only_property(name, info, node)
         if var.is_initialized_in_class and isinstance(t, FunctionLike):
             if is_lvalue:
                 if var.is_property:
@@ -337,6 +351,10 @@ def analyze_class_attribute_access(itype: Instance,
     if isinstance(node.node, TypeInfo):
         return type_object_type(node.node, builtin_type)
 
+    if isinstance(node.node, MypyFile):
+        # Reference to a module object.
+        return builtin_type('builtins.module')
+
     if is_decorated:
         # TODO: Return type of decorated function. This is a quick hack to work around #998.
         return AnyType()
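
The MypyFile case above covers class attributes that are bound to module objects; such
an attribute is now given the type builtins.module. Roughly (illustrative user code):

    import types
    import collections

    class Registry:
        backend = collections            # a module object stored on the class

    # mypy now types Registry.backend as builtins.module; at runtime the
    # corresponding check is:
    assert isinstance(Registry.backend, types.ModuleType)
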
diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py
index 952256c..5af63e9 100644
--- a/mypy/checkstrformat.py
+++ b/mypy/checkstrformat.py
@@ -2,17 +2,18 @@
 
 import re
 
-from typing import cast, List, Tuple, Dict, Callable
+from typing import cast, List, Tuple, Dict, Callable, Union
 
 from mypy.types import (
     Type, AnyType, TupleType, Instance, UnionType
 )
 from mypy.nodes import (
-    Node, StrExpr, BytesExpr, TupleExpr, DictExpr, Context
+    Node, StrExpr, BytesExpr, UnicodeExpr, TupleExpr, DictExpr, Context
 )
 if False:
     # break import cycle only needed for mypy
     import mypy.checker
+    import mypy.checkexpr
 from mypy import messages
 from mypy.messages import MessageBuilder
 
@@ -54,7 +55,11 @@ class StringFormatterChecker:
         self.exprchk = exprchk
         self.msg = msg
 
-    def check_str_interpolation(self, str: StrExpr, replacements: Node) -> Type:
+    # TODO: In Python 3, the bytes formatting has a more restricted set of options
+    # compared to string formatting.
+    def check_str_interpolation(self,
+                                str: Union[StrExpr, BytesExpr, UnicodeExpr],
+                                replacements: Node) -> Type:
         """Check the types of the 'replacements' in a string interpolation
         expression: str % replacements
         """
@@ -66,10 +71,18 @@ class StringFormatterChecker:
             self.check_mapping_str_interpolation(specifiers, replacements)
         else:
             self.check_simple_str_interpolation(specifiers, replacements)
-        return self.named_type('builtins.str')
+
+        if isinstance(str, BytesExpr):
+            return self.named_type('builtins.bytes')
+        elif isinstance(str, UnicodeExpr):
+            return self.named_type('builtins.unicode')
+        elif isinstance(str, StrExpr):
+            return self.named_type('builtins.str')
+        else:
+            assert False
 
     def parse_conversion_specifiers(self, format: str) -> List[ConversionSpecifier]:
-        key_regex = r'(\((\w*)\))?'  # (optional) parenthesised sequence of characters
+        key_regex = r'(\(([^()]*)\))?'  # (optional) parenthesised sequence of characters
         flags_regex = r'([#0\-+ ]*)'  # (optional) sequence of flags
         width_regex = r'(\*|[1-9][0-9]*)?'  # (optional) minimum field width (* or numbers)
         precision_regex = r'(?:\.(\*|[0-9]+)?)?'  # (optional) . followed by * or numbers
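
The widened key_regex above accepts any mapping key that does not itself contain
parentheses, where the old pattern only matched \w characters. A quick check of the
difference using the same two patterns (illustrative):

    import re

    new_key = r'(\(([^()]*)\))?'
    old_key = r'(\((\w*)\))?'

    assert re.match('%' + new_key, '%(user-name)s').group(2) == 'user-name'
    assert re.match('%' + old_key, '%(user-name)s').group(2) is None   # key was missed before
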
diff --git a/mypy/defaults.py b/mypy/defaults.py
index 9ce210b..d9b6741 100644
--- a/mypy/defaults.py
+++ b/mypy/defaults.py
@@ -1,3 +1,4 @@
 PYTHON2_VERSION = (2, 7)
 PYTHON3_VERSION = (3, 5)
-MYPY_CACHE = '.mypy_cache'
+CACHE_DIR = '.mypy_cache'
+CONFIG_FILE = 'mypy.ini'
diff --git a/mypy/errors.py b/mypy/errors.py
index f9da1db..541e4ca 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -6,6 +6,8 @@ from collections import OrderedDict, defaultdict
 
 from typing import Tuple, List, TypeVar, Set, Dict, Optional
 
+from mypy.options import Options
+
 
 T = TypeVar('T')
 
@@ -29,6 +31,9 @@ class ErrorInfo:
     # The line number related to this error within file.
     line = 0     # -1 if unknown
 
+    # The column number related to this error within file.
+    column = 0   # -1 if unknown
+
     # Either 'error' or 'note'.
     severity = ''
 
@@ -42,13 +47,14 @@ class ErrorInfo:
     only_once = False
 
     def __init__(self, import_ctx: List[Tuple[str, int]], file: str, typ: str,
-                 function_or_member: str, line: int, severity: str, message: str,
-                 blocker: bool, only_once: bool) -> None:
+                 function_or_member: str, line: int, column: int, severity: str,
+                 message: str, blocker: bool, only_once: bool) -> None:
         self.import_ctx = import_ctx
         self.file = file
         self.type = typ
         self.function_or_member = function_or_member
         self.line = line
+        self.column = column
         self.severity = severity
         self.message = message
         self.blocker = blocker
@@ -90,9 +96,13 @@ class Errors:
     only_once_messages = None  # type: Set[str]
 
     # Set to True to suppress "In function "foo":" messages.
-    suppress_error_context = False  # type: bool
+    hide_error_context = False  # type: bool
+
+    # Set to True to show column numbers in error messages
+    show_column_numbers = False  # type: bool
 
-    def __init__(self, suppress_error_context: bool = False) -> None:
+    def __init__(self, hide_error_context: bool = False,
+                 show_column_numbers: bool = False) -> None:
         self.error_info = []
         self.import_ctx = []
         self.type_name = [None]
@@ -100,10 +110,11 @@ class Errors:
         self.ignored_lines = OrderedDict()
         self.used_ignored_lines = defaultdict(set)
         self.only_once_messages = set()
-        self.suppress_error_context = suppress_error_context
+        self.hide_error_context = hide_error_context
+        self.show_column_numbers = show_column_numbers
 
     def copy(self) -> 'Errors':
-        new = Errors(self.suppress_error_context)
+        new = Errors(self.hide_error_context, self.show_column_numbers)
         new.file = self.file
         new.import_ctx = self.import_ctx[:]
         new.type_name = self.type_name[:]
@@ -169,7 +180,7 @@ class Errors:
         """Replace the entire import context with a new value."""
         self.import_ctx = ctx[:]
 
-    def report(self, line: int, message: str, blocker: bool = False,
+    def report(self, line: int, column: int, message: str, blocker: bool = False,
                severity: str = 'error', file: str = None, only_once: bool = False) -> None:
         """Report message at the given line using the current error context.
 
@@ -187,7 +198,7 @@ class Errors:
         if file is None:
             file = self.file
         info = ErrorInfo(self.import_context(), file, type,
-                         self.function_or_member[-1], line, severity, message,
+                         self.function_or_member[-1], line, column, severity, message,
                          blocker, only_once)
         self.add_error_info(info)
 
@@ -210,7 +221,7 @@ class Errors:
                 for line in ignored_lines - self.used_ignored_lines[file]:
                     # Don't use report since add_error_info will ignore the error!
                     info = ErrorInfo(self.import_context(), file, None, None,
-                                    line, 'note', "unused 'type: ignore' comment",
+                                    line, -1, 'note', "unused 'type: ignore' comment",
                                     False, False)
                     self.error_info.append(info)
 
@@ -245,10 +256,13 @@ class Errors:
         a = []  # type: List[str]
         errors = self.render_messages(self.sort_messages(self.error_info))
         errors = self.remove_duplicates(errors)
-        for file, line, severity, message in errors:
+        for file, line, column, severity, message in errors:
             s = ''
             if file is not None:
-                if line is not None and line >= 0:
+                if self.show_column_numbers and line is not None and line >= 0 \
+                        and column is not None and column >= 0:
+                    srcloc = '{}:{}:{}'.format(file, line, column)
+                elif line is not None and line >= 0:
                     srcloc = '{}:{}'.format(file, line)
                 else:
                     srcloc = file
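
With show_column_numbers enabled, rendered messages gain a third location field
(file:line:column). A small standalone sketch of just the source-location formatting
above (format_srcloc is a name used only for this illustration):

    def format_srcloc(path, line, column, show_column_numbers):
        # type: (str, int, int, bool) -> str
        if show_column_numbers and line is not None and line >= 0 \
                and column is not None and column >= 0:
            return '{}:{}:{}'.format(path, line, column)
        elif line is not None and line >= 0:
            return '{}:{}'.format(path, line)
        return path

    assert format_srcloc('m.py', 10, 7, True) == 'm.py:10:7'
    assert format_srcloc('m.py', 10, 7, False) == 'm.py:10'
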
@@ -258,16 +272,17 @@ class Errors:
             a.append(s)
         return a
 
-    def render_messages(self, errors: List[ErrorInfo]) -> List[Tuple[str, int,
+    def render_messages(self, errors: List[ErrorInfo]) -> List[Tuple[str, int, int,
                                                                      str, str]]:
         """Translate the messages into a sequence of tuples.
 
-        Each tuple is of form (path, line, message.  The rendered
+        Each tuple is of form (path, line, column, severity, message).  The rendered
         sequence includes information about error contexts. The path
         item may be None. If the line item is negative, the line
         number is not defined for the tuple.
         """
-        result = []  # type: List[Tuple[str, int, str, str]] # (path, line, severity, message)
+        result = []  # type: List[Tuple[str, int, int, str, str]]
+        # (path, line, column, severity, message)
 
         prev_import_context = []  # type: List[Tuple[str, int]]
         prev_function_or_member = None  # type: str
@@ -290,39 +305,39 @@ class Errors:
                     # Remove prefix to ignore from path (if present) to
                     # simplify path.
                     path = remove_path_prefix(path, self.ignore_prefix)
-                    result.append((None, -1, 'note', fmt.format(path, line)))
+                    result.append((None, -1, -1, 'note', fmt.format(path, line)))
                     i -= 1
 
             file = self.simplify_path(e.file)
 
             # Report context within a source file.
-            if self.suppress_error_context:
+            if self.hide_error_context:
                 pass
             elif (e.function_or_member != prev_function_or_member or
                     e.type != prev_type):
                 if e.function_or_member is None:
                     if e.type is None:
-                        result.append((file, -1, 'note', 'At top level:'))
+                        result.append((file, -1, -1, 'note', 'At top level:'))
                     else:
-                        result.append((file, -1, 'note', 'In class "{}":'.format(
+                        result.append((file, -1, -1, 'note', 'In class "{}":'.format(
                             e.type)))
                 else:
                     if e.type is None:
-                        result.append((file, -1, 'note',
+                        result.append((file, -1, -1, 'note',
                                        'In function "{}":'.format(
                                            e.function_or_member)))
                     else:
-                        result.append((file, -1, 'note',
+                        result.append((file, -1, -1, 'note',
                                        'In member "{}" of class "{}":'.format(
                                            e.function_or_member, e.type)))
             elif e.type != prev_type:
                 if e.type is None:
-                    result.append((file, -1, 'note', 'At top level:'))
+                    result.append((file, -1, -1, 'note', 'At top level:'))
                 else:
-                    result.append((file, -1, 'note',
+                    result.append((file, -1, -1, 'note',
                                    'In class "{}":'.format(e.type)))
 
-            result.append((file, e.line, e.severity, e.message))
+            result.append((file, e.line, e.column, e.severity, e.message))
 
             prev_import_context = e.import_ctx
             prev_function_or_member = e.function_or_member
@@ -348,22 +363,23 @@ class Errors:
                 i += 1
             i += 1
 
-            # Sort the errors specific to a file according to line number.
-            a = sorted(errors[i0:i], key=lambda x: x.line)
+            # Sort the errors specific to a file according to line number and column.
+            a = sorted(errors[i0:i], key=lambda x: (x.line, x.column))
             result.extend(a)
         return result
 
-    def remove_duplicates(self, errors: List[Tuple[str, int, str, str]]
-                          ) -> List[Tuple[str, int, str, str]]:
+    def remove_duplicates(self, errors: List[Tuple[str, int, int, str, str]]
+                          ) -> List[Tuple[str, int, int, str, str]]:
         """Remove duplicates from a sorted error list."""
-        res = []  # type: List[Tuple[str, int, str, str]]
+        res = []  # type: List[Tuple[str, int, int, str, str]]
         i = 0
         while i < len(errors):
             dup = False
             j = i - 1
             while (j >= 0 and errors[j][0] == errors[i][0] and
                     errors[j][1] == errors[i][1]):
-                if errors[j] == errors[i]:
+                if (errors[j][3] == errors[i][3] and
+                        errors[j][4] == errors[i][4]):  # ignore column
                     dup = True
                     break
                 j -= 1
@@ -406,24 +422,8 @@ def remove_path_prefix(path: str, prefix: str) -> str:
         return path
 
 
-# Corresponds to command-line flag --pdb.
-drop_into_pdb = False
-
-# Corresponds to command-line flag --show-traceback.
-show_tb = False
-
-
-def set_drop_into_pdb(flag: bool) -> None:
-    global drop_into_pdb
-    drop_into_pdb = flag
-
-
-def set_show_tb(flag: bool) -> None:
-    global show_tb
-    show_tb = flag
-
-
-def report_internal_error(err: Exception, file: str, line: int, errors: Errors) -> None:
+def report_internal_error(err: Exception, file: str, line: int,
+                          errors: Errors, options: Options) -> None:
     """Report internal error and exit.
 
     This optionally starts pdb or shows a traceback.
@@ -448,14 +448,14 @@ def report_internal_error(err: Exception, file: str, line: int, errors: Errors)
           file=sys.stderr)
 
     # If requested, drop into pdb. This overrides show_tb.
-    if drop_into_pdb:
+    if options.pdb:
         print('Dropping into pdb', file=sys.stderr)
         import pdb
         pdb.post_mortem(sys.exc_info()[2])
 
     # If requested, print traceback, else print note explaining how to get one.
-    if not show_tb:
-        if not drop_into_pdb:
+    if not options.show_traceback:
+        if not options.pdb:
             print('{}: note: please use --show-traceback to print a traceback '
                   'when reporting a bug'.format(prefix),
                   file=sys.stderr)
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 87b1641..c299163 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -66,14 +66,15 @@ class ExpandTypeVisitor(TypeVisitor[Type]):
 
     def visit_instance(self, t: Instance) -> Type:
         args = self.expand_types(t.args)
-        return Instance(t.type, args, t.line)
+        return Instance(t.type, args, t.line, t.column)
 
     def visit_type_var(self, t: TypeVarType) -> Type:
         repl = self.variables.get(t.id, t)
         if isinstance(repl, Instance):
             inst = repl
             # Return copy of instance with type erasure flag on.
-            return Instance(inst.type, inst.args, inst.line, True)
+            return Instance(inst.type, inst.args, line=inst.line,
+                            column=inst.column, erased=True)
         else:
             return repl
 
@@ -88,12 +89,12 @@ class ExpandTypeVisitor(TypeVisitor[Type]):
         return Overloaded(items)
 
     def visit_tuple_type(self, t: TupleType) -> Type:
-        return TupleType(self.expand_types(t.items), t.fallback, t.line)
+        return t.copy_modified(items=self.expand_types(t.items))
 
     def visit_union_type(self, t: UnionType) -> Type:
         # After substituting for type variables in t.items,
         # some of the resulting types might be subtypes of others.
-        return UnionType.make_simplified_union(self.expand_types(t.items), t.line)
+        return UnionType.make_simplified_union(self.expand_types(t.items), t.line, t.column)
 
     def visit_partial_type(self, t: PartialType) -> Type:
         return t
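
The expandtype.py hunks are all about preserving source positions: when a type variable is substituted, the resulting Instance/TupleType/UnionType now carries the original column as well as the line (via an explicit column argument or copy_modified). A toy illustration of the pattern, not mypy's classes:

    class ToyInstance:
        def __init__(self, name, args, line=-1, column=-1):
            self.name, self.args, self.line, self.column = name, args, line, column

        def copy_modified(self, args=None):
            # Carry line/column forward so diagnostics still point at the original span.
            return ToyInstance(self.name, self.args if args is None else args,
                               line=self.line, column=self.column)

    generic = ToyInstance('List', ['T'], line=12, column=4)
    expanded = generic.copy_modified(args=['int'])
    assert (expanded.line, expanded.column) == (12, 4)
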
diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py
index dc95f3d..764c716 100644
--- a/mypy/exprtotype.py
+++ b/mypy/exprtotype.py
@@ -1,7 +1,8 @@
-"""Translate an expression (Node) to a Type value."""
+"""Translate an Expression to a Type value."""
 
 from mypy.nodes import (
-    Node, NameExpr, MemberExpr, IndexExpr, TupleExpr, ListExpr, StrExpr, BytesExpr, EllipsisExpr
+    Expression, NameExpr, MemberExpr, IndexExpr, TupleExpr,
+    ListExpr, StrExpr, BytesExpr, EllipsisExpr
 )
 from mypy.parsetype import parse_str_as_type, TypeParseError
 from mypy.types import Type, UnboundType, TypeList, EllipsisType
@@ -11,7 +12,7 @@ class TypeTranslationError(Exception):
     """Exception raised when an expression is not valid as a type."""
 
 
-def expr_to_unanalyzed_type(expr: Node) -> Type:
+def expr_to_unanalyzed_type(expr: Expression) -> Type:
     """Translate an expression to the corresponding type.
 
     The result is not semantically analyzed. It can be UnboundType or TypeList.
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index 2432317..85d7ac8 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -14,20 +14,18 @@ from mypy.nodes import (
     UnaryExpr, FuncExpr, ComparisonExpr,
     StarExpr, YieldFromExpr, NonlocalDecl, DictionaryComprehension,
     SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, Argument,
-    AwaitExpr,
+    AwaitExpr, TempNode, Expression, Statement,
     ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_STAR2
 )
 from mypy.types import (
-    Type, CallableType, FunctionLike, AnyType, UnboundType, TupleType, TypeList, EllipsisType,
+    Type, CallableType, AnyType, UnboundType, TupleType, TypeList, EllipsisType,
 )
 from mypy import defaults
 from mypy import experiments
 from mypy.errors import Errors
 
 try:
-    from typed_ast import ast27
     from typed_ast import ast35
-    from typed_ast import conversions
 except ImportError:
     if sys.version_info.minor > 2:
         print('You must install the typed_ast package before you can run mypy'
@@ -59,11 +57,8 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
     """
     is_stub_file = bool(fnam) and fnam.endswith('.pyi')
     try:
-        if pyversion[0] >= 3 or is_stub_file:
-            ast = ast35.parse(source, fnam, 'exec')
-        else:
-            ast2 = ast27.parse(source, fnam, 'exec')
-            ast = conversions.py2to3(ast2)
+        assert pyversion[0] >= 3 or is_stub_file
+        ast = ast35.parse(source, fnam, 'exec')
 
         tree = ASTConverter(pyversion=pyversion,
                             is_stub=is_stub_file,
@@ -75,22 +70,18 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
     except (SyntaxError, TypeCommentParseError) as e:
         if errors:
             errors.set_file('<input>' if fnam is None else fnam)
-            errors.report(e.lineno, e.msg)
+            errors.report(e.lineno, e.offset, e.msg)
         else:
             raise
 
-    return MypyFile([],
-                    [],
-                    False,
-                    set(),
-                    weak_opts=set())
+    return MypyFile([], [], False, set())
 
 
 def parse_type_comment(type_comment: str, line: int) -> Type:
     try:
         typ = ast35.parse(type_comment, '<type_comment>', 'eval')
-    except SyntaxError:
-        raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, line)
+    except SyntaxError as e:
+        raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, line, e.offset)
     else:
         assert isinstance(typ, ast35.Expression)
         return TypeConverter(line=line).visit(typ.body)
@@ -100,7 +91,7 @@ def with_line(f: Callable[['ASTConverter', T], U]) -> Callable[['ASTConverter',
     @wraps(f)
     def wrapper(self: 'ASTConverter', ast: T) -> U:
         node = f(self, ast)
-        node.set_line(ast.lineno)
+        node.set_line(ast.lineno, ast.col_offset)
         return node
     return wrapper
 
@@ -130,8 +121,21 @@ class ASTConverter(ast35.NodeTransformer):
     def visit_NoneType(self, n: Any) -> Optional[Node]:
         return None
 
-    def visit_list(self, l: Sequence[ast35.AST]) -> List[Node]:
-        return [self.visit(e) for e in l]
+    def translate_expr_list(self, l: Sequence[ast35.AST]) -> List[Expression]:
+        res = []  # type: List[Expression]
+        for e in l:
+            exp = self.visit(e)
+            assert exp is None or isinstance(exp, Expression)
+            res.append(exp)
+        return res
+
+    def translate_stmt_list(self, l: Sequence[ast35.AST]) -> List[Statement]:
+        res = []  # type: List[Statement]
+        for e in l:
+            stmt = self.visit(e)
+            assert stmt is None or isinstance(stmt, Statement)
+            res.append(stmt)
+        return res
 
     op_map = {
         ast35.Add: '+',
@@ -181,12 +185,12 @@ class ASTConverter(ast35.NodeTransformer):
     def as_block(self, stmts: List[ast35.stmt], lineno: int) -> Block:
         b = None
         if stmts:
-            b = Block(self.fix_function_overloads(self.visit_list(stmts)))
+            b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
             b.set_line(lineno)
         return b
 
-    def fix_function_overloads(self, stmts: List[Node]) -> List[Node]:
-        ret = []  # type: List[Node]
+    def fix_function_overloads(self, stmts: List[Statement]) -> List[Statement]:
+        ret = []  # type: List[Statement]
         current_overload = []
         current_overload_name = None
         # mypy doesn't actually check that the decorator is literally @overload
@@ -229,14 +233,14 @@ class ASTConverter(ast35.NodeTransformer):
             return 'builtins'
         return id
 
-    def visit_Module(self, mod: ast35.Module) -> Node:
-        body = self.fix_function_overloads(self.visit_list(mod.body))
+    def visit_Module(self, mod: ast35.Module) -> MypyFile:
+        body = self.fix_function_overloads(self.translate_stmt_list(mod.body))
 
         return MypyFile(body,
                         self.imports,
                         False,
                         {ti.lineno for ti in mod.type_ignores},
-                        weak_opts=set())
+                        )
 
     # --- stmt ---
     # FunctionDef(identifier name, arguments args,
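
fix_function_overloads now takes and returns List[Statement] instead of List[Node]; its job is otherwise unchanged: consecutive decorated functions with the same name are collapsed into one overload group, and (as the comment notes) the decorator is not checked to literally be @overload. The grouping idea, reduced to plain tuples rather than mypy statement nodes:

    from itertools import groupby

    def group_overloads(stmts):
        # stmts: (is_decorated, name) pairs standing in for module-level statements.
        result = []
        for (is_decorated, name), run in groupby(stmts, key=lambda s: s):
            run = list(run)
            if is_decorated and len(run) > 1:
                result.append(('overload-group', name, len(run)))
            else:
                result.extend(run)
        return result

    print(group_overloads([(True, 'f'), (True, 'f'), (False, 'x = 1')]))
    # [('overload-group', 'f', 2), (False, 'x = 1')]
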
@@ -244,17 +248,17 @@ class ASTConverter(ast35.NodeTransformer):
     # arguments = (arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults,
     #              arg? kwarg, expr* defaults)
     @with_line
-    def visit_FunctionDef(self, n: ast35.FunctionDef) -> Node:
+    def visit_FunctionDef(self, n: ast35.FunctionDef) -> Union[FuncDef, Decorator]:
         return self.do_func_def(n)
 
     # AsyncFunctionDef(identifier name, arguments args,
     #                  stmt* body, expr* decorator_list, expr? returns, string? type_comment)
     @with_line
-    def visit_AsyncFunctionDef(self, n: ast35.AsyncFunctionDef) -> Node:
+    def visit_AsyncFunctionDef(self, n: ast35.AsyncFunctionDef) -> Union[FuncDef, Decorator]:
         return self.do_func_def(n, is_coroutine=True)
 
     def do_func_def(self, n: Union[ast35.FunctionDef, ast35.AsyncFunctionDef],
-                    is_coroutine: bool = False) -> Node:
+                    is_coroutine: bool = False) -> Union[FuncDef, Decorator]:
         """Helper shared between visit_FunctionDef and visit_AsyncFunctionDef."""
         args = self.transform_args(n.args, n.lineno)
 
@@ -265,7 +269,7 @@ class ASTConverter(ast35.NodeTransformer):
             try:
                 func_type_ast = ast35.parse(n.type_comment, '<func_type>', 'func_type')
             except SyntaxError:
-                raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, n.lineno)
+                raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset)
             assert isinstance(func_type_ast, ast35.FunctionType)
             # for ellipsis arg
             if (len(func_type_ast.argtypes) == 1 and
@@ -273,8 +277,10 @@ class ASTConverter(ast35.NodeTransformer):
                 arg_types = [a.type_annotation if a.type_annotation is not None else AnyType()
                              for a in args]
             else:
-                arg_types = [a if a is not None else AnyType() for
-                            a in TypeConverter(line=n.lineno).visit_list(func_type_ast.argtypes)]
+                translated_args = (TypeConverter(line=n.lineno)
+                                   .translate_expr_list(func_type_ast.argtypes))
+                arg_types = [a if a is not None else AnyType()
+                             for a in translated_args]
             return_type = TypeConverter(line=n.lineno).visit(func_type_ast.returns)
 
             # add implicit self type
@@ -317,11 +323,11 @@ class ASTConverter(ast35.NodeTransformer):
             func_def.is_decorated = True
             func_def.set_line(n.lineno + len(n.decorator_list))
             func_def.body.set_line(func_def.get_line())
-            return Decorator(func_def, self.visit_list(n.decorator_list), var)
+            return Decorator(func_def, self.translate_expr_list(n.decorator_list), var)
         else:
             return func_def
 
-    def set_type_optional(self, type: Type, initializer: Node) -> None:
+    def set_type_optional(self, type: Type, initializer: Expression) -> None:
         if not experiments.STRICT_OPTIONAL:
             return
         # Indicate that type should be wrapped in an Optional if arg is initialized to None.
@@ -377,7 +383,7 @@ class ASTConverter(ast35.NodeTransformer):
     #  stmt* body,
     #  expr* decorator_list)
     @with_line
-    def visit_ClassDef(self, n: ast35.ClassDef) -> Node:
+    def visit_ClassDef(self, n: ast35.ClassDef) -> ClassDef:
         self.class_nesting += 1
         metaclass_arg = find(lambda x: x.arg == 'metaclass', n.keywords)
         metaclass = None
@@ -387,48 +393,63 @@ class ASTConverter(ast35.NodeTransformer):
         cdef = ClassDef(n.name,
                         self.as_block(n.body, n.lineno),
                         None,
-                        self.visit_list(n.bases),
+                        self.translate_expr_list(n.bases),
                         metaclass=metaclass)
-        cdef.decorators = self.visit_list(n.decorator_list)
+        cdef.decorators = self.translate_expr_list(n.decorator_list)
         self.class_nesting -= 1
         return cdef
 
     # Return(expr? value)
     @with_line
-    def visit_Return(self, n: ast35.Return) -> Node:
+    def visit_Return(self, n: ast35.Return) -> ReturnStmt:
         return ReturnStmt(self.visit(n.value))
 
     # Delete(expr* targets)
     @with_line
-    def visit_Delete(self, n: ast35.Delete) -> Node:
+    def visit_Delete(self, n: ast35.Delete) -> DelStmt:
         if len(n.targets) > 1:
-            tup = TupleExpr(self.visit_list(n.targets))
+            tup = TupleExpr(self.translate_expr_list(n.targets))
             tup.set_line(n.lineno)
             return DelStmt(tup)
         else:
             return DelStmt(self.visit(n.targets[0]))
 
-    # Assign(expr* targets, expr value, string? type_comment)
+    # Assign(expr* targets, expr? value, string? type_comment, expr? annotation)
     @with_line
-    def visit_Assign(self, n: ast35.Assign) -> Node:
+    def visit_Assign(self, n: ast35.Assign) -> AssignmentStmt:
         typ = None
-        if n.type_comment:
+        if hasattr(n, 'annotation') and n.annotation is not None:  # type: ignore
+            new_syntax = True
+        else:
+            new_syntax = False
+        if new_syntax and self.pyversion < (3, 6):
+            raise TypeCommentParseError('Variable annotation syntax is only '
+                                        'supported in Python 3.6, use type '
+                                        'comment instead', n.lineno, n.col_offset)
+        # typed_ast prevents having both type_comment and annotation.
+        if n.type_comment is not None:
             typ = parse_type_comment(n.type_comment, n.lineno)
-
-        return AssignmentStmt(self.visit_list(n.targets),
-                              self.visit(n.value),
-                              type=typ)
+        elif new_syntax:
+            typ = TypeConverter(line=n.lineno).visit(n.annotation)  # type: ignore
+        if n.value is None:  # always allow 'x: int'
+            rvalue = TempNode(AnyType())  # type: Expression
+        else:
+            rvalue = self.visit(n.value)
+        lvalues = self.translate_expr_list(n.targets)
+        return AssignmentStmt(lvalues,
+                              rvalue,
+                              type=typ, new_syntax=new_syntax)
 
     # AugAssign(expr target, operator op, expr value)
     @with_line
-    def visit_AugAssign(self, n: ast35.AugAssign) -> Node:
+    def visit_AugAssign(self, n: ast35.AugAssign) -> OperatorAssignmentStmt:
         return OperatorAssignmentStmt(self.from_operator(n.op),
                               self.visit(n.target),
                               self.visit(n.value))
 
     # For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
     @with_line
-    def visit_For(self, n: ast35.For) -> Node:
+    def visit_For(self, n: ast35.For) -> ForStmt:
         return ForStmt(self.visit(n.target),
                        self.visit(n.iter),
                        self.as_block(n.body, n.lineno),
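
visit_Assign above gains support for PEP 526 variable annotations alongside type comments: the new syntax is rejected for target versions below 3.6 (hence the error message), a bare annotation without a value gets a TempNode(AnyType()) placeholder rvalue, and the resulting AssignmentStmt records new_syntax=True. The three accepted spellings, as plain Python 3.6 source (illustrative, not a test from this diff):

    from typing import List

    # Annotation with a value (new_syntax=True); the type comes from the annotation:
    x: int = 1
    # Bare annotation with no value; the rvalue becomes TempNode(AnyType()):
    y: List[str]
    # Classic type comment; the type comes from parse_type_comment():
    z = 1  # type: int
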
@@ -436,7 +457,7 @@ class ASTConverter(ast35.NodeTransformer):
 
     # AsyncFor(expr target, expr iter, stmt* body, stmt* orelse)
     @with_line
-    def visit_AsyncFor(self, n: ast35.AsyncFor) -> Node:
+    def visit_AsyncFor(self, n: ast35.AsyncFor) -> ForStmt:
         r = ForStmt(self.visit(n.target),
                     self.visit(n.iter),
                     self.as_block(n.body, n.lineno),
@@ -446,28 +467,28 @@ class ASTConverter(ast35.NodeTransformer):
 
     # While(expr test, stmt* body, stmt* orelse)
     @with_line
-    def visit_While(self, n: ast35.While) -> Node:
+    def visit_While(self, n: ast35.While) -> WhileStmt:
         return WhileStmt(self.visit(n.test),
                          self.as_block(n.body, n.lineno),
                          self.as_block(n.orelse, n.lineno))
 
     # If(expr test, stmt* body, stmt* orelse)
     @with_line
-    def visit_If(self, n: ast35.If) -> Node:
+    def visit_If(self, n: ast35.If) -> IfStmt:
         return IfStmt([self.visit(n.test)],
                       [self.as_block(n.body, n.lineno)],
                       self.as_block(n.orelse, n.lineno))
 
     # With(withitem* items, stmt* body, string? type_comment)
     @with_line
-    def visit_With(self, n: ast35.With) -> Node:
+    def visit_With(self, n: ast35.With) -> WithStmt:
         return WithStmt([self.visit(i.context_expr) for i in n.items],
                         [self.visit(i.optional_vars) for i in n.items],
                         self.as_block(n.body, n.lineno))
 
     # AsyncWith(withitem* items, stmt* body)
     @with_line
-    def visit_AsyncWith(self, n: ast35.AsyncWith) -> Node:
+    def visit_AsyncWith(self, n: ast35.AsyncWith) -> WithStmt:
         r = WithStmt([self.visit(i.context_expr) for i in n.items],
                      [self.visit(i.optional_vars) for i in n.items],
                      self.as_block(n.body, n.lineno))
@@ -476,12 +497,12 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Raise(expr? exc, expr? cause)
     @with_line
-    def visit_Raise(self, n: ast35.Raise) -> Node:
+    def visit_Raise(self, n: ast35.Raise) -> RaiseStmt:
         return RaiseStmt(self.visit(n.exc), self.visit(n.cause))
 
     # Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)
     @with_line
-    def visit_Try(self, n: ast35.Try) -> Node:
+    def visit_Try(self, n: ast35.Try) -> TryStmt:
         vs = [NameExpr(h.name) if h.name is not None else None for h in n.handlers]
         types = [self.visit(h.type) for h in n.handlers]
         handlers = [self.as_block(h.body, h.lineno) for h in n.handlers]
@@ -495,19 +516,29 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Assert(expr test, expr? msg)
     @with_line
-    def visit_Assert(self, n: ast35.Assert) -> Node:
+    def visit_Assert(self, n: ast35.Assert) -> AssertStmt:
         return AssertStmt(self.visit(n.test))
 
     # Import(alias* names)
     @with_line
-    def visit_Import(self, n: ast35.Import) -> Node:
-        i = Import([(self.translate_module_id(a.name), a.asname) for a in n.names])
+    def visit_Import(self, n: ast35.Import) -> Import:
+        names = []  # type: List[Tuple[str, str]]
+        for alias in n.names:
+            name = self.translate_module_id(alias.name)
+            asname = alias.asname
+            if asname is None and name != alias.name:
+                # If the module name has been translated (and it is not already
+                # an explicit import-as), make it an implicit import-as of the
+                # original name.
+                asname = alias.name
+            names.append((name, asname))
+        i = Import(names)
         self.imports.append(i)
         return i
 
     # ImportFrom(identifier? module, alias* names, int? level)
     @with_line
-    def visit_ImportFrom(self, n: ast35.ImportFrom) -> Node:
+    def visit_ImportFrom(self, n: ast35.ImportFrom) -> ImportBase:
         i = None  # type: ImportBase
         if len(n.names) == 1 and n.names[0].name == '*':
             i = ImportAll(n.module, n.level)
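
The rewritten visit_Import keeps translated modules reachable under their original name: when translate_module_id renames a module (for example __builtin__ to builtins) and the import has no explicit alias, the original name is recorded as an implicit alias. A standalone sketch of how the names list is built (the translate function here is a toy stand-in for translate_module_id):

    def build_import_names(aliases, translate):
        # aliases: (module, asname-or-None) pairs, e.g. from `import __builtin__, os`.
        names = []
        for name, asname in aliases:
            new_name = translate(name)
            if asname is None and new_name != name:
                # Translated and not an explicit import-as: behave as if the user
                # had written `import builtins as __builtin__`.
                asname = name
            names.append((new_name, asname))
        return names

    translate = lambda mod: 'builtins' if mod == '__builtin__' else mod
    print(build_import_names([('__builtin__', None), ('os', None)], translate))
    # [('builtins', '__builtin__'), ('os', None)]
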
@@ -520,39 +551,39 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Global(identifier* names)
     @with_line
-    def visit_Global(self, n: ast35.Global) -> Node:
+    def visit_Global(self, n: ast35.Global) -> GlobalDecl:
         return GlobalDecl(n.names)
 
     # Nonlocal(identifier* names)
     @with_line
-    def visit_Nonlocal(self, n: ast35.Nonlocal) -> Node:
+    def visit_Nonlocal(self, n: ast35.Nonlocal) -> NonlocalDecl:
         return NonlocalDecl(n.names)
 
     # Expr(expr value)
     @with_line
-    def visit_Expr(self, n: ast35.Expr) -> Node:
+    def visit_Expr(self, n: ast35.Expr) -> ExpressionStmt:
         value = self.visit(n.value)
         return ExpressionStmt(value)
 
     # Pass
     @with_line
-    def visit_Pass(self, n: ast35.Pass) -> Node:
+    def visit_Pass(self, n: ast35.Pass) -> PassStmt:
         return PassStmt()
 
     # Break
     @with_line
-    def visit_Break(self, n: ast35.Break) -> Node:
+    def visit_Break(self, n: ast35.Break) -> BreakStmt:
         return BreakStmt()
 
     # Continue
     @with_line
-    def visit_Continue(self, n: ast35.Continue) -> Node:
+    def visit_Continue(self, n: ast35.Continue) -> ContinueStmt:
         return ContinueStmt()
 
     # --- expr ---
     # BoolOp(boolop op, expr* values)
     @with_line
-    def visit_BoolOp(self, n: ast35.BoolOp) -> Node:
+    def visit_BoolOp(self, n: ast35.BoolOp) -> OpExpr:
         # mypy translates (1 and 2 and 3) as (1 and (2 and 3))
         assert len(n.values) >= 2
         op = None
@@ -564,17 +595,17 @@ class ASTConverter(ast35.NodeTransformer):
             raise RuntimeError('unknown BoolOp ' + str(type(n)))
 
         # potentially inefficient!
-        def group(vals: List[Node]) -> Node:
+        def group(vals: List[Expression]) -> OpExpr:
             if len(vals) == 2:
                 return OpExpr(op, vals[0], vals[1])
             else:
                 return OpExpr(op, vals[0], group(vals[1:]))
 
-        return group(self.visit_list(n.values))
+        return group(self.translate_expr_list(n.values))
 
     # BinOp(expr left, operator op, expr right)
     @with_line
-    def visit_BinOp(self, n: ast35.BinOp) -> Node:
+    def visit_BinOp(self, n: ast35.BinOp) -> OpExpr:
         op = self.from_operator(n.op)
 
         if op is None:
@@ -584,7 +615,7 @@ class ASTConverter(ast35.NodeTransformer):
 
     # UnaryOp(unaryop op, expr operand)
     @with_line
-    def visit_UnaryOp(self, n: ast35.UnaryOp) -> Node:
+    def visit_UnaryOp(self, n: ast35.UnaryOp) -> UnaryExpr:
         op = None
         if isinstance(n.op, ast35.Invert):
             op = '~'
@@ -602,46 +633,48 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Lambda(arguments args, expr body)
     @with_line
-    def visit_Lambda(self, n: ast35.Lambda) -> Node:
+    def visit_Lambda(self, n: ast35.Lambda) -> FuncExpr:
         body = ast35.Return(n.body)
         body.lineno = n.lineno
+        body.col_offset = n.col_offset
 
         return FuncExpr(self.transform_args(n.args, n.lineno),
                         self.as_block([body], n.lineno))
 
     # IfExp(expr test, expr body, expr orelse)
     @with_line
-    def visit_IfExp(self, n: ast35.IfExp) -> Node:
+    def visit_IfExp(self, n: ast35.IfExp) -> ConditionalExpr:
         return ConditionalExpr(self.visit(n.test),
                                self.visit(n.body),
                                self.visit(n.orelse))
 
     # Dict(expr* keys, expr* values)
     @with_line
-    def visit_Dict(self, n: ast35.Dict) -> Node:
-        return DictExpr(list(zip(self.visit_list(n.keys), self.visit_list(n.values))))
+    def visit_Dict(self, n: ast35.Dict) -> DictExpr:
+        return DictExpr(list(zip(self.translate_expr_list(n.keys),
+                                 self.translate_expr_list(n.values))))
 
     # Set(expr* elts)
     @with_line
-    def visit_Set(self, n: ast35.Set) -> Node:
-        return SetExpr(self.visit_list(n.elts))
+    def visit_Set(self, n: ast35.Set) -> SetExpr:
+        return SetExpr(self.translate_expr_list(n.elts))
 
     # ListComp(expr elt, comprehension* generators)
     @with_line
-    def visit_ListComp(self, n: ast35.ListComp) -> Node:
+    def visit_ListComp(self, n: ast35.ListComp) -> ListComprehension:
         return ListComprehension(self.visit_GeneratorExp(cast(ast35.GeneratorExp, n)))
 
     # SetComp(expr elt, comprehension* generators)
     @with_line
-    def visit_SetComp(self, n: ast35.SetComp) -> Node:
+    def visit_SetComp(self, n: ast35.SetComp) -> SetComprehension:
         return SetComprehension(self.visit_GeneratorExp(cast(ast35.GeneratorExp, n)))
 
     # DictComp(expr key, expr value, comprehension* generators)
     @with_line
-    def visit_DictComp(self, n: ast35.DictComp) -> Node:
+    def visit_DictComp(self, n: ast35.DictComp) -> DictionaryComprehension:
         targets = [self.visit(c.target) for c in n.generators]
         iters = [self.visit(c.iter) for c in n.generators]
-        ifs_list = [self.visit_list(c.ifs) for c in n.generators]
+        ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators]
         return DictionaryComprehension(self.visit(n.key),
                                        self.visit(n.value),
                                        targets,
@@ -653,7 +686,7 @@ class ASTConverter(ast35.NodeTransformer):
     def visit_GeneratorExp(self, n: ast35.GeneratorExp) -> GeneratorExpr:
         targets = [self.visit(c.target) for c in n.generators]
         iters = [self.visit(c.iter) for c in n.generators]
-        ifs_list = [self.visit_list(c.ifs) for c in n.generators]
+        ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators]
         return GeneratorExpr(self.visit(n.elt),
                              targets,
                              iters,
@@ -661,35 +694,35 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Await(expr value)
     @with_line
-    def visit_Await(self, n: ast35.Await) -> Node:
+    def visit_Await(self, n: ast35.Await) -> AwaitExpr:
         v = self.visit(n.value)
         return AwaitExpr(v)
 
     # Yield(expr? value)
     @with_line
-    def visit_Yield(self, n: ast35.Yield) -> Node:
+    def visit_Yield(self, n: ast35.Yield) -> YieldExpr:
         return YieldExpr(self.visit(n.value))
 
     # YieldFrom(expr value)
     @with_line
-    def visit_YieldFrom(self, n: ast35.YieldFrom) -> Node:
+    def visit_YieldFrom(self, n: ast35.YieldFrom) -> YieldFromExpr:
         return YieldFromExpr(self.visit(n.value))
 
     # Compare(expr left, cmpop* ops, expr* comparators)
     @with_line
-    def visit_Compare(self, n: ast35.Compare) -> Node:
+    def visit_Compare(self, n: ast35.Compare) -> ComparisonExpr:
         operators = [self.from_comp_operator(o) for o in n.ops]
-        operands = self.visit_list([n.left] + n.comparators)
+        operands = self.translate_expr_list([n.left] + n.comparators)
         return ComparisonExpr(operators, operands)
 
     # Call(expr func, expr* args, keyword* keywords)
     # keyword = (identifier? arg, expr value)
     @with_line
-    def visit_Call(self, n: ast35.Call) -> Node:
+    def visit_Call(self, n: ast35.Call) -> CallExpr:
         def is_star2arg(k: ast35.keyword) -> bool:
             return k.arg is None
 
-        arg_types = self.visit_list(
+        arg_types = self.translate_expr_list(
             [a.value if isinstance(a, ast35.Starred) else a for a in n.args] +
             [k.value for k in n.keywords])
         arg_kinds = ([ARG_STAR if isinstance(a, ast35.Starred) else ARG_POS for a in n.args] +
@@ -701,7 +734,7 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Num(object n) -- a number as a PyObject.
     @with_line
-    def visit_Num(self, n: ast35.Num) -> Node:
+    def visit_Num(self, n: ast35.Num) -> Union[IntExpr, FloatExpr, ComplexExpr]:
         if isinstance(n.n, int):
             return IntExpr(n.n)
         elif isinstance(n.n, float):
@@ -713,7 +746,7 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Str(string s)
     @with_line
-    def visit_Str(self, n: ast35.Str) -> Node:
+    def visit_Str(self, n: ast35.Str) -> Union[UnicodeExpr, StrExpr]:
         if self.pyversion[0] >= 3 or self.is_stub:
             # Hack: assume all string literals in Python 2 stubs are normal
             # strs (i.e. not unicode).  All stubs are parsed with the Python 3
@@ -727,7 +760,7 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Bytes(bytes s)
     @with_line
-    def visit_Bytes(self, n: ast35.Bytes) -> Node:
+    def visit_Bytes(self, n: ast35.Bytes) -> Union[BytesExpr, StrExpr]:
         # The following line is a bit hacky, but is the best way to maintain
         # compatibility with how mypy currently parses the contents of bytes literals.
         contents = str(n.s)[2:-1]
@@ -738,17 +771,17 @@ class ASTConverter(ast35.NodeTransformer):
             return StrExpr(contents)
 
     # NameConstant(singleton value)
-    def visit_NameConstant(self, n: ast35.NameConstant) -> Node:
+    def visit_NameConstant(self, n: ast35.NameConstant) -> NameExpr:
         return NameExpr(str(n.value))
 
     # Ellipsis
     @with_line
-    def visit_Ellipsis(self, n: ast35.Ellipsis) -> Node:
+    def visit_Ellipsis(self, n: ast35.Ellipsis) -> EllipsisExpr:
         return EllipsisExpr()
 
     # Attribute(expr value, identifier attr, expr_context ctx)
     @with_line
-    def visit_Attribute(self, n: ast35.Attribute) -> Node:
+    def visit_Attribute(self, n: ast35.Attribute) -> Union[MemberExpr, SuperExpr]:
         if (isinstance(n.value, ast35.Call) and
                 isinstance(n.value.func, ast35.Name) and
                 n.value.func.id == 'super'):
@@ -758,40 +791,40 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Subscript(expr value, slice slice, expr_context ctx)
     @with_line
-    def visit_Subscript(self, n: ast35.Subscript) -> Node:
+    def visit_Subscript(self, n: ast35.Subscript) -> IndexExpr:
         return IndexExpr(self.visit(n.value), self.visit(n.slice))
 
     # Starred(expr value, expr_context ctx)
     @with_line
-    def visit_Starred(self, n: ast35.Starred) -> Node:
+    def visit_Starred(self, n: ast35.Starred) -> StarExpr:
         return StarExpr(self.visit(n.value))
 
     # Name(identifier id, expr_context ctx)
     @with_line
-    def visit_Name(self, n: ast35.Name) -> Node:
+    def visit_Name(self, n: ast35.Name) -> NameExpr:
         return NameExpr(n.id)
 
     # List(expr* elts, expr_context ctx)
     @with_line
-    def visit_List(self, n: ast35.List) -> Node:
+    def visit_List(self, n: ast35.List) -> ListExpr:
         return ListExpr([self.visit(e) for e in n.elts])
 
     # Tuple(expr* elts, expr_context ctx)
     @with_line
-    def visit_Tuple(self, n: ast35.Tuple) -> Node:
+    def visit_Tuple(self, n: ast35.Tuple) -> TupleExpr:
         return TupleExpr([self.visit(e) for e in n.elts])
 
     # --- slice ---
 
     # Slice(expr? lower, expr? upper, expr? step)
-    def visit_Slice(self, n: ast35.Slice) -> Node:
+    def visit_Slice(self, n: ast35.Slice) -> SliceExpr:
         return SliceExpr(self.visit(n.lower),
                          self.visit(n.upper),
                          self.visit(n.step))
 
     # ExtSlice(slice* dims)
-    def visit_ExtSlice(self, n: ast35.ExtSlice) -> Node:
-        return TupleExpr(self.visit_list(n.dims))
+    def visit_ExtSlice(self, n: ast35.ExtSlice) -> TupleExpr:
+        return TupleExpr(self.translate_expr_list(n.dims))
 
     # Index(expr value)
     def visit_Index(self, n: ast35.Index) -> Node:
@@ -802,13 +835,20 @@ class TypeConverter(ast35.NodeTransformer):
     def __init__(self, line: int = -1) -> None:
         self.line = line
 
+    def visit_raw_str(self, s: str) -> Type:
+        # An escape hatch that allows the AST walker in fastparse2 to
+        # directly hook into the Python 3.5 type converter in some cases
+        # without needing to create an intermediary `ast35.Str` object.
+        return parse_type_comment(s.strip(), line=self.line)
+
     def generic_visit(self, node: ast35.AST) -> None:
-        raise TypeCommentParseError(TYPE_COMMENT_AST_ERROR, self.line)
+        raise TypeCommentParseError(TYPE_COMMENT_AST_ERROR, self.line,
+                                    getattr(node, 'col_offset', -1))
 
     def visit_NoneType(self, n: Any) -> Type:
         return None
 
-    def visit_list(self, l: Sequence[ast35.AST]) -> List[Type]:
+    def translate_expr_list(self, l: Sequence[ast35.AST]) -> List[Type]:
         return [self.visit(e) for e in l]
 
     def visit_Name(self, n: ast35.Name) -> Type:
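
visit_raw_str is the hook that lets the Python 2 converter in fastparse2 reuse this TypeConverter directly: a raw per-argument type comment string is stripped and handed to parse_type_comment, with no intermediary ast35.Str node. A self-contained sketch of the calling pattern (StubConverter is a toy stand-in for TypeConverter):

    class StubConverter:
        # Stand-in for TypeConverter: visit_raw_str() strips the raw comment text
        # and parses it as a type expression (here just echoed back).
        def visit_raw_str(self, s):
            return ('parsed-type', s.strip())

    def argument_types(converter, type_comments):
        # One entry per argument; None means "no type comment on that argument".
        return [None if c is None else converter.visit_raw_str(c)
                for c in type_comments]

    print(argument_types(StubConverter(), [' int ', None, 'List[str]']))
    # [('parsed-type', 'int'), None, ('parsed-type', 'List[str]')]
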
@@ -830,15 +870,18 @@ class TypeConverter(ast35.NodeTransformer):
         assert isinstance(value, UnboundType)
         assert not value.args
 
+        empty_tuple_index = False
         if isinstance(n.slice.value, ast35.Tuple):
-            params = self.visit_list(n.slice.value.elts)
+            params = self.translate_expr_list(n.slice.value.elts)
+            if len(n.slice.value.elts) == 0:
+                empty_tuple_index = True
         else:
             params = [self.visit(n.slice.value)]
 
-        return UnboundType(value.name, params, line=self.line)
+        return UnboundType(value.name, params, line=self.line, empty_tuple_index=empty_tuple_index)
 
     def visit_Tuple(self, n: ast35.Tuple) -> Type:
-        return TupleType(self.visit_list(n.elts), None, implicit=True, line=self.line)
+        return TupleType(self.translate_expr_list(n.elts), None, implicit=True, line=self.line)
 
     # Attribute(expr value, identifier attr, expr_context ctx)
     def visit_Attribute(self, n: ast35.Attribute) -> Type:
@@ -855,10 +898,11 @@ class TypeConverter(ast35.NodeTransformer):
 
     # List(expr* elts, expr_context ctx)
     def visit_List(self, n: ast35.List) -> Type:
-        return TypeList(self.visit_list(n.elts), line=self.line)
+        return TypeList(self.translate_expr_list(n.elts), line=self.line)
 
 
 class TypeCommentParseError(Exception):
-    def __init__(self, msg: str, lineno: int) -> None:
+    def __init__(self, msg: str, lineno: int, offset: int) -> None:
         self.msg = msg
         self.lineno = lineno
+        self.offset = offset
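
TypeCommentParseError now records a column offset next to the line, which is what allows parse() to call errors.report(e.lineno, e.offset, e.msg) above. A small sketch of raising and consuming the richer exception (the class and report function here are stand-ins, not mypy's):

    class ToyTypeCommentParseError(Exception):
        # Same three fields as the updated TypeCommentParseError.
        def __init__(self, msg, lineno, offset):
            super().__init__(msg)
            self.msg, self.lineno, self.offset = msg, lineno, offset

    def report(line, column, message):
        print('{}:{}: error: {}'.format(line, column, message))

    try:
        raise ToyTypeCommentParseError('syntax error in type comment', 7, 12)
    except ToyTypeCommentParseError as e:
        report(e.lineno, e.offset, e.msg)   # prints "7:12: error: syntax error in type comment"
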
diff --git a/mypy/fastparse.py b/mypy/fastparse2.py
similarity index 50%
copy from mypy/fastparse.py
copy to mypy/fastparse2.py
index 2432317..76e95bb 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse2.py
@@ -1,3 +1,19 @@
+"""
+This file is nearly identical to `fastparse.py`, except that it works with a Python 2
+AST instead of a Python 3 AST.
+
+Previously, Python 2 code was handled by first obtaining the Python 2 AST via
+typed_ast, converting it into a Python 3 AST using typed_ast.conversions, and then
+running it through mypy.fastparse.
+
+While this worked, it did add some overhead, especially in larger Python 2 codebases.
+This module allows us to skip the conversion step, saving us some time.
+
+The reason this file is not easily merged with mypy.fastparse despite the large amount
+of redundancy is that the Python 2 AST and the Python 3 AST nodes belong to two completely
+different class hierarchies, which made it difficult to write a shared visitor between the
+two in a typesafe way.
+"""
 from functools import wraps
 import sys
 
@@ -11,23 +27,22 @@ from mypy.nodes import (
     TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr,
     DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr,
     FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr,
-    UnaryExpr, FuncExpr, ComparisonExpr,
-    StarExpr, YieldFromExpr, NonlocalDecl, DictionaryComprehension,
+    UnaryExpr, FuncExpr, ComparisonExpr, DictionaryComprehension,
     SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, Argument,
-    AwaitExpr,
+    Expression, Statement,
     ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_STAR2
 )
 from mypy.types import (
-    Type, CallableType, FunctionLike, AnyType, UnboundType, TupleType, TypeList, EllipsisType,
+    Type, CallableType, AnyType, UnboundType,
 )
 from mypy import defaults
 from mypy import experiments
 from mypy.errors import Errors
+from mypy.fastparse import TypeConverter, TypeCommentParseError
 
 try:
     from typed_ast import ast27
     from typed_ast import ast35
-    from typed_ast import conversions
 except ImportError:
     if sys.version_info.minor > 2:
         print('You must install the typed_ast package before you can run mypy'
@@ -39,7 +54,7 @@ except ImportError:
               ' Python 3.3 and greater.')
     sys.exit(1)
 
-T = TypeVar('T', bound=Union[ast35.expr, ast35.stmt])
+T = TypeVar('T', bound=Union[ast27.expr, ast27.stmt])
 U = TypeVar('U', bound=Node)
 V = TypeVar('V')
 
@@ -59,38 +74,31 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
     """
     is_stub_file = bool(fnam) and fnam.endswith('.pyi')
     try:
-        if pyversion[0] >= 3 or is_stub_file:
-            ast = ast35.parse(source, fnam, 'exec')
-        else:
-            ast2 = ast27.parse(source, fnam, 'exec')
-            ast = conversions.py2to3(ast2)
-
+        assert pyversion[0] < 3 and not is_stub_file
+        ast = ast27.parse(source, fnam, 'exec')
         tree = ASTConverter(pyversion=pyversion,
                             is_stub=is_stub_file,
                             custom_typing_module=custom_typing_module,
                             ).visit(ast)
+        assert isinstance(tree, MypyFile)
         tree.path = fnam
         tree.is_stub = is_stub_file
         return tree
     except (SyntaxError, TypeCommentParseError) as e:
         if errors:
             errors.set_file('<input>' if fnam is None else fnam)
-            errors.report(e.lineno, e.msg)
+            errors.report(e.lineno, e.offset, e.msg)
         else:
             raise
 
-    return MypyFile([],
-                    [],
-                    False,
-                    set(),
-                    weak_opts=set())
+    return MypyFile([], [], False, set())
 
 
 def parse_type_comment(type_comment: str, line: int) -> Type:
     try:
         typ = ast35.parse(type_comment, '<type_comment>', 'eval')
-    except SyntaxError:
-        raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, line)
+    except SyntaxError as e:
+        raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, line, e.offset)
     else:
         assert isinstance(typ, ast35.Expression)
         return TypeConverter(line=line).visit(typ.body)
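
With the typed_ast.conversions step removed, fastparse.parse() now asserts it only sees Python 3 sources or stubs, and fastparse2.parse() asserts the opposite, so the caller has to pick the right module up front. A hedged sketch of that dispatch (the helper below is illustrative; the real selection logic lives elsewhere in mypy and is not part of this diff):

    def pick_parser(pyversion, path):
        # Mirrors the asserts in the two parse() functions: Python 3 code and all
        # stub files go through mypy.fastparse (ast35); plain Python 2 sources go
        # through mypy.fastparse2 (ast27).
        is_stub = path.endswith('.pyi')
        if pyversion[0] >= 3 or is_stub:
            return 'mypy.fastparse'
        return 'mypy.fastparse2'

    assert pick_parser((3, 5), 'm.py') == 'mypy.fastparse'
    assert pick_parser((2, 7), 'm.pyi') == 'mypy.fastparse'
    assert pick_parser((2, 7), 'm.py') == 'mypy.fastparse2'
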
@@ -100,7 +108,7 @@ def with_line(f: Callable[['ASTConverter', T], U]) -> Callable[['ASTConverter',
     @wraps(f)
     def wrapper(self: 'ASTConverter', ast: T) -> U:
         node = f(self, ast)
-        node.set_line(ast.lineno)
+        node.set_line(ast.lineno, ast.col_offset)
         return node
     return wrapper
 
@@ -112,7 +120,7 @@ def find(f: Callable[[V], bool], seq: Sequence[V]) -> V:
     return None
 
 
-class ASTConverter(ast35.NodeTransformer):
+class ASTConverter(ast27.NodeTransformer):
     def __init__(self,
                  pyversion: Tuple[int, int],
                  is_stub: bool,
@@ -124,32 +132,44 @@ class ASTConverter(ast35.NodeTransformer):
         self.is_stub = is_stub
         self.custom_typing_module = custom_typing_module
 
-    def generic_visit(self, node: ast35.AST) -> None:
+    def generic_visit(self, node: ast27.AST) -> None:
         raise RuntimeError('AST node not implemented: ' + str(type(node)))
 
     def visit_NoneType(self, n: Any) -> Optional[Node]:
         return None
 
-    def visit_list(self, l: Sequence[ast35.AST]) -> List[Node]:
-        return [self.visit(e) for e in l]
+    def translate_expr_list(self, l: Sequence[ast27.AST]) -> List[Expression]:
+        res = []  # type: List[Expression]
+        for e in l:
+            exp = self.visit(e)
+            assert isinstance(exp, Expression)
+            res.append(exp)
+        return res
+
+    def translate_stmt_list(self, l: Sequence[ast27.AST]) -> List[Statement]:
+        res = []  # type: List[Statement]
+        for e in l:
+            stmt = self.visit(e)
+            assert isinstance(stmt, Statement)
+            res.append(stmt)
+        return res
 
     op_map = {
-        ast35.Add: '+',
-        ast35.Sub: '-',
-        ast35.Mult: '*',
-        ast35.MatMult: '@',
-        ast35.Div: '/',
-        ast35.Mod: '%',
-        ast35.Pow: '**',
-        ast35.LShift: '<<',
-        ast35.RShift: '>>',
-        ast35.BitOr: '|',
-        ast35.BitXor: '^',
-        ast35.BitAnd: '&',
-        ast35.FloorDiv: '//'
+        ast27.Add: '+',
+        ast27.Sub: '-',
+        ast27.Mult: '*',
+        ast27.Div: '/',
+        ast27.Mod: '%',
+        ast27.Pow: '**',
+        ast27.LShift: '<<',
+        ast27.RShift: '>>',
+        ast27.BitOr: '|',
+        ast27.BitXor: '^',
+        ast27.BitAnd: '&',
+        ast27.FloorDiv: '//'
     }
 
-    def from_operator(self, op: ast35.operator) -> str:
+    def from_operator(self, op: ast27.operator) -> str:
         op_name = ASTConverter.op_map.get(type(op))
         if op_name is None:
             raise RuntimeError('Unknown operator ' + str(type(op)))
@@ -159,34 +179,34 @@ class ASTConverter(ast35.NodeTransformer):
             return op_name
 
     comp_op_map = {
-        ast35.Gt: '>',
-        ast35.Lt: '<',
-        ast35.Eq: '==',
-        ast35.GtE: '>=',
-        ast35.LtE: '<=',
-        ast35.NotEq: '!=',
-        ast35.Is: 'is',
-        ast35.IsNot: 'is not',
-        ast35.In: 'in',
-        ast35.NotIn: 'not in'
+        ast27.Gt: '>',
+        ast27.Lt: '<',
+        ast27.Eq: '==',
+        ast27.GtE: '>=',
+        ast27.LtE: '<=',
+        ast27.NotEq: '!=',
+        ast27.Is: 'is',
+        ast27.IsNot: 'is not',
+        ast27.In: 'in',
+        ast27.NotIn: 'not in'
     }
 
-    def from_comp_operator(self, op: ast35.cmpop) -> str:
+    def from_comp_operator(self, op: ast27.cmpop) -> str:
         op_name = ASTConverter.comp_op_map.get(type(op))
         if op_name is None:
             raise RuntimeError('Unknown comparison operator ' + str(type(op)))
         else:
             return op_name
 
-    def as_block(self, stmts: List[ast35.stmt], lineno: int) -> Block:
+    def as_block(self, stmts: List[ast27.stmt], lineno: int) -> Block:
         b = None
         if stmts:
-            b = Block(self.fix_function_overloads(self.visit_list(stmts)))
+            b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
             b.set_line(lineno)
         return b
 
-    def fix_function_overloads(self, stmts: List[Node]) -> List[Node]:
-        ret = []  # type: List[Node]
+    def fix_function_overloads(self, stmts: List[Statement]) -> List[Statement]:
+        ret = []  # type: List[Statement]
         current_overload = []
         current_overload_name = None
         # mypy doesn't actually check that the decorator is literally @overload
@@ -229,14 +249,14 @@ class ASTConverter(ast35.NodeTransformer):
             return 'builtins'
         return id
 
-    def visit_Module(self, mod: ast35.Module) -> Node:
-        body = self.fix_function_overloads(self.visit_list(mod.body))
+    def visit_Module(self, mod: ast27.Module) -> MypyFile:
+        body = self.fix_function_overloads(self.translate_stmt_list(mod.body))
 
         return MypyFile(body,
                         self.imports,
                         False,
                         {ti.lineno for ti in mod.type_ignores},
-                        weak_opts=set())
+                        )
 
     # --- stmt ---
     # FunctionDef(identifier name, arguments args,
@@ -244,28 +264,18 @@ class ASTConverter(ast35.NodeTransformer):
     # arguments = (arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults,
     #              arg? kwarg, expr* defaults)
     @with_line
-    def visit_FunctionDef(self, n: ast35.FunctionDef) -> Node:
-        return self.do_func_def(n)
-
-    # AsyncFunctionDef(identifier name, arguments args,
-    #                  stmt* body, expr* decorator_list, expr? returns, string? type_comment)
-    @with_line
-    def visit_AsyncFunctionDef(self, n: ast35.AsyncFunctionDef) -> Node:
-        return self.do_func_def(n, is_coroutine=True)
-
-    def do_func_def(self, n: Union[ast35.FunctionDef, ast35.AsyncFunctionDef],
-                    is_coroutine: bool = False) -> Node:
-        """Helper shared between visit_FunctionDef and visit_AsyncFunctionDef."""
+    def visit_FunctionDef(self, n: ast27.FunctionDef) -> Statement:
+        converter = TypeConverter(line=n.lineno)
         args = self.transform_args(n.args, n.lineno)
 
         arg_kinds = [arg.kind for arg in args]
         arg_names = [arg.variable.name() for arg in args]
         arg_types = None  # type: List[Type]
-        if n.type_comment is not None:
+        if n.type_comment is not None and len(n.type_comment) > 0:
             try:
                 func_type_ast = ast35.parse(n.type_comment, '<func_type>', 'func_type')
             except SyntaxError:
-                raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, n.lineno)
+                raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset)
             assert isinstance(func_type_ast, ast35.FunctionType)
             # for ellipsis arg
             if (len(func_type_ast.argtypes) == 1 and
@@ -274,15 +284,15 @@ class ASTConverter(ast35.NodeTransformer):
                              for a in args]
             else:
                 arg_types = [a if a is not None else AnyType() for
-                            a in TypeConverter(line=n.lineno).visit_list(func_type_ast.argtypes)]
-            return_type = TypeConverter(line=n.lineno).visit(func_type_ast.returns)
+                            a in converter.translate_expr_list(func_type_ast.argtypes)]
+            return_type = converter.visit(func_type_ast.returns)
 
             # add implicit self type
             if self.in_class() and len(arg_types) < len(args):
                 arg_types.insert(0, AnyType())
         else:
             arg_types = [a.type_annotation for a in args]
-            return_type = TypeConverter(line=n.lineno).visit(n.returns)
+            return_type = converter.visit(None)
 
         for arg, arg_type in zip(args, arg_types):
             self.set_type_optional(arg_type, arg.initializer)
@@ -302,9 +312,6 @@ class ASTConverter(ast35.NodeTransformer):
                        args,
                        self.as_block(n.body, n.lineno),
                        func_type)
-        if is_coroutine:
-            # A coroutine is also a generator, mostly for internal reasons.
-            func_def.is_generator = func_def.is_coroutine = True
         if func_type is not None:
             func_type.definition = func_def
             func_type.line = n.lineno
@@ -317,11 +324,11 @@ class ASTConverter(ast35.NodeTransformer):
             func_def.is_decorated = True
             func_def.set_line(n.lineno + len(n.decorator_list))
             func_def.body.set_line(func_def.get_line())
-            return Decorator(func_def, self.visit_list(n.decorator_list), var)
+            return Decorator(func_def, self.translate_expr_list(n.decorator_list), var)
         else:
             return func_def
 
-    def set_type_optional(self, type: Type, initializer: Node) -> None:
+    def set_type_optional(self, type: Type, initializer: Expression) -> None:
         if not experiments.STRICT_OPTIONAL:
             return
         # Indicate that type should be wrapped in an Optional if arg is initialized to None.
@@ -329,44 +336,60 @@ class ASTConverter(ast35.NodeTransformer):
         if isinstance(type, UnboundType):
             type.optional = optional
 
-    def transform_args(self, args: ast35.arguments, line: int) -> List[Argument]:
-        def make_argument(arg: ast35.arg, default: Optional[ast35.expr], kind: int) -> Argument:
-            arg_type = TypeConverter(line=line).visit(arg.annotation)
-            return Argument(Var(arg.arg), arg_type, self.visit(default), kind)
+    def transform_args(self, n: ast27.arguments, line: int) -> List[Argument]:
+        # TODO: remove the cast once https://github.com/python/typeshed/pull/522
+        # is accepted and synced
+        type_comments = cast(List[str], n.type_comments)  # type: ignore
+        converter = TypeConverter(line=line)
+
+        def convert_arg(arg: ast27.expr) -> Var:
+            if isinstance(arg, ast27.Name):
+                v = arg.id
+            elif isinstance(arg, ast27.Tuple):
+                # TODO: An `arg` object may be a Tuple instead of just an identifier in the
+                # case of Python 2 function definitions/lambdas that use the tuple unpacking
+                # syntax. The `typed_ast.conversions` module ended up simply passing
+                # the arg object through unmodified (instead of converting it into more args).
+                # This isn't typesafe, since we are no longer always passing a string
+                # to `Var`, but we'll do the same here for consistency.
+                v = arg  # type: ignore
+            else:
+                raise RuntimeError("'{}' is not a valid argument.".format(ast27.dump(arg)))
+            return Var(v)
+
+        def get_type(i: int) -> Optional[Type]:
+            if i < len(type_comments) and type_comments[i] is not None:
+                return converter.visit_raw_str(type_comments[i])
+            return None
 
-        new_args = []
-        num_no_defaults = len(args.args) - len(args.defaults)
+        args = [(convert_arg(arg), get_type(i)) for i, arg in enumerate(n.args)]
+        defaults = self.translate_expr_list(n.defaults)
+
+        new_args = []  # type: List[Argument]
+        num_no_defaults = len(args) - len(defaults)
         # positional arguments without defaults
-        for a in args.args[:num_no_defaults]:
-            new_args.append(make_argument(a, None, ARG_POS))
+        for a, annotation in args[:num_no_defaults]:
+            new_args.append(Argument(a, annotation, None, ARG_POS))
 
         # positional arguments with defaults
-        for a, d in zip(args.args[num_no_defaults:], args.defaults):
-            new_args.append(make_argument(a, d, ARG_OPT))
+        for (a, annotation), d in zip(args[num_no_defaults:], defaults):
+            new_args.append(Argument(a, annotation, d, ARG_OPT))
 
         # *arg
-        if args.vararg is not None:
-            new_args.append(make_argument(args.vararg, None, ARG_STAR))
-
-        num_no_kw_defaults = len(args.kwonlyargs) - len(args.kw_defaults)
-        # keyword-only arguments without defaults
-        for a in args.kwonlyargs[:num_no_kw_defaults]:
-            new_args.append(make_argument(a, None, ARG_NAMED))
-
-        # keyword-only arguments with defaults
-        for a, d in zip(args.kwonlyargs[num_no_kw_defaults:], args.kw_defaults):
-            new_args.append(make_argument(a, d, ARG_NAMED))
+        if n.vararg is not None:
+            new_args.append(Argument(Var(n.vararg), get_type(len(args)), None, ARG_STAR))
 
         # **kwarg
-        if args.kwarg is not None:
-            new_args.append(make_argument(args.kwarg, None, ARG_STAR2))
+        if n.kwarg is not None:
+            typ = get_type(len(args) + (0 if n.vararg is None else 1))
+            new_args.append(Argument(Var(n.kwarg), typ, None, ARG_STAR2))
 
         return new_args
 
-    def stringify_name(self, n: ast35.AST) -> str:
-        if isinstance(n, ast35.Name):
+    def stringify_name(self, n: ast27.AST) -> str:
+        if isinstance(n, ast27.Name):
             return n.id
-        elif isinstance(n, ast35.Attribute):
+        elif isinstance(n, ast27.Attribute):
             return "{}.{}".format(self.stringify_name(n.value), n.attr)
         else:
             assert False, "can't stringify " + str(type(n))
@@ -377,32 +400,28 @@ class ASTConverter(ast35.NodeTransformer):
     #  stmt* body,
     #  expr* decorator_list)
     @with_line
-    def visit_ClassDef(self, n: ast35.ClassDef) -> Node:
+    def visit_ClassDef(self, n: ast27.ClassDef) -> ClassDef:
         self.class_nesting += 1
-        metaclass_arg = find(lambda x: x.arg == 'metaclass', n.keywords)
-        metaclass = None
-        if metaclass_arg:
-            metaclass = self.stringify_name(metaclass_arg.value)
 
         cdef = ClassDef(n.name,
                         self.as_block(n.body, n.lineno),
                         None,
-                        self.visit_list(n.bases),
-                        metaclass=metaclass)
-        cdef.decorators = self.visit_list(n.decorator_list)
+                        self.translate_expr_list(n.bases),
+                        metaclass=None)
+        cdef.decorators = self.translate_expr_list(n.decorator_list)
         self.class_nesting -= 1
         return cdef
 
     # Return(expr? value)
     @with_line
-    def visit_Return(self, n: ast35.Return) -> Node:
+    def visit_Return(self, n: ast27.Return) -> ReturnStmt:
         return ReturnStmt(self.visit(n.value))
 
     # Delete(expr* targets)
     @with_line
-    def visit_Delete(self, n: ast35.Delete) -> Node:
+    def visit_Delete(self, n: ast27.Delete) -> DelStmt:
         if len(n.targets) > 1:
-            tup = TupleExpr(self.visit_list(n.targets))
+            tup = TupleExpr(self.translate_expr_list(n.targets))
             tup.set_line(n.lineno)
             return DelStmt(tup)
         else:
@@ -410,104 +429,173 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Assign(expr* targets, expr value, string? type_comment)
     @with_line
-    def visit_Assign(self, n: ast35.Assign) -> Node:
+    def visit_Assign(self, n: ast27.Assign) -> AssignmentStmt:
         typ = None
         if n.type_comment:
             typ = parse_type_comment(n.type_comment, n.lineno)
 
-        return AssignmentStmt(self.visit_list(n.targets),
+        return AssignmentStmt(self.translate_expr_list(n.targets),
                               self.visit(n.value),
                               type=typ)
 
     # AugAssign(expr target, operator op, expr value)
     @with_line
-    def visit_AugAssign(self, n: ast35.AugAssign) -> Node:
+    def visit_AugAssign(self, n: ast27.AugAssign) -> OperatorAssignmentStmt:
         return OperatorAssignmentStmt(self.from_operator(n.op),
                               self.visit(n.target),
                               self.visit(n.value))
 
     # For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
     @with_line
-    def visit_For(self, n: ast35.For) -> Node:
+    def visit_For(self, n: ast27.For) -> ForStmt:
         return ForStmt(self.visit(n.target),
                        self.visit(n.iter),
                        self.as_block(n.body, n.lineno),
                        self.as_block(n.orelse, n.lineno))
 
-    # AsyncFor(expr target, expr iter, stmt* body, stmt* orelse)
-    @with_line
-    def visit_AsyncFor(self, n: ast35.AsyncFor) -> Node:
-        r = ForStmt(self.visit(n.target),
-                    self.visit(n.iter),
-                    self.as_block(n.body, n.lineno),
-                    self.as_block(n.orelse, n.lineno))
-        r.is_async = True
-        return r
-
     # While(expr test, stmt* body, stmt* orelse)
     @with_line
-    def visit_While(self, n: ast35.While) -> Node:
+    def visit_While(self, n: ast27.While) -> WhileStmt:
         return WhileStmt(self.visit(n.test),
                          self.as_block(n.body, n.lineno),
                          self.as_block(n.orelse, n.lineno))
 
     # If(expr test, stmt* body, stmt* orelse)
     @with_line
-    def visit_If(self, n: ast35.If) -> Node:
+    def visit_If(self, n: ast27.If) -> IfStmt:
         return IfStmt([self.visit(n.test)],
                       [self.as_block(n.body, n.lineno)],
                       self.as_block(n.orelse, n.lineno))
 
     # With(withitem* items, stmt* body, string? type_comment)
     @with_line
-    def visit_With(self, n: ast35.With) -> Node:
-        return WithStmt([self.visit(i.context_expr) for i in n.items],
-                        [self.visit(i.optional_vars) for i in n.items],
+    def visit_With(self, n: ast27.With) -> WithStmt:
+        return WithStmt([self.visit(n.context_expr)],
+                        [self.visit(n.optional_vars)],
                         self.as_block(n.body, n.lineno))
 
-    # AsyncWith(withitem* items, stmt* body)
     @with_line
-    def visit_AsyncWith(self, n: ast35.AsyncWith) -> Node:
-        r = WithStmt([self.visit(i.context_expr) for i in n.items],
-                     [self.visit(i.optional_vars) for i in n.items],
-                     self.as_block(n.body, n.lineno))
-        r.is_async = True
-        return r
+    def visit_Raise(self, n: ast27.Raise) -> RaiseStmt:
+        e = None
+        if n.type is not None:
+            e = n.type
+
+            if n.inst is not None and not (isinstance(n.inst, ast27.Name) and n.inst.id == "None"):
+                if isinstance(n.inst, ast27.Tuple):
+                    args = n.inst.elts
+                else:
+                    args = [n.inst]
+                e = ast27.Call(e, args, [], None, None, lineno=e.lineno, col_offset=-1)
 
-    # Raise(expr? exc, expr? cause)
+        return RaiseStmt(self.visit(e), None)
+
+    # TryExcept(stmt* body, excepthandler* handlers, stmt* orelse)
     @with_line
-    def visit_Raise(self, n: ast35.Raise) -> Node:
-        return RaiseStmt(self.visit(n.exc), self.visit(n.cause))
+    def visit_TryExcept(self, n: ast27.TryExcept) -> TryStmt:
+        return self.try_handler(n.body, n.handlers, n.orelse, [], n.lineno)
 
-    # Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)
     @with_line
-    def visit_Try(self, n: ast35.Try) -> Node:
-        vs = [NameExpr(h.name) if h.name is not None else None for h in n.handlers]
-        types = [self.visit(h.type) for h in n.handlers]
-        handlers = [self.as_block(h.body, h.lineno) for h in n.handlers]
+    def visit_TryFinally(self, n: ast27.TryFinally) -> TryStmt:
+        if len(n.body) == 1 and isinstance(n.body[0], ast27.TryExcept):
+            return self.try_handler([n.body[0]], [], [], n.finalbody, n.lineno)
+        else:
+            return self.try_handler(n.body, [], [], n.finalbody, n.lineno)
+
+    def try_handler(self,
+                    body: List[ast27.stmt],
+                    handlers: List[ast27.ExceptHandler],
+                    orelse: List[ast27.stmt],
+                    finalbody: List[ast27.stmt],
+                    lineno: int) -> TryStmt:
+        def produce_name(item: ast27.ExceptHandler) -> Optional[NameExpr]:
+            if item.name is None:
+                return None
+            elif isinstance(item.name, ast27.Name):
+                return NameExpr(item.name.id)
+            else:
+                raise RuntimeError("'{}' has non-Name name.".format(ast27.dump(item)))
+
+        vs = [produce_name(h) for h in handlers]
+        types = [self.visit(h.type) for h in handlers]
+        handlers_ = [self.as_block(h.body, h.lineno) for h in handlers]
 
-        return TryStmt(self.as_block(n.body, n.lineno),
+        return TryStmt(self.as_block(body, lineno),
                        vs,
                        types,
-                       handlers,
-                       self.as_block(n.orelse, n.lineno),
-                       self.as_block(n.finalbody, n.lineno))
+                       handlers_,
+                       self.as_block(orelse, lineno),
+                       self.as_block(finalbody, lineno))
+
+    @with_line
+    def visit_Print(self, n: ast27.Print) -> ExpressionStmt:
+        keywords = []
+        if n.dest is not None:
+            keywords.append(ast27.keyword("file", n.dest))
+
+        if not n.nl:
+            keywords.append(ast27.keyword("end", ast27.Str(" ", lineno=n.lineno, col_offset=-1)))
+
+        # TODO: Rather than desugaring Print into an intermediary ast27.Call object, it might
+        # be more efficient to directly create a mypy.nodes.CallExpr object.
+        call = ast27.Call(
+            ast27.Name("print", ast27.Load(), lineno=n.lineno, col_offset=-1),
+            n.values, keywords, None, None,
+            lineno=n.lineno, col_offset=-1)
+        return self.visit_Expr(ast27.Expr(call, lineno=n.lineno, col_offset=-1))
+
+    @with_line
+    def visit_Exec(self, n: ast27.Exec) -> ExpressionStmt:
+        new_globals = n.globals
+        new_locals = n.locals
+
+        if new_globals is None:
+            new_globals = ast27.Name("None", ast27.Load(), lineno=-1, col_offset=-1)
+        if new_locals is None:
+            new_locals = ast27.Name("None", ast27.Load(), lineno=-1, col_offset=-1)
+
+        # TODO: Comment in visit_Print also applies here
+        return self.visit_Expr(ast27.Expr(
+            ast27.Call(
+                ast27.Name("exec", ast27.Load(), lineno=n.lineno, col_offset=-1),
+                [n.body, new_globals, new_locals],
+                [], None, None,
+                lineno=n.lineno, col_offset=-1),
+            lineno=n.lineno, col_offset=-1))
+
+    @with_line
+    def visit_Repr(self, n: ast27.Repr) -> CallExpr:
+        # TODO: Comment in visit_Print also applies here
+        return self.visit_Call(ast27.Call(
+            ast27.Name("repr", ast27.Load(), lineno=n.lineno, col_offset=-1),
+            [n.value],
+            [], None, None,
+            lineno=n.lineno, col_offset=-1))
 
     # Assert(expr test, expr? msg)
     @with_line
-    def visit_Assert(self, n: ast35.Assert) -> Node:
+    def visit_Assert(self, n: ast27.Assert) -> AssertStmt:
         return AssertStmt(self.visit(n.test))
 
     # Import(alias* names)
     @with_line
-    def visit_Import(self, n: ast35.Import) -> Node:
-        i = Import([(self.translate_module_id(a.name), a.asname) for a in n.names])
+    def visit_Import(self, n: ast27.Import) -> Import:
+        names = []  # type: List[Tuple[str, str]]
+        for alias in n.names:
+            name = self.translate_module_id(alias.name)
+            asname = alias.asname
+            if asname is None and name != alias.name:
+                # if the module name has been translated (and it's not already
+                # an explicit import-as), make it an implicit import-as the
+                # original name
+                asname = alias.name
+            names.append((name, asname))
+        i = Import(names)
         self.imports.append(i)
         return i
 
     # ImportFrom(identifier? module, alias* names, int? level)
     @with_line
-    def visit_ImportFrom(self, n: ast35.ImportFrom) -> Node:
+    def visit_ImportFrom(self, n: ast27.ImportFrom) -> ImportBase:
         i = None  # type: ImportBase
         if len(n.names) == 1 and n.names[0].name == '*':
             i = ImportAll(n.module, n.level)
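
As the TODO in visit_Print above notes, the converter desugars the Python 2 print statement into an intermediate call before handing it to the normal expression machinery. A string-level sketch of the same mapping, deliberately avoiding ast27 and mypy node classes:

    def desugar_print(values, dest=None, nl=True):
        # Operands stay positional; `print >>dest, ...` becomes file=dest,
        # and a trailing comma (nl=False) becomes end=" ".
        parts = list(values)
        if dest is not None:
            parts.append("file={}".format(dest))
        if not nl:
            parts.append("end=' '")
        return "print({})".format(", ".join(parts))

    assert desugar_print(["x", "y"]) == "print(x, y)"
    assert desugar_print(["x"], dest="sys.stderr", nl=False) == \
        "print(x, file=sys.stderr, end=' ')"
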
@@ -520,61 +608,56 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Global(identifier* names)
     @with_line
-    def visit_Global(self, n: ast35.Global) -> Node:
+    def visit_Global(self, n: ast27.Global) -> GlobalDecl:
         return GlobalDecl(n.names)
 
-    # Nonlocal(identifier* names)
-    @with_line
-    def visit_Nonlocal(self, n: ast35.Nonlocal) -> Node:
-        return NonlocalDecl(n.names)
-
     # Expr(expr value)
     @with_line
-    def visit_Expr(self, n: ast35.Expr) -> Node:
+    def visit_Expr(self, n: ast27.Expr) -> ExpressionStmt:
         value = self.visit(n.value)
         return ExpressionStmt(value)
 
     # Pass
     @with_line
-    def visit_Pass(self, n: ast35.Pass) -> Node:
+    def visit_Pass(self, n: ast27.Pass) -> PassStmt:
         return PassStmt()
 
     # Break
     @with_line
-    def visit_Break(self, n: ast35.Break) -> Node:
+    def visit_Break(self, n: ast27.Break) -> BreakStmt:
         return BreakStmt()
 
     # Continue
     @with_line
-    def visit_Continue(self, n: ast35.Continue) -> Node:
+    def visit_Continue(self, n: ast27.Continue) -> ContinueStmt:
         return ContinueStmt()
 
     # --- expr ---
     # BoolOp(boolop op, expr* values)
     @with_line
-    def visit_BoolOp(self, n: ast35.BoolOp) -> Node:
+    def visit_BoolOp(self, n: ast27.BoolOp) -> OpExpr:
         # mypy translates (1 and 2 and 3) as (1 and (2 and 3))
         assert len(n.values) >= 2
         op = None
-        if isinstance(n.op, ast35.And):
+        if isinstance(n.op, ast27.And):
             op = 'and'
-        elif isinstance(n.op, ast35.Or):
+        elif isinstance(n.op, ast27.Or):
             op = 'or'
         else:
             raise RuntimeError('unknown BoolOp ' + str(type(n)))
 
         # potentially inefficient!
-        def group(vals: List[Node]) -> Node:
+        def group(vals: List[Expression]) -> OpExpr:
             if len(vals) == 2:
                 return OpExpr(op, vals[0], vals[1])
             else:
                 return OpExpr(op, vals[0], group(vals[1:]))
 
-        return group(self.visit_list(n.values))
+        return group(self.translate_expr_list(n.values))
 
     # BinOp(expr left, operator op, expr right)
     @with_line
-    def visit_BinOp(self, n: ast35.BinOp) -> Node:
+    def visit_BinOp(self, n: ast27.BinOp) -> OpExpr:
         op = self.from_operator(n.op)
 
         if op is None:
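
visit_BoolOp above groups chained boolean operators to the right, so `1 and 2 and 3` is represented as `1 and (2 and 3)`. The same grouping over plain tuples, as a minimal sketch:

    from typing import List, Tuple, Union

    Expr = Union[str, Tuple]  # a leaf operand or an (op, left, right) tuple

    def group(op: str, vals: List[Expr]) -> Expr:
        # Two operands make one node; longer chains nest toward the right.
        if len(vals) == 2:
            return (op, vals[0], vals[1])
        return (op, vals[0], group(op, vals[1:]))

    assert group('and', ['1', '2', '3']) == ('and', '1', ('and', '2', '3'))
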
@@ -584,15 +667,15 @@ class ASTConverter(ast35.NodeTransformer):
 
     # UnaryOp(unaryop op, expr operand)
     @with_line
-    def visit_UnaryOp(self, n: ast35.UnaryOp) -> Node:
+    def visit_UnaryOp(self, n: ast27.UnaryOp) -> UnaryExpr:
         op = None
-        if isinstance(n.op, ast35.Invert):
+        if isinstance(n.op, ast27.Invert):
             op = '~'
-        elif isinstance(n.op, ast35.Not):
+        elif isinstance(n.op, ast27.Not):
             op = 'not'
-        elif isinstance(n.op, ast35.UAdd):
+        elif isinstance(n.op, ast27.UAdd):
             op = '+'
-        elif isinstance(n.op, ast35.USub):
+        elif isinstance(n.op, ast27.USub):
             op = '-'
 
         if op is None:
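
The if/elif chain in visit_UnaryOp above maps operator node classes to operator strings. The same mapping compressed into a lookup table, sketched with the stdlib ast module instead of ast27:

    import ast

    # One entry per unaryop subclass handled above.
    UNARY_OPS = {ast.Invert: '~', ast.Not: 'not', ast.UAdd: '+', ast.USub: '-'}

    node = ast.parse('-x', mode='eval').body  # UnaryOp(op=USub, operand=Name('x'))
    assert UNARY_OPS.get(type(node.op)) == '-'
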
@@ -602,46 +685,48 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Lambda(arguments args, expr body)
     @with_line
-    def visit_Lambda(self, n: ast35.Lambda) -> Node:
-        body = ast35.Return(n.body)
+    def visit_Lambda(self, n: ast27.Lambda) -> FuncExpr:
+        body = ast27.Return(n.body)
         body.lineno = n.lineno
+        body.col_offset = n.col_offset
 
         return FuncExpr(self.transform_args(n.args, n.lineno),
                         self.as_block([body], n.lineno))
 
     # IfExp(expr test, expr body, expr orelse)
     @with_line
-    def visit_IfExp(self, n: ast35.IfExp) -> Node:
+    def visit_IfExp(self, n: ast27.IfExp) -> ConditionalExpr:
         return ConditionalExpr(self.visit(n.test),
                                self.visit(n.body),
                                self.visit(n.orelse))
 
     # Dict(expr* keys, expr* values)
     @with_line
-    def visit_Dict(self, n: ast35.Dict) -> Node:
-        return DictExpr(list(zip(self.visit_list(n.keys), self.visit_list(n.values))))
+    def visit_Dict(self, n: ast27.Dict) -> DictExpr:
+        return DictExpr(list(zip(self.translate_expr_list(n.keys),
+                                 self.translate_expr_list(n.values))))
 
     # Set(expr* elts)
     @with_line
-    def visit_Set(self, n: ast35.Set) -> Node:
-        return SetExpr(self.visit_list(n.elts))
+    def visit_Set(self, n: ast27.Set) -> SetExpr:
+        return SetExpr(self.translate_expr_list(n.elts))
 
     # ListComp(expr elt, comprehension* generators)
     @with_line
-    def visit_ListComp(self, n: ast35.ListComp) -> Node:
-        return ListComprehension(self.visit_GeneratorExp(cast(ast35.GeneratorExp, n)))
+    def visit_ListComp(self, n: ast27.ListComp) -> ListComprehension:
+        return ListComprehension(self.visit_GeneratorExp(cast(ast27.GeneratorExp, n)))
 
     # SetComp(expr elt, comprehension* generators)
     @with_line
-    def visit_SetComp(self, n: ast35.SetComp) -> Node:
-        return SetComprehension(self.visit_GeneratorExp(cast(ast35.GeneratorExp, n)))
+    def visit_SetComp(self, n: ast27.SetComp) -> SetComprehension:
+        return SetComprehension(self.visit_GeneratorExp(cast(ast27.GeneratorExp, n)))
 
     # DictComp(expr key, expr value, comprehension* generators)
     @with_line
-    def visit_DictComp(self, n: ast35.DictComp) -> Node:
+    def visit_DictComp(self, n: ast27.DictComp) -> DictionaryComprehension:
         targets = [self.visit(c.target) for c in n.generators]
         iters = [self.visit(c.iter) for c in n.generators]
-        ifs_list = [self.visit_list(c.ifs) for c in n.generators]
+        ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators]
         return DictionaryComprehension(self.visit(n.key),
                                        self.visit(n.value),
                                        targets,
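
visit_Lambda above turns the lambda's expression body into a one-statement block by synthesizing a Return node that reuses the lambda's position, so the ordinary function-body handling can take over. The same move with the stdlib ast module, as a small sketch:

    import ast

    lam = ast.parse("lambda x: x + 1", mode="eval").body
    ret = ast.Return(value=lam.body)     # wrap the body expression
    ast.copy_location(ret, lam)          # mirrors body.lineno/col_offset above

    assert isinstance(ret, ast.Return) and ret.lineno == lam.lineno
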
@@ -650,107 +735,116 @@ class ASTConverter(ast35.NodeTransformer):
 
     # GeneratorExp(expr elt, comprehension* generators)
     @with_line
-    def visit_GeneratorExp(self, n: ast35.GeneratorExp) -> GeneratorExpr:
+    def visit_GeneratorExp(self, n: ast27.GeneratorExp) -> GeneratorExpr:
         targets = [self.visit(c.target) for c in n.generators]
         iters = [self.visit(c.iter) for c in n.generators]
-        ifs_list = [self.visit_list(c.ifs) for c in n.generators]
+        ifs_list = [self.translate_expr_list(c.ifs) for c in n.generators]
         return GeneratorExpr(self.visit(n.elt),
                              targets,
                              iters,
                              ifs_list)
 
-    # Await(expr value)
-    @with_line
-    def visit_Await(self, n: ast35.Await) -> Node:
-        v = self.visit(n.value)
-        return AwaitExpr(v)
-
     # Yield(expr? value)
     @with_line
-    def visit_Yield(self, n: ast35.Yield) -> Node:
+    def visit_Yield(self, n: ast27.Yield) -> YieldExpr:
         return YieldExpr(self.visit(n.value))
 
-    # YieldFrom(expr value)
-    @with_line
-    def visit_YieldFrom(self, n: ast35.YieldFrom) -> Node:
-        return YieldFromExpr(self.visit(n.value))
-
     # Compare(expr left, cmpop* ops, expr* comparators)
     @with_line
-    def visit_Compare(self, n: ast35.Compare) -> Node:
+    def visit_Compare(self, n: ast27.Compare) -> ComparisonExpr:
         operators = [self.from_comp_operator(o) for o in n.ops]
-        operands = self.visit_list([n.left] + n.comparators)
+        operands = self.translate_expr_list([n.left] + n.comparators)
         return ComparisonExpr(operators, operands)
 
     # Call(expr func, expr* args, keyword* keywords)
     # keyword = (identifier? arg, expr value)
     @with_line
-    def visit_Call(self, n: ast35.Call) -> Node:
-        def is_star2arg(k: ast35.keyword) -> bool:
-            return k.arg is None
+    def visit_Call(self, n: ast27.Call) -> CallExpr:
+        arg_types = []  # type: List[ast27.expr]
+        arg_kinds = []  # type: List[int]
+        signature = []  # type: List[Optional[str]]
+
+        arg_types.extend(n.args)
+        arg_kinds.extend(ARG_POS for a in n.args)
+        signature.extend(None for a in n.args)
+
+        if n.starargs is not None:
+            arg_types.append(n.starargs)
+            arg_kinds.append(ARG_STAR)
+            signature.append(None)
+
+        arg_types.extend(k.value for k in n.keywords)
+        arg_kinds.extend(ARG_NAMED for k in n.keywords)
+        signature.extend(k.arg for k in n.keywords)
+
+        if n.kwargs is not None:
+            arg_types.append(n.kwargs)
+            arg_kinds.append(ARG_STAR2)
+            signature.append(None)
 
-        arg_types = self.visit_list(
-            [a.value if isinstance(a, ast35.Starred) else a for a in n.args] +
-            [k.value for k in n.keywords])
-        arg_kinds = ([ARG_STAR if isinstance(a, ast35.Starred) else ARG_POS for a in n.args] +
-                     [ARG_STAR2 if is_star2arg(k) else ARG_NAMED for k in n.keywords])
         return CallExpr(self.visit(n.func),
-                        arg_types,
+                        self.translate_expr_list(arg_types),
                         arg_kinds,
-                        cast("List[str]", [None for _ in n.args]) + [k.arg for k in n.keywords])
+                        cast("List[str]", signature))
 
     # Num(object n) -- a number as a PyObject.
     @with_line
-    def visit_Num(self, n: ast35.Num) -> Node:
-        if isinstance(n.n, int):
-            return IntExpr(n.n)
-        elif isinstance(n.n, float):
-            return FloatExpr(n.n)
-        elif isinstance(n.n, complex):
-            return ComplexExpr(n.n)
+    def visit_Num(self, new: ast27.Num) -> Expression:
+        value = new.n
+        is_inverse = False
+        if str(new.n).startswith('-'):  # Hackish because of complex.
+            value = -new.n
+            is_inverse = True
+
+        expr = None  # type: Expression
+        if isinstance(value, int):
+            expr = IntExpr(value)
+        elif isinstance(value, float):
+            expr = FloatExpr(value)
+        elif isinstance(value, complex):
+            expr = ComplexExpr(value)
+        else:
+            raise RuntimeError('num not implemented for ' + str(type(new.n)))
 
-        raise RuntimeError('num not implemented for ' + str(type(n.n)))
+        if is_inverse:
+            expr = UnaryExpr('-', expr)
 
-    # Str(string s)
-    @with_line
-    def visit_Str(self, n: ast35.Str) -> Node:
-        if self.pyversion[0] >= 3 or self.is_stub:
-            # Hack: assume all string literals in Python 2 stubs are normal
-            # strs (i.e. not unicode).  All stubs are parsed with the Python 3
-            # parser, which causes unprefixed string literals to be interpreted
-            # as unicode instead of bytes.  This hack is generally okay,
-            # because mypy considers str literals to be compatible with
-            # unicode.
-            return StrExpr(n.s)
-        else:
-            return UnicodeExpr(n.s)
+        return expr
 
-    # Bytes(bytes s)
+    # Str(string s)
     @with_line
-    def visit_Bytes(self, n: ast35.Bytes) -> Node:
-        # The following line is a bit hacky, but is the best way to maintain
-        # compatibility with how mypy currently parses the contents of bytes literals.
-        contents = str(n.s)[2:-1]
-
-        if self.pyversion[0] >= 3:
-            return BytesExpr(contents)
+    def visit_Str(self, s: ast27.Str) -> Expression:
+        # Hack: assume all string literals in Python 2 stubs are normal
+        # strs (i.e. not unicode).  All stubs are parsed with the Python 3
+        # parser, which causes unprefixed string literals to be interpreted
+        # as unicode instead of bytes.  This hack is generally okay,
+        # because mypy considers str literals to be compatible with
+        # unicode.
+        if isinstance(s.s, bytes):
+            n = s.s
+            # The following line is a bit hacky, but is the best way to maintain
+            # compatibility with how mypy currently parses the contents of bytes literals.
+            contents = str(n)[2:-1]
+
+            if self.pyversion[0] >= 3:
+                return BytesExpr(contents)
+            else:
+                return StrExpr(contents)
         else:
-            return StrExpr(contents)
-
-    # NameConstant(singleton value)
-    def visit_NameConstant(self, n: ast35.NameConstant) -> Node:
-        return NameExpr(str(n.value))
+            if self.pyversion[0] >= 3 or self.is_stub:
+                return StrExpr(s.s)
+            else:
+                return UnicodeExpr(s.s)
 
     # Ellipsis
-    @with_line
-    def visit_Ellipsis(self, n: ast35.Ellipsis) -> Node:
+    def visit_Ellipsis(self, n: ast27.Ellipsis) -> EllipsisExpr:
         return EllipsisExpr()
 
     # Attribute(expr value, identifier attr, expr_context ctx)
     @with_line
-    def visit_Attribute(self, n: ast35.Attribute) -> Node:
-        if (isinstance(n.value, ast35.Call) and
-                isinstance(n.value.func, ast35.Name) and
+    def visit_Attribute(self, n: ast27.Attribute) -> Expression:
+        if (isinstance(n.value, ast27.Call) and
+                isinstance(n.value.func, ast27.Name) and
                 n.value.func.id == 'super'):
             return SuperExpr(n.attr)
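
visit_Call above flattens a Python 2 call into three parallel lists: argument expressions, argument kinds, and keyword names. A minimal sketch of that layout over plain values; the ARG_* constants are written inline here as assumed stand-ins for the ones imported from mypy.nodes:

    from typing import List, Optional, Tuple

    ARG_POS, ARG_STAR, ARG_NAMED, ARG_STAR2 = 0, 2, 3, 4  # assumed values

    def call_layout(args: List[str],
                    starargs: Optional[str],
                    keywords: List[Tuple[str, str]],
                    kwargs: Optional[str]):
        exprs = list(args)                 # f(a, b)
        kinds = [ARG_POS] * len(args)
        names = [None] * len(args)
        if starargs is not None:           # f(*rest)
            exprs.append(starargs)
            kinds.append(ARG_STAR)
            names.append(None)
        for name, value in keywords:       # f(key=b)
            exprs.append(value)
            kinds.append(ARG_NAMED)
            names.append(name)
        if kwargs is not None:             # f(**extra)
            exprs.append(kwargs)
            kinds.append(ARG_STAR2)
            names.append(None)
        return exprs, kinds, names

    # f(a, *rest, key=b, **extra)
    exprs, kinds, names = call_layout(['a'], 'rest', [('key', 'b')], 'extra')
    assert kinds == [ARG_POS, ARG_STAR, ARG_NAMED, ARG_STAR2]
    assert names == [None, None, 'key', None]
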
 
@@ -758,107 +852,36 @@ class ASTConverter(ast35.NodeTransformer):
 
     # Subscript(expr value, slice slice, expr_context ctx)
     @with_line
-    def visit_Subscript(self, n: ast35.Subscript) -> Node:
+    def visit_Subscript(self, n: ast27.Subscript) -> IndexExpr:
         return IndexExpr(self.visit(n.value), self.visit(n.slice))
 
-    # Starred(expr value, expr_context ctx)
-    @with_line
-    def visit_Starred(self, n: ast35.Starred) -> Node:
-        return StarExpr(self.visit(n.value))
-
     # Name(identifier id, expr_context ctx)
     @with_line
-    def visit_Name(self, n: ast35.Name) -> Node:
+    def visit_Name(self, n: ast27.Name) -> NameExpr:
         return NameExpr(n.id)
 
     # List(expr* elts, expr_context ctx)
     @with_line
-    def visit_List(self, n: ast35.List) -> Node:
+    def visit_List(self, n: ast27.List) -> ListExpr:
         return ListExpr([self.visit(e) for e in n.elts])
 
     # Tuple(expr* elts, expr_context ctx)
     @with_line
-    def visit_Tuple(self, n: ast35.Tuple) -> Node:
+    def visit_Tuple(self, n: ast27.Tuple) -> TupleExpr:
         return TupleExpr([self.visit(e) for e in n.elts])
 
     # --- slice ---
 
     # Slice(expr? lower, expr? upper, expr? step)
-    def visit_Slice(self, n: ast35.Slice) -> Node:
+    def visit_Slice(self, n: ast27.Slice) -> SliceExpr:
         return SliceExpr(self.visit(n.lower),
                          self.visit(n.upper),
                          self.visit(n.step))
 
     # ExtSlice(slice* dims)
-    def visit_ExtSlice(self, n: ast35.ExtSlice) -> Node:
-        return TupleExpr(self.visit_list(n.dims))
+    def visit_ExtSlice(self, n: ast27.ExtSlice) -> TupleExpr:
+        return TupleExpr(self.translate_expr_list(n.dims))
 
     # Index(expr value)
-    def visit_Index(self, n: ast35.Index) -> Node:
+    def visit_Index(self, n: ast27.Index) -> Expression:
         return self.visit(n.value)
-
-
-class TypeConverter(ast35.NodeTransformer):
-    def __init__(self, line: int = -1) -> None:
-        self.line = line
-
-    def generic_visit(self, node: ast35.AST) -> None:
-        raise TypeCommentParseError(TYPE_COMMENT_AST_ERROR, self.line)
-
-    def visit_NoneType(self, n: Any) -> Type:
-        return None
-
-    def visit_list(self, l: Sequence[ast35.AST]) -> List[Type]:
-        return [self.visit(e) for e in l]
-
-    def visit_Name(self, n: ast35.Name) -> Type:
-        return UnboundType(n.id, line=self.line)
-
-    def visit_NameConstant(self, n: ast35.NameConstant) -> Type:
-        return UnboundType(str(n.value))
-
-    # Str(string s)
-    def visit_Str(self, n: ast35.Str) -> Type:
-        return parse_type_comment(n.s.strip(), line=self.line)
-
-    # Subscript(expr value, slice slice, expr_context ctx)
-    def visit_Subscript(self, n: ast35.Subscript) -> Type:
-        assert isinstance(n.slice, ast35.Index)
-
-        value = self.visit(n.value)
-
-        assert isinstance(value, UnboundType)
-        assert not value.args
-
-        if isinstance(n.slice.value, ast35.Tuple):
-            params = self.visit_list(n.slice.value.elts)
-        else:
-            params = [self.visit(n.slice.value)]
-
-        return UnboundType(value.name, params, line=self.line)
-
-    def visit_Tuple(self, n: ast35.Tuple) -> Type:
-        return TupleType(self.visit_list(n.elts), None, implicit=True, line=self.line)
-
-    # Attribute(expr value, identifier attr, expr_context ctx)
-    def visit_Attribute(self, n: ast35.Attribute) -> Type:
-        before_dot = self.visit(n.value)
-
-        assert isinstance(before_dot, UnboundType)
-        assert not before_dot.args
-
-        return UnboundType("{}.{}".format(before_dot.name, n.attr), line=self.line)
-
-    # Ellipsis
-    def visit_Ellipsis(self, n: ast35.Ellipsis) -> Type:
-        return EllipsisType(line=self.line)
-
-    # List(expr* elts, expr_context ctx)
-    def visit_List(self, n: ast35.List) -> Type:
-        return TypeList(self.visit_list(n.elts), line=self.line)
-
-
-class TypeCommentParseError(Exception):
-    def __init__(self, msg: str, lineno: int) -> None:
-        self.msg = msg
-        self.lineno = lineno
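
visit_Str above preserves bytes-literal contents by slicing the text form of the bytes object, as its in-code comment explains. That hack in isolation, under Python 3:

    n = b'hi\n'
    # str(b'hi\n') is "b'hi\\n'"; dropping the b-prefix and the surrounding
    # quotes keeps the escaped contents as a plain str.
    contents = str(n)[2:-1]
    assert contents == 'hi\\n'
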
diff --git a/mypy/fixup.py b/mypy/fixup.py
index 929da24..eec31ec 100644
--- a/mypy/fixup.py
+++ b/mypy/fixup.py
@@ -90,9 +90,6 @@ class NodeFixer(NodeVisitor[None]):
             func.info = self.current_info
         if func.type is not None:
             func.type.accept(self.type_fixer)
-        for arg in func.arguments:
-            if arg.type_annotation is not None:
-                arg.type_annotation.accept(self.type_fixer)
 
     def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None:
         if self.current_info is not None:
@@ -236,9 +233,9 @@ def lookup_qualified_stnode(modules: Dict[str, MypyFile], name: str) -> SymbolTa
     while True:
         assert '.' in head, "Cannot find %s" % (name,)
         head, tail = head.rsplit('.', 1)
+        rest.append(tail)
         mod = modules.get(head)
         if mod is not None:
-            rest.append(tail)
             break
     names = mod.names
     while True:
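
The fixup.py change above records every stripped tail before checking whether the remaining prefix is a loaded module, so no name component is lost when several prefixes miss. A standalone sketch of the loop's behaviour, with a set standing in for the modules table:

    from typing import List, Set, Tuple

    def split_qualified(name: str, modules: Set[str]) -> Tuple[str, List[str]]:
        head = name
        rest = []  # type: List[str]
        while True:
            assert '.' in head, "Cannot find %s" % (name,)
            head, tail = head.rsplit('.', 1)
            rest.append(tail)          # recorded unconditionally, as in the fix
            if head in modules:
                return head, list(reversed(rest))

    assert split_qualified('pkg.mod.Cls.attr', {'pkg.mod'}) == \
        ('pkg.mod', ['Cls', 'attr'])
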
diff --git a/mypy/indirection.py b/mypy/indirection.py
new file mode 100644
index 0000000..77c5a59
--- /dev/null
+++ b/mypy/indirection.py
@@ -0,0 +1,103 @@
+from typing import Dict, Iterable, List, Optional, Set
+from abc import abstractmethod
+
+from mypy.visitor import NodeVisitor
+from mypy.types import TypeVisitor
+from mypy.nodes import MODULE_REF
+import mypy.nodes as nodes
+import mypy.types as types
+from mypy.util import split_module_names
+
+
+def extract_module_names(type_name: Optional[str]) -> List[str]:
+    """Returns the module names of a fully qualified type name."""
+    if type_name is not None:
+        # Discard the first one, which is just the qualified name of the type
+        possible_module_names = split_module_names(type_name)
+        return possible_module_names[1:]
+    else:
+        return []
+
+
+class TypeIndirectionVisitor(TypeVisitor[Set[str]]):
+    """Returns all module references within a particular type."""
+
+    def __init__(self) -> None:
+        self.cache = {}  # type: Dict[types.Type, Set[str]]
+
+    def find_modules(self, typs: Iterable[types.Type]) -> Set[str]:
+        return self._visit(*typs)
+
+    def _visit(self, *typs: types.Type) -> Set[str]:
+        output = set()  # type: Set[str]
+        for typ in typs:
+            if typ in self.cache:
+                modules = self.cache[typ]
+            else:
+                modules = typ.accept(self)
+                self.cache[typ] = set(modules)
+            output.update(modules)
+        return output
+
+    def visit_unbound_type(self, t: types.UnboundType) -> Set[str]:
+        return self._visit(*t.args)
+
+    def visit_type_list(self, t: types.TypeList) -> Set[str]:
+        return self._visit(*t.items)
+
+    def visit_error_type(self, t: types.ErrorType) -> Set[str]:
+        return set()
+
+    def visit_any(self, t: types.AnyType) -> Set[str]:
+        return set()
+
+    def visit_void(self, t: types.Void) -> Set[str]:
+        return set()
+
+    def visit_none_type(self, t: types.NoneTyp) -> Set[str]:
+        return set()
+
+    def visit_uninhabited_type(self, t: types.UninhabitedType) -> Set[str]:
+        return set()
+
+    def visit_erased_type(self, t: types.ErasedType) -> Set[str]:
+        return set()
+
+    def visit_deleted_type(self, t: types.DeletedType) -> Set[str]:
+        return set()
+
+    def visit_type_var(self, t: types.TypeVarType) -> Set[str]:
+        return self._visit(*t.values) | self._visit(t.upper_bound)
+
+    def visit_instance(self, t: types.Instance) -> Set[str]:
+        out = self._visit(*t.args)
+        if t.type is not None:
+            out.update(split_module_names(t.type.module_name))
+        return out
+
+    def visit_callable_type(self, t: types.CallableType) -> Set[str]:
+        out = self._visit(*t.arg_types) | self._visit(t.ret_type)
+        if t.definition is not None:
+            out.update(extract_module_names(t.definition.fullname()))
+        return out
+
+    def visit_overloaded(self, t: types.Overloaded) -> Set[str]:
+        return self._visit(*t.items()) | self._visit(t.fallback)
+
+    def visit_tuple_type(self, t: types.TupleType) -> Set[str]:
+        return self._visit(*t.items) | self._visit(t.fallback)
+
+    def visit_star_type(self, t: types.StarType) -> Set[str]:
+        return set()
+
+    def visit_union_type(self, t: types.UnionType) -> Set[str]:
+        return self._visit(*t.items)
+
+    def visit_partial_type(self, t: types.PartialType) -> Set[str]:
+        return set()
+
+    def visit_ellipsis_type(self, t: types.EllipsisType) -> Set[str]:
+        return set()
+
+    def visit_type_type(self, t: types.TypeType) -> Set[str]:
+        return self._visit(t.item)
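
extract_module_names above keeps only the enclosing module prefixes of a qualified name, discarding the first entry returned by split_module_names. Assuming that helper (defined in mypy/util.py, not shown in this hunk) behaves roughly as sketched:

    from typing import List, Optional

    def split_module_names(name: str) -> List[str]:
        # Presumed behaviour: the name itself followed by every parent prefix.
        out = [name]
        while '.' in name:
            name = name.rsplit('.', 1)[0]
            out.append(name)
        return out

    def extract_module_names(type_name: Optional[str]) -> List[str]:
        # Discard the first entry, the qualified name of the type itself.
        return split_module_names(type_name)[1:] if type_name is not None else []

    assert extract_module_names('collections.abc.Mapping') == \
        ['collections.abc', 'collections']
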
diff --git a/mypy/join.py b/mypy/join.py
index b5de3fd..c6d6333 100644
--- a/mypy/join.py
+++ b/mypy/join.py
@@ -1,6 +1,6 @@
 """Calculation of the least upper bound types (joins)."""
 
-from typing import cast, List
+from typing import List
 
 from mypy.types import (
     Type, AnyType, NoneTyp, Void, TypeVisitor, Instance, UnboundType,
@@ -231,8 +231,9 @@ class TypeJoinVisitor(TypeVisitor[Type]):
             items = []  # type: List[Type]
             for i in range(t.length()):
                 items.append(self.join(t.items[i], self.s.items[i]))
-            # TODO: What if the fallback types are different?
-            return TupleType(items, t.fallback)
+            # join fallback types if they are different
+            from typing import cast
+            return TupleType(items, cast(Instance, join_instances(self.s.fallback, t.fallback)))
         else:
             return self.default(self.s)
 
diff --git a/mypy/lex.py b/mypy/lex.py
index 41f4262..f074de9 100644
--- a/mypy/lex.py
+++ b/mypy/lex.py
@@ -27,6 +27,7 @@ class Token:
         self.string = string
         self.pre = pre
         self.line = 0
+        self.column = 0
 
     def __repr__(self) -> str:
         """The representation is of form 'Keyword(  if)'."""
@@ -273,9 +274,10 @@ def escape_repl(m: Match[str], prefix: str) -> str:
 class Lexer:
     """Lexical analyzer."""
 
-    i = 0      # Current string index (into s)
-    s = ''     # The string being analyzed
-    line = 0   # Current line number
+    i = 0       # Current string index (into s)
+    s = ''      # The string being analyzed
+    line = 0    # Current line number
+    column = 0  # Current column number
     pre_whitespace = ''     # Whitespace and comments before the next token
     enc = ''                # Encoding
 
@@ -339,6 +341,7 @@ class Lexer:
         """Lexically analyze a string, storing the tokens at the tok list."""
         self.i = 0
         self.line = first_line
+        self.column = 0
 
         if isinstance(text, bytes):
             if text.startswith(b'\xef\xbb\xbf'):
@@ -612,6 +615,7 @@ class Lexer:
         line = self.line
         ss = self.s[self.i:self.i + len(prefix) + 3]
         self.i += len(prefix) + 3
+        self.column += len(prefix) + 3
         while True:
             m = re3end.match(self.s, self.i)
             if m is not None:
@@ -625,6 +629,7 @@ class Lexer:
             ss += s
             self.line += 1
             self.i += len(s)
+            self.column += len(s)
         lit = None  # type: Token
         if 'b' in prefix or 'B' in prefix:
             lit = BytesLit(ss + m.group(0))
@@ -642,6 +647,7 @@ class Lexer:
         """
         line = self.line
         self.i += len(prefix)
+        self.column += len(prefix)
         ss = prefix
         while True:
             m = self.match(re_end)
@@ -652,6 +658,7 @@ class Lexer:
             ss += m
             self.line += 1
             self.i += len(m)
+            self.column += len(m)
             if not m.endswith('\n') and not m.endswith('\r'): break
         self.add_special_token(StrLit(ss), line, 0)  # TODO bytes
 
@@ -740,15 +747,18 @@ class Lexer:
             last_tok.string += self.pre_whitespace + s
             self.i += len(s)
             self.line += 1
+            self.column = 0
             self.pre_whitespace = ''
             if was_semicolon:
                 self.lex_indent()
         elif self.ignore_break():
             self.add_pre_whitespace(s)
             self.line += 1
+            self.column = 0
         else:
             self.add_token(Break(s))
             self.line += 1
+            self.column = 0
             self.lex_indent()
 
     def lex_semicolon(self) -> None:
@@ -828,6 +838,7 @@ class Lexer:
         """
         self.pre_whitespace += s
         self.i += len(s)
+        self.column += len(s)
 
     type_ignore_exp = re.compile(r'[ \t]*#[ \t]*type:[ \t]*ignore\b')
 
@@ -849,8 +860,10 @@ class Lexer:
                 delta += 1
             self.ignored_lines.add(self.line - delta)
         tok.line = self.line
+        tok.column = self.column
         self.tok.append(tok)
         self.i += len(tok.string)
+        self.column += len(tok.string)
         self.pre_whitespace = ''
 
     def add_special_token(self, tok: Token, line: int, skip: int) -> None:
@@ -864,6 +877,7 @@ class Lexer:
         tok.line = line
         self.tok.append(tok)
         self.i += skip
+        self.column += skip
         self.pre_whitespace = ''
 
     def ignore_break(self) -> bool:
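
The lex.py hunks above add a column counter that advances in lock-step with the scan index and resets to zero at each line break. That bookkeeping in isolation, as a small sketch:

    class Cursor:
        def __init__(self) -> None:
            self.i = 0        # index into the source string
            self.column = 0   # column on the current line

        def advance(self, text: str) -> None:
            # Consuming token or whitespace text moves both counters.
            self.i += len(text)
            self.column += len(text)

        def newline(self, text: str) -> None:
            # A line break advances the index but resets the column.
            self.i += len(text)
            self.column = 0

    c = Cursor()
    c.advance('if x:')
    assert (c.i, c.column) == (5, 5)
    c.newline('\n')
    assert (c.i, c.column) == (6, 0)
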
diff --git a/mypy/main.py b/mypy/main.py
index de1f628..62d068c 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -1,19 +1,21 @@
 """Mypy type checker command line tool."""
 
 import argparse
+import configparser
 import os
 import re
 import sys
 
-from typing import Any, Dict, List, Optional, Set, Tuple
+from typing import Any, Dict, List, Mapping, Optional, Set, Tuple
 
 from mypy import build
 from mypy import defaults
 from mypy import git
 from mypy import experiments
 from mypy.build import BuildSource, BuildResult, PYTHON_EXTENSIONS
-from mypy.errors import CompileError, set_drop_into_pdb, set_show_tb
+from mypy.errors import CompileError
 from mypy.options import Options, BuildType
+from mypy.report import reporter_classes
 
 from mypy.version import __version__
 
@@ -31,10 +33,6 @@ def main(script_path: str) -> None:
     else:
         bin_dir = None
     sources, options = process_options(sys.argv[1:])
-    if options.pdb:
-        set_drop_into_pdb(True)
-    if options.show_traceback:
-        set_show_tb(True)
     f = sys.stdout
     try:
         res = type_check_only(sources, bin_dir, options)
@@ -116,18 +114,16 @@ def parse_version(v: str) -> Tuple[int, int]:
             "Invalid python version '{}' (expected format: 'x.y')".format(v))
 
 
-def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
-    """Process command line arguments.
-
-    Return (mypy program path (or None),
-            module to run as script (or None),
-            parsed flags)
-    """
+def process_options(args: List[str],
+                    require_targets: bool = True
+                    ) -> Tuple[List[BuildSource], Options]:
+    """Parse command line arguments."""
 
     # Make the help output a little less jarring.
     help_factory = (lambda prog:
                     argparse.RawDescriptionHelpFormatter(prog=prog, max_help_position=28))
     parser = argparse.ArgumentParser(prog='mypy', epilog=FOOTER,
+                                     fromfile_prefix_chars='@',
                                      formatter_class=help_factory)
 
     # Unless otherwise specified, arguments will be parsed directly onto an
@@ -157,6 +153,8 @@ def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
                         " or with incomplete type annotations")
     parser.add_argument('--check-untyped-defs', action='store_true',
                         help="type check the interior of functions without type annotations")
+    parser.add_argument('--disallow-subclassing-any', action='store_true',
+                        help="disallow subclassing values of type 'Any' when defining classes")
     parser.add_argument('--warn-incomplete-stub', action='store_true',
                         help="warn if missing type annotation in typeshed, only relevant with"
                         " --check-untyped-defs enabled")
@@ -164,19 +162,24 @@ def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
                         help="warn about casting an expression to its inferred type")
     parser.add_argument('--warn-unused-ignores', action='store_true',
                         help="warn about unneeded '# type: ignore' comments")
-    parser.add_argument('--suppress-error-context', action='store_true',
-                        dest='suppress_error_context',
-                        help="Suppress context notes before errors")
+    parser.add_argument('--hide-error-context', action='store_true',
+                        dest='hide_error_context',
+                        help="Hide context notes before errors")
     parser.add_argument('--fast-parser', action='store_true',
                         help="enable experimental fast parser")
     parser.add_argument('-i', '--incremental', action='store_true',
                         help="enable experimental module cache")
     parser.add_argument('--cache-dir', action='store', metavar='DIR',
                         help="store module cache info in the given folder in incremental mode "
-                        "(defaults to '{}')".format(defaults.MYPY_CACHE))
+                        "(defaults to '{}')".format(defaults.CACHE_DIR))
     parser.add_argument('--strict-optional', action='store_true',
-                        dest='special-opts:strict_optional',
+                        dest='strict_optional',
                         help="enable experimental strict Optional checks")
+    parser.add_argument('--strict-optional-whitelist', metavar='GLOB', nargs='*',
+                        help="suppress strict Optional errors in all but the provided files "
+                        "(experimental -- read documentation before using!).  "
+                        "Implies --strict-optional.  Has the undesirable side-effect of "
+                        "suppressing other errors in non-whitelisted files.")
     parser.add_argument('--pdb', action='store_true', help="invoke pdb on fatal error")
     parser.add_argument('--show-traceback', '--tb', action='store_true',
                         help="show traceback on fatal error")
@@ -185,6 +188,14 @@ def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
                         help="dump type inference stats")
     parser.add_argument('--custom-typing', metavar='MODULE', dest='custom_typing_module',
                         help="use a custom typing module")
+    parser.add_argument('--scripts-are-modules', action='store_true',
+                        help="Script x becomes module x instead of __main__")
+    parser.add_argument('--config-file',
+                        help="Configuration file, must have a [mypy] section "
+                        "(defaults to {})".format(defaults.CONFIG_FILE))
+    parser.add_argument('--show-column-numbers', action='store_true',
+                        dest='show_column_numbers',
+                        help="Show column numbers in error messages")
     # hidden options
     # --shadow-file a.py tmp.py will typecheck tmp.py in place of a.py.
     # Useful for tools to make transformations to a file to get more
@@ -192,6 +203,10 @@ def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
     # (e.g. by adding a call to reveal_type).
     parser.add_argument('--shadow-file', metavar='PATH', nargs=2, dest='shadow_file',
                         help=argparse.SUPPRESS)
+    # --debug-cache will disable any cache-related compressions/optimizations,
+    # which will make the cache writing process output pretty-printed JSON (which
+    # is easier to debug).
+    parser.add_argument('--debug-cache', action='store_true', help=argparse.SUPPRESS)
     # deprecated options
     parser.add_argument('--silent', action='store_true', dest='special-opts:silent',
                         help=argparse.SUPPRESS)
@@ -205,36 +220,38 @@ def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
     report_group = parser.add_argument_group(
         title='report generation',
         description='Generate a report in the specified format.')
-    report_group.add_argument('--html-report', metavar='DIR',
-                              dest='special-opts:html_report')
-    report_group.add_argument('--old-html-report', metavar='DIR',
-                              dest='special-opts:old_html_report')
-    report_group.add_argument('--xslt-html-report', metavar='DIR',
-                              dest='special-opts:xslt_html_report')
-    report_group.add_argument('--xml-report', metavar='DIR',
-                              dest='special-opts:xml_report')
-    report_group.add_argument('--txt-report', metavar='DIR',
-                              dest='special-opts:txt_report')
-    report_group.add_argument('--xslt-txt-report', metavar='DIR',
-                              dest='special-opts:xslt_txt_report')
-    report_group.add_argument('--linecount-report', metavar='DIR',
-                              dest='special-opts:linecount_report')
+    for report_type in reporter_classes:
+        report_group.add_argument('--%s-report' % report_type.replace('_', '-'),
+                                  metavar='DIR',
+                                  dest='special-opts:%s_report' % report_type)
 
     code_group = parser.add_argument_group(title='How to specify the code to type check')
     code_group.add_argument('-m', '--module', action='append', metavar='MODULE',
                             dest='special-opts:modules',
                             help="type-check module; can repeat for more modules")
-    # TODO: `mypy -c A -c B` and `mypy -p A -p B` currently silently
-    # ignore A (last option wins).  Perhaps -c, -m and -p could just
-    # be command-line flags that modify how we interpret self.files?
-    code_group.add_argument('-c', '--command', metavar='PROGRAM_TEXT', dest='special-opts:command',
+    # TODO: `mypy -p A -p B` currently silently ignores A
+    # (last option wins).  Perhaps -c, -m and -p could just be
+    # command-line flags that modify how we interpret self.files?
+    code_group.add_argument('-c', '--command', action='append', metavar='PROGRAM_TEXT',
+                            dest='special-opts:command',
                             help="type-check program passed in as string")
     code_group.add_argument('-p', '--package', metavar='PACKAGE', dest='special-opts:package',
                             help="type-check all files in a directory")
     code_group.add_argument(metavar='files', nargs='*', dest='special-opts:files',
                             help="type-check given files or directories")
 
+    # Parse arguments once into a dummy namespace so we can get the
+    # filename for the config file.
+    dummy = argparse.Namespace()
+    parser.parse_args(args, dummy)
+    config_file = dummy.config_file or defaults.CONFIG_FILE
+
+    # Parse config file first, so command line can override.
     options = Options()
+    if config_file and os.path.exists(config_file):
+        parse_config_file(options, config_file)
+
+    # Parse command line for real, using a split namespace.
     special_opts = argparse.Namespace()
     parser.parse_args(args, SplitNamespace(options, special_opts, 'special-opts:'))
 
@@ -257,17 +274,21 @@ def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
               file=sys.stderr)
 
     # Check for invalid argument combinations.
-    code_methods = sum(bool(c) for c in [special_opts.modules,
-                                         special_opts.command,
-                                         special_opts.package,
-                                         special_opts.files])
-    if code_methods == 0:
-        parser.error("Missing target module, package, files, or command.")
-    elif code_methods > 1:
-        parser.error("May only specify one of: module, package, files, or command.")
+    if require_targets:
+        code_methods = sum(bool(c) for c in [special_opts.modules,
+                                            special_opts.command,
+                                            special_opts.package,
+                                            special_opts.files])
+        if code_methods == 0:
+            parser.error("Missing target module, package, files, or command.")
+        elif code_methods > 1:
+            parser.error("May only specify one of: module, package, files, or command.")
 
     # Set build flags.
-    if special_opts.strict_optional:
+    if options.strict_optional_whitelist is not None:
+        # TODO: Deprecate, then kill this flag
+        options.strict_optional = True
+    if options.strict_optional:
         experiments.STRICT_OPTIONAL = True
 
     # Set reports.
@@ -294,7 +315,7 @@ def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
         return targets, options
     elif special_opts.command:
         options.build_type = BuildType.PROGRAM_TEXT
-        return [BuildSource(None, None, special_opts.command)], options
+        return [BuildSource(None, None, '\n'.join(special_opts.command))], options
     else:
         targets = []
         for f in special_opts.files:
@@ -307,7 +328,8 @@ def process_options(args: List[str]) -> Tuple[List[BuildSource], Options]:
                          .format(f))
                 targets.extend(sub_targets)
             else:
-                targets.append(BuildSource(f, None, None))
+                mod = os.path.basename(f) if options.scripts_are_modules else None
+                targets.append(BuildSource(f, mod, None))
         return targets, options
 
 
@@ -401,6 +423,102 @@ def get_init_file(dir: str) -> Optional[str]:
     return None
 
 
+# For most options, the type of the default value set in options.py is
+# sufficient, and we don't have to do anything here.  This table
+# exists to specify types for values initialized to None or container
+# types.
+config_types = {
+    # TODO: Check validity of python version
+    'python_version': lambda s: tuple(map(int, s.split('.'))),
+    'strict_optional_whitelist': lambda s: s.split(),
+    'custom_typing_module': str,
+}
+
+
+def parse_config_file(options: Options, filename: str) -> None:
+    """Parse a config file into an Options object.
+
+    Errors are written to stderr but are not fatal.
+    """
+    parser = configparser.RawConfigParser()
+    try:
+        parser.read(filename)
+    except configparser.Error as err:
+        print("%s: %s" % (filename, err), file=sys.stderr)
+        return
+    if 'mypy' not in parser:
+        print("%s: No [mypy] section in config file" % filename, file=sys.stderr)
+        return
+
+    section = parser['mypy']
+    prefix = '%s: [%s]' % (filename, 'mypy')
+    updates, report_dirs = parse_section(prefix, options, section)
+    for k, v in updates.items():
+        setattr(options, k, v)
+    options.report_dirs.update(report_dirs)
+
+    for name, section in parser.items():
+        if name.startswith('mypy-'):
+            prefix = '%s: [%s]' % (filename, name)
+            updates, report_dirs = parse_section(prefix, options, section)
+            # TODO: Limit updates to flags that can be per-file.
+            if report_dirs:
+                print("%s: Per-file sections should not specify reports (%s)" %
+                      (prefix, ', '.join(s + '_report' for s in sorted(report_dirs))),
+                      file=sys.stderr)
+            if set(updates) - Options.PER_FILE_OPTIONS:
+                print("%s: Per-file sections should only specify per-file flags (%s)" %
+                      (prefix, ', '.join(sorted(set(updates) - Options.PER_FILE_OPTIONS))),
+                      file=sys.stderr)
+                updates = {k: v for k, v in updates.items() if k in Options.PER_FILE_OPTIONS}
+            globs = name[5:]
+            for glob in globs.split(','):
+                options.per_file_options[glob] = updates
+
+
+def parse_section(prefix: str, template: Options,
+                  section: Mapping[str, str]) -> Tuple[Dict[str, object], Dict[str, str]]:
+    """Parse one section of a config file.
+
+    Returns a dict of option values encountered, and a dict of report directories.
+    """
+    results = {}
+    report_dirs = {}  # type: Dict[str, str]
+    for key in section:
+        key = key.replace('-', '_')
+        if key in config_types:
+            ct = config_types[key]
+        else:
+            dv = getattr(template, key, None)
+            if dv is None:
+                if key.endswith('_report'):
+                    report_type = key[:-7].replace('_', '-')
+                    if report_type in reporter_classes:
+                        report_dirs[report_type] = section.get(key)
+                    else:
+                        print("%s: Unrecognized report type: %s" % (prefix, key),
+                              file=sys.stderr)
+                    continue
+                print("%s: Unrecognized option: %s = %s" % (prefix, key, section[key]),
+                      file=sys.stderr)
+                continue
+            ct = type(dv)
+        v = None  # type: Any
+        try:
+            if ct is bool:
+                v = section.getboolean(key)  # type: ignore  # Until better stub
+            elif callable(ct):
+                v = ct(section.get(key))
+            else:
+                print("%s: Don't know what type %s should have" % (prefix, key), file=sys.stderr)
+                continue
+        except ValueError as err:
+            print("%s: %s: %s" % (prefix, key, err), file=sys.stderr)
+            continue
+        results[key] = v
+    return results, report_dirs
+
+
 def fail(msg: str) -> None:
     sys.stderr.write('%s\n' % msg)
     sys.exit(1)
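
parse_config_file above reads a [mypy] section for global flags and optional mypy-<glob> sections for per-file flags, using configparser just as shown. A sketch of the file shape it accepts; the option names and the glob below are illustrative assumptions rather than a definitive list:

    import configparser
    import textwrap

    sample = textwrap.dedent("""\
        [mypy]
        python_version = 2.7
        fast_parser = true

        [mypy-*/fastparse2.py]
        check_untyped_defs = true
        """)

    parser = configparser.RawConfigParser()
    parser.read_string(sample)

    assert 'mypy' in parser
    assert parser['mypy'].getboolean('fast_parser') is True
    # python_version goes through the config_types table above:
    assert tuple(map(int, parser['mypy']['python_version'].split('.'))) == (2, 7)
    # Sections named mypy-<glob> carry the per-file flags:
    assert [s for s in parser.sections() if s.startswith('mypy-')] == \
        ['mypy-*/fastparse2.py']
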
diff --git a/mypy/messages.py b/mypy/messages.py
index d04050f..b4828f1 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -148,6 +148,7 @@ class MessageBuilder:
         """Report an error or note (unless disabled)."""
         if self.disable_count <= 0:
             self.errors.report(context.get_line() if context else -1,
+                               context.get_column() if context else -1,
                                msg.strip(), severity=severity, file=file)
 
     def fail(self, msg: str, context: Context, file: str = None) -> None:
@@ -235,7 +236,7 @@ class MessageBuilder:
                 for arg in itype.args:
                     a.append(strip_quotes(self.format(arg)))
                 s = ', '.join(a)
-                if len((base_str + s)) < 25:
+                if len((base_str + s)) < 150:
                     return '{}[{}]'.format(base_str, s)
                 else:
                     return '{}[...]'.format(base_str)
@@ -250,7 +251,7 @@ class MessageBuilder:
             for t in typ.items:
                 items.append(strip_quotes(self.format(t)))
             s = '"Tuple[{}]"'.format(', '.join(items))
-            if len(s) < 40:
+            if len(s) < 400:
                 return s
             else:
                 return 'tuple(length {})'.format(len(items))
@@ -266,7 +267,7 @@ class MessageBuilder:
                 for t in typ.items:
                     items.append(strip_quotes(self.format(t)))
                 s = '"Union[{}]"'.format(', '.join(items))
-                if len(s) < 40:
+                if len(s) < 400:
                     return s
                 else:
                     return 'union type ({} items)'.format(len(items))
@@ -417,7 +418,7 @@ class MessageBuilder:
 
     def untyped_function_call(self, callee: CallableType, context: Context) -> Type:
         name = callee.name if callee.name is not None else '(unknown)'
-        self.fail('call to untyped function {} in typed context'.format(name), context)
+        self.fail('Call to untyped function {} in typed context'.format(name), context)
         return AnyType()
 
     def incompatible_argument(self, n: int, m: int, callee: CallableType, arg_type: Type,
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 31dc552..469b308 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -19,6 +19,9 @@ class Context:
     @abstractmethod
     def get_line(self) -> int: pass
 
+    @abstractmethod
+    def get_column(self) -> int: pass
+
 
 if False:
     # break import cycle only needed for mypy
@@ -92,6 +95,7 @@ class Node(Context):
     """Common base class for all non-type parse tree nodes."""
 
     line = -1
+    column = -1
 
     literal = LITERAL_NO
     literal_hash = None  # type: Any
@@ -102,26 +106,42 @@ class Node(Context):
             return repr(self)
         return ans
 
-    def set_line(self, target: Union[Token, 'Node', int]) -> 'Node':
+    def set_line(self, target: Union[Token, 'Node', int], column: int = None) -> None:
+        """If target is a node or token, pull line (and column) information
+        into this node. If column is specified, this will override any column
+        information coming from a node/token.
+        """
         if isinstance(target, int):
             self.line = target
         else:
             self.line = target.line
-        return self
+            self.column = target.column
+
+        if column is not None:
+            self.column = column
 
     def get_line(self) -> int:
         # TODO this should be just 'line'
         return self.line
 
+    def get_column(self) -> int:
+        # TODO this should be just 'column'
+        return self.column
+
     def accept(self, visitor: NodeVisitor[T]) -> T:
         raise RuntimeError('Not implemented')
 
 
-# These are placeholders for a future refactoring; see #1783.
-# For now they serve as (unchecked) documentation of what various
-# fields of Node subtypes are expected to contain.
-Statement = Node
-Expression = Node
+class Statement(Node):
+    """A statement node."""
+
+
+class Expression(Node):
+    """An expression node."""
+
+
+# TODO: Union['NameExpr', 'TupleExpr', 'ListExpr', 'MemberExpr', 'IndexExpr']; see #1783.
+Lvalue = Expression
 
 
 class SymbolNode(Node):
@@ -171,20 +191,16 @@ class MypyFile(SymbolNode, Statement):
     ignored_lines = None  # type: Set[int]
     # Is this file represented by a stub file (.pyi)?
     is_stub = False
-    # Do weak typing globally in the file?
-    weak_opts = None  # type: Set[str]
 
     def __init__(self,
                  defs: List[Statement],
                  imports: List['ImportBase'],
                  is_bom: bool = False,
-                 ignored_lines: Set[int] = None,
-                 weak_opts: Set[str] = None) -> None:
+                 ignored_lines: Set[int] = None) -> None:
         self.defs = defs
         self.line = 1  # Dummy line number
         self.imports = imports
         self.is_bom = is_bom
-        self.weak_opts = weak_opts
         if ignored_lines:
             self.ignored_lines = ignored_lines
         else:
@@ -256,6 +272,8 @@ class Import(ImportBase):
 class ImportFrom(ImportBase):
     """from m import x [as y], ..."""
 
+    id = None  # type: str
+    relative = None  # type: int
     names = None  # type: List[Tuple[str, Optional[str]]]  # Tuples (name, as name)
 
     def __init__(self, id: str, relative: int, names: List[Tuple[str, Optional[str]]]) -> None:
@@ -270,6 +288,8 @@ class ImportFrom(ImportBase):
 
 class ImportAll(ImportBase):
     """from m import *"""
+    id = None  # type: str
+    relative = None  # type: int
 
     def __init__(self, id: str, relative: int) -> None:
         super().__init__()
@@ -374,24 +394,24 @@ class Argument(Node):
         assign = AssignmentStmt([lvalue], rvalue)
         return assign
 
-    def set_line(self, target: Union[Token, Node, int]) -> Node:
-        super().set_line(target)
+    def set_line(self, target: Union[Token, Node, int], column: int = None) -> None:
+        super().set_line(target, column)
 
         if self.initializer:
-            self.initializer.set_line(self.line)
+            self.initializer.set_line(self.line, self.column)
 
-        self.variable.set_line(self.line)
+        self.variable.set_line(self.line, self.column)
 
         if self.initialization_statement:
-            self.initialization_statement.set_line(self.line)
-            self.initialization_statement.lvalues[0].set_line(self.line)
+            self.initialization_statement.set_line(self.line, self.column)
+            self.initialization_statement.lvalues[0].set_line(self.line, self.column)
 
     def serialize(self) -> JsonDict:
+        # Note: we are deliberately not saving the type annotation since
+        # it is not used by later stages of mypy.
         data = {'.class': 'Argument',
                 'kind': self.kind,
                 'variable': self.variable.serialize(),
-                'type_annotation': (None if self.type_annotation is None
-                                    else self.type_annotation.serialize()),
                 }  # type: JsonDict
         # TODO: initializer?
         return data
@@ -400,14 +420,15 @@ class Argument(Node):
     def deserialize(cls, data: JsonDict) -> 'Argument':
         assert data['.class'] == 'Argument'
         return Argument(Var.deserialize(data['variable']),
-                        (None if data.get('type_annotation') is None
-                         else mypy.types.Type.deserialize(data['type_annotation'])),
+                        None,
                         None,  # TODO: initializer?
                         kind=data['kind'])
 
 
 class FuncItem(FuncBase):
     arguments = []  # type: List[Argument]
+    arg_names = []  # type: List[str]
+    arg_kinds = []  # type: List[int]
     # Minimum number of arguments
     min_args = 0
     # Maximum number of positional arguments, -1 if no explicit limit (*args not included)
@@ -423,11 +444,17 @@ class FuncItem(FuncBase):
     # Variants of function with type variables with values expanded
     expanded = None  # type: List[FuncItem]
 
+    FLAGS = [
+        'is_overload', 'is_generator', 'is_coroutine', 'is_awaitable_coroutine',
+        'is_static', 'is_class',
+    ]
+
     def __init__(self, arguments: List[Argument], body: 'Block',
                  typ: 'mypy.types.FunctionLike' = None) -> None:
         self.arguments = arguments
-        arg_kinds = [arg.kind for arg in self.arguments]
-        self.max_pos = arg_kinds.count(ARG_POS) + arg_kinds.count(ARG_OPT)
+        self.arg_names = [arg.variable.name() for arg in self.arguments]
+        self.arg_kinds = [arg.kind for arg in self.arguments]
+        self.max_pos = self.arg_kinds.count(ARG_POS) + self.arg_kinds.count(ARG_OPT)
         self.body = body
         self.type = typ
         self.expanded = []
@@ -440,11 +467,10 @@ class FuncItem(FuncBase):
     def max_fixed_argc(self) -> int:
         return self.max_pos
 
-    def set_line(self, target: Union[Token, Node, int]) -> Node:
-        super().set_line(target)
+    def set_line(self, target: Union[Token, Node, int], column: int = None) -> None:
+        super().set_line(target, column)
         for arg in self.arguments:
-            arg.set_line(self.line)
-        return self
+            arg.set_line(self.line, self.column)
 
     def is_dynamic(self) -> bool:
         return self.type is None
@@ -462,6 +488,10 @@ class FuncDef(FuncItem, Statement):
     is_property = False
     original_def = None  # type: Union[None, FuncDef, Var]  # Original conditional definition
 
+    FLAGS = FuncItem.FLAGS + [
+        'is_decorated', 'is_conditional', 'is_abstract', 'is_property'
+    ]
+
     def __init__(self,
                  name: str,              # Function name
                  arguments: List[Argument],
@@ -480,20 +510,19 @@ class FuncDef(FuncItem, Statement):
         return self.info is not None and self._name == '__init__'
 
     def serialize(self) -> JsonDict:
+        # We're deliberately omitting arguments and storing only arg_names and
+        # arg_kinds for space-saving reasons (arguments is not used in later
+        # stages of mypy).
+        # TODO: After a FuncDef is deserialized, the only time we use `arg_names`
+        # and `arg_kinds` is when `type` is None and we need to infer a type. Can
+        # we store the inferred type ahead of time?
         return {'.class': 'FuncDef',
                 'name': self._name,
                 'fullname': self._fullname,
-                'arguments': [a.serialize() for a in self.arguments],
+                'arg_names': self.arg_names,
+                'arg_kinds': self.arg_kinds,
                 'type': None if self.type is None else self.type.serialize(),
-                'is_property': self.is_property,
-                'is_overload': self.is_overload,
-                'is_generator': self.is_generator,
-                'is_coroutine': self.is_coroutine,
-                'is_static': self.is_static,
-                'is_class': self.is_class,
-                'is_decorated': self.is_decorated,
-                'is_conditional': self.is_conditional,
-                'is_abstract': self.is_abstract,
+                'flags': get_flags(self, FuncDef.FLAGS),
                 # TODO: Do we need expanded, original_def?
                 }
 
@@ -502,21 +531,19 @@ class FuncDef(FuncItem, Statement):
         assert data['.class'] == 'FuncDef'
         body = Block([])
         ret = FuncDef(data['name'],
-                      [Argument.deserialize(a) for a in data['arguments']],
+                      [],
                       body,
                       (None if data['type'] is None
                        else mypy.types.FunctionLike.deserialize(data['type'])))
         ret._fullname = data['fullname']
-        ret.is_property = data['is_property']
-        ret.is_overload = data['is_overload']
-        ret.is_generator = data['is_generator']
-        ret.is_coroutine = data['is_coroutine']
-        ret.is_static = data['is_static']
-        ret.is_class = data['is_class']
-        ret.is_decorated = data['is_decorated']
-        ret.is_conditional = data['is_conditional']
-        ret.is_abstract = data['is_abstract']
+        set_flags(ret, data['flags'])
         # NOTE: ret.info is set in the fixup phase.
+        ret.arg_names = data['arg_names']
+        ret.arg_kinds = data['arg_kinds']
+        # Mark these as 'None' so that future uses will trigger an error
+        ret.arguments = None
+        ret.max_pos = None
+        ret.min_args = None
         return ret
 
 
@@ -583,6 +610,14 @@ class Var(SymbolNode, Statement):
     is_classmethod = False
     is_property = False
     is_settable_property = False
+    # Set to true when this variable refers to a module we were unable to
+    # parse for some reason (eg a silenced module)
+    is_suppressed_import = False
+
+    FLAGS = [
+        'is_self', 'is_ready', 'is_initialized_in_class', 'is_staticmethod',
+        'is_classmethod', 'is_property', 'is_settable_property', 'is_suppressed_import'
+    ]
 
     def __init__(self, name: str, type: 'mypy.types.Type' = None) -> None:
         self._name = name
@@ -607,12 +642,7 @@ class Var(SymbolNode, Statement):
                 'name': self._name,
                 'fullname': self._fullname,
                 'type': None if self.type is None else self.type.serialize(),
-                'is_self': self.is_self,
-                'is_initialized_in_class': self.is_initialized_in_class,
-                'is_staticmethod': self.is_staticmethod,
-                'is_classmethod': self.is_classmethod,
-                'is_property': self.is_property,
-                'is_settable_property': self.is_settable_property,
+                'flags': get_flags(self, Var.FLAGS),
                 }  # type: JsonDict
         return data
 
@@ -623,12 +653,7 @@ class Var(SymbolNode, Statement):
         type = None if data['type'] is None else mypy.types.Type.deserialize(data['type'])
         v = Var(name, type)
         v._fullname = data['fullname']
-        v.is_self = data['is_self']
-        v.is_initialized_in_class = data['is_initialized_in_class']
-        v.is_staticmethod = data['is_staticmethod']
-        v.is_classmethod = data['is_classmethod']
-        v.is_property = data['is_property']
-        v.is_settable_property = data['is_settable_property']
+        set_flags(v, data['flags'])
         return v
 
 
@@ -646,6 +671,7 @@ class ClassDef(Statement):
     decorators = None  # type: List[Expression]
     # Built-in/extension class? (single implementation inheritance only)
     is_builtinclass = False
+    has_incompatible_baseclass = False
 
     def __init__(self,
                  name: str,
@@ -750,16 +776,19 @@ class AssignmentStmt(Statement):
     An lvalue can be NameExpr, TupleExpr, ListExpr, MemberExpr, IndexExpr.
     """
 
-    lvalues = None  # type: List[Expression]
+    lvalues = None  # type: List[Lvalue]
     rvalue = None  # type: Expression
     # Declared type in a comment, may be None.
     type = None  # type: mypy.types.Type
+    # This indicates usage of PEP 526 type annotation syntax in assignment.
+    new_syntax = False  # type: bool
 
-    def __init__(self, lvalues: List[Expression], rvalue: Expression,
-                 type: 'mypy.types.Type' = None) -> None:
+    def __init__(self, lvalues: List[Lvalue], rvalue: Expression,
+                 type: 'mypy.types.Type' = None, new_syntax: bool = False) -> None:
         self.lvalues = lvalues
         self.rvalue = rvalue
         self.type = type
+        self.new_syntax = new_syntax
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_assignment_stmt(self)
@@ -1103,8 +1132,6 @@ class NameExpr(RefExpr):
     """
 
     name = None  # type: str      # Name referred to (may be qualified)
-    # TypeInfo of class surrounding expression (may be None)
-    info = None  # type: TypeInfo
 
     literal = LITERAL_TYPE
 
@@ -1191,6 +1218,7 @@ class CallExpr(Expression):
                  arg_names: List[str] = None, analyzed: Expression = None) -> None:
         if not arg_names:
             arg_names = [None] * len(args)
+
         self.callee = callee
         self.args = args
         self.arg_kinds = arg_kinds
@@ -1721,22 +1749,25 @@ class PromoteExpr(Expression):
 
 class NewTypeExpr(Expression):
     """NewType expression NewType(...)."""
+    name = None  # type: str
+    old_type = None  # type: mypy.types.Type
 
     info = None  # type: Optional[TypeInfo]
 
-    def __init__(self, info: Optional['TypeInfo']) -> None:
-        self.info = info
+    def __init__(self, name: str, old_type: 'mypy.types.Type', line: int) -> None:
+        self.name = name
+        self.old_type = old_type
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_newtype_expr(self)
 
 
-class AwaitExpr(Node):
+class AwaitExpr(Expression):
     """Await expression (await ...)."""
 
-    expr = None  # type: Node
+    expr = None  # type: Expression
 
-    def __init__(self, expr: Node) -> None:
+    def __init__(self, expr: Expression) -> None:
         self.expr = expr
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
@@ -1759,6 +1790,9 @@ class TempNode(Expression):
     def __init__(self, typ: 'mypy.types.Type') -> None:
         self.type = typ
 
+    def __repr__(self):
+        return 'TempNode(%s)' % str(self.type)
+
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_temp_node(self)
 
@@ -1777,6 +1811,10 @@ class TypeInfo(SymbolNode):
     """
 
     _fullname = None  # type: str          # Fully qualified name
+    # Fully qualified name for the module this type was defined in. This
+    # information is also in the fullname, but is harder to extract in the
+    # case of nested class definitions.
+    module_name = None  # type: str
     defn = None  # type: ClassDef          # Corresponding ClassDef
     # Method Resolution Order: the order of looking up attributes. The first
     # value always refers to this class.
@@ -1828,10 +1866,16 @@ class TypeInfo(SymbolNode):
     # Alternative to fullname() for 'anonymous' classes.
     alt_fullname = None  # type: Optional[str]
 
-    def __init__(self, names: 'SymbolTable', defn: ClassDef) -> None:
+    FLAGS = [
+        'is_abstract', 'is_enum', 'fallback_to_any', 'is_named_tuple',
+        'is_newtype', 'is_dummy'
+    ]
+
+    def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> None:
         """Initialize a TypeInfo."""
         self.names = names
         self.defn = defn
+        self.module_name = module_name
         self.subtypes = set()
         self.type_vars = []
         self.bases = []
@@ -1985,20 +2029,17 @@ class TypeInfo(SymbolNode):
     def serialize(self) -> Union[str, JsonDict]:
         # NOTE: This is where all ClassDefs originate, so there shouldn't be duplicates.
         data = {'.class': 'TypeInfo',
+                'module_name': self.module_name,
                 'fullname': self.fullname(),
                 'alt_fullname': self.alt_fullname,
                 'names': self.names.serialize(self.alt_fullname or self.fullname()),
                 'defn': self.defn.serialize(),
-                'is_abstract': self.is_abstract,
                 'abstract_attributes': self.abstract_attributes,
-                'is_enum': self.is_enum,
-                'fallback_to_any': self.fallback_to_any,
                 'type_vars': self.type_vars,
                 'bases': [b.serialize() for b in self.bases],
                 '_promote': None if self._promote is None else self._promote.serialize(),
                 'tuple_type': None if self.tuple_type is None else self.tuple_type.serialize(),
-                'is_named_tuple': self.is_named_tuple,
-                'is_newtype': self.is_newtype,
+                'flags': get_flags(self, TypeInfo.FLAGS),
                 }
         return data
 
@@ -2006,22 +2047,19 @@ class TypeInfo(SymbolNode):
     def deserialize(cls, data: JsonDict) -> 'TypeInfo':
         names = SymbolTable.deserialize(data['names'])
         defn = ClassDef.deserialize(data['defn'])
-        ti = TypeInfo(names, defn)
+        module_name = data['module_name']
+        ti = TypeInfo(names, defn, module_name)
         ti._fullname = data['fullname']
         ti.alt_fullname = data['alt_fullname']
         # TODO: Is there a reason to reconstruct ti.subtypes?
-        ti.is_abstract = data['is_abstract']
         ti.abstract_attributes = data['abstract_attributes']
-        ti.is_enum = data['is_enum']
-        ti.fallback_to_any = data['fallback_to_any']
         ti.type_vars = data['type_vars']
         ti.bases = [mypy.types.Instance.deserialize(b) for b in data['bases']]
         ti._promote = (None if data['_promote'] is None
                        else mypy.types.Type.deserialize(data['_promote']))
         ti.tuple_type = (None if data['tuple_type'] is None
                          else mypy.types.TupleType.deserialize(data['tuple_type']))
-        ti.is_named_tuple = data['is_named_tuple']
-        ti.is_newtype = data['is_newtype']
+        set_flags(ti, data['flags'])
         return ti
 
 
@@ -2203,14 +2241,11 @@ def function_type(func: FuncBase, fallback: 'mypy.types.Instance') -> 'mypy.type
         name = func.name()
         if name:
             name = '"{}"'.format(name)
-        names = []  # type: List[str]
-        for arg in fdef.arguments:
-            names.append(arg.variable.name())
 
         return mypy.types.CallableType(
-            [mypy.types.AnyType()] * len(fdef.arguments),
-            [arg.kind for arg in fdef.arguments],
-            names,
+            [mypy.types.AnyType()] * len(fdef.arg_names),
+            fdef.arg_kinds,
+            fdef.arg_names,
             mypy.types.AnyType(),
             fallback,
             name,
@@ -2280,3 +2315,12 @@ def merge(seqs: List[List[TypeInfo]]) -> List[TypeInfo]:
         for s in seqs:
             if s[0] is head:
                 del s[0]
+
+
+def get_flags(node: Node, names: List[str]) -> List[str]:
+    return [name for name in names if getattr(node, name)]
+
+
+def set_flags(node: Node, flags: List[str]) -> None:
+    for name in flags:
+        setattr(node, name, True)
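
The nodes.py changes above collapse the long per-attribute serialization of FuncDef, Var, and TypeInfo into a single 'flags' list handled by the new get_flags()/set_flags() helpers. Below is a minimal, standalone sketch of that round-trip; the Widget class is a made-up stand-in for those node classes, and the helpers mirror the ones added at the end of mypy/nodes.py.

from typing import List


class Widget:
    # Toy stand-in for FuncDef/Var/TypeInfo: a few boolean attributes with
    # class-level defaults, plus the list of names eligible for serialization.
    FLAGS = ['is_abstract', 'is_property', 'is_static']
    is_abstract = False
    is_property = False
    is_static = False


def get_flags(node: Widget, names: List[str]) -> List[str]:
    # Serialize only the flags that are currently True.
    return [name for name in names if getattr(node, name)]


def set_flags(node: Widget, flags: List[str]) -> None:
    # Restore the True flags; everything else keeps its class-level default.
    for name in flags:
        setattr(node, name, True)


w = Widget()
w.is_property = True
data = {'flags': get_flags(w, Widget.FLAGS)}      # {'flags': ['is_property']}

restored = Widget()
set_flags(restored, data['flags'])
assert restored.is_property and not restored.is_abstract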
diff --git a/mypy/options.py b/mypy/options.py
index 9718166..1d09456 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -1,7 +1,10 @@
-from mypy import defaults
+import fnmatch
 import pprint
 import sys
-from typing import Any, Optional, Tuple
+
+from typing import Any, Mapping, Optional, Tuple, List
+
+from mypy import defaults
 
 
 class BuildType:
@@ -13,6 +16,19 @@ class BuildType:
 class Options:
     """Options collected from flags."""
 
+    PER_FILE_OPTIONS = {
+        "silent_imports",
+        "almost_silent",
+        "disallow_untyped_calls",
+        "disallow_untyped_defs",
+        "check_untyped_defs",
+        "debug_cache",
+        "strict_optional_whitelist",
+        "show_none_errors",
+    }
+
+    OPTIONS_AFFECTING_CACHE = PER_FILE_OPTIONS | {"strict_optional"}
+
     def __init__(self) -> None:
         # -- build options --
         self.build_type = BuildType.STANDARD
@@ -32,6 +48,9 @@ class Options:
         # Type check unannotated functions
         self.check_untyped_defs = False
 
+        # Disallow subclassing values of type 'Any'
+        self.disallow_subclassing_any = False
+
         # Also check typeshed for missing annotations
         self.warn_incomplete_stub = False
 
@@ -40,6 +59,26 @@ class Options:
 
         # Warn about unused '# type: ignore' comments
         self.warn_unused_ignores = False
+
+        # Apply strict None checking
+        self.strict_optional = False
+
+        # Files in which to allow strict-Optional related errors
+        # TODO: Kill this in favor of show_none_errors
+        self.strict_optional_whitelist = None   # type: Optional[List[str]]
+
+        # Alternate way to show/hide strict-None-checking related errors
+        self.show_none_errors = True
+
+        # Use script name instead of __main__
+        self.scripts_are_modules = False
+
+        # Config file name
+        self.config_file = None  # type: Optional[str]
+
+        # Per-file options (raw)
+        self.per_file_options = {}  # type: Dict[str, Dict[str, object]]
+
         # -- development options --
         self.verbosity = 0  # More verbose messages (for troubleshooting)
         self.pdb = False
@@ -57,9 +96,11 @@ class Options:
         # -- experimental options --
         self.fast_parser = False
         self.incremental = False
-        self.cache_dir = defaults.MYPY_CACHE
-        self.suppress_error_context = False  # Suppress "note: In function "foo":" messages.
+        self.cache_dir = defaults.CACHE_DIR
+        self.debug_cache = False
+        self.hide_error_context = False  # Hide "note: In function "foo":" messages.
         self.shadow_file = None  # type: Optional[Tuple[str, str]]
+        self.show_column_numbers = False  # type: bool
 
     def __eq__(self, other: object) -> bool:
         return self.__class__ == other.__class__ and self.__dict__ == other.__dict__
@@ -69,3 +110,18 @@ class Options:
 
     def __repr__(self) -> str:
         return 'Options({})'.format(pprint.pformat(self.__dict__))
+
+    def clone_for_file(self, filename: str) -> 'Options':
+        updates = {}
+        for glob in self.per_file_options:
+            if fnmatch.fnmatch(filename, glob):
+                updates.update(self.per_file_options[glob])
+        if not updates:
+            return self
+        new_options = Options()
+        new_options.__dict__.update(self.__dict__)
+        new_options.__dict__.update(updates)
+        return new_options
+
+    def select_options_affecting_cache(self) -> Mapping[str, bool]:
+        return {opt: getattr(self, opt) for opt in self.OPTIONS_AFFECTING_CACHE}
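
clone_for_file() above layers per-file overrides on top of the global Options by matching the file name against each configured glob with fnmatch and copying the accumulated updates into a fresh Options instance. Here is a small standalone sketch of just the matching step; the per_file_options mapping and updates_for() helper are illustrative, not part of mypy's API.

import fnmatch
from typing import Dict

# Hypothetical per-file overrides, keyed by glob pattern (illustrative only).
per_file_options = {
    'thirdparty/*.py': {'silent_imports': True},
    'tests/*_test.py': {'check_untyped_defs': True},
}


def updates_for(filename: str) -> Dict[str, object]:
    # Accumulate updates from every glob that matches the file name, as
    # Options.clone_for_file() does before copying them onto a fresh Options.
    updates = {}  # type: Dict[str, object]
    for glob, opts in per_file_options.items():
        if fnmatch.fnmatch(filename, glob):
            updates.update(opts)
    return updates


print(updates_for('tests/foo_test.py'))   # {'check_untyped_defs': True}
print(updates_for('src/app.py'))          # {}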
diff --git a/mypy/parse.py b/mypy/parse.py
index 94d7b7d..5739056 100644
--- a/mypy/parse.py
+++ b/mypy/parse.py
@@ -6,7 +6,7 @@ representing a source file. Performs only minimal semantic checks.
 
 import re
 
-from typing import List, Tuple, Any, Set, cast, Union, Optional
+from typing import List, Tuple, Set, cast, Union, Optional
 
 from mypy import lex
 from mypy.lex import (
@@ -14,13 +14,12 @@ from mypy.lex import (
     UnicodeLit, FloatLit, Op, Indent, Keyword, Punct, LexError, ComplexLit,
     EllipsisToken
 )
-import mypy.types
 from mypy.nodes import (
-    MypyFile, Import, Node, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef,
-    ClassDef, Decorator, Block, Var, OperatorAssignmentStmt,
+    MypyFile, Import, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef,
+    ClassDef, Decorator, Block, Var, OperatorAssignmentStmt, Statement,
     ExpressionStmt, AssignmentStmt, ReturnStmt, RaiseStmt, AssertStmt,
     DelStmt, BreakStmt, ContinueStmt, PassStmt, GlobalDecl,
-    WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt,
+    WhileStmt, ForStmt, IfStmt, TryStmt, WithStmt, Expression,
     TupleExpr, GeneratorExpr, ListComprehension, ListExpr, ConditionalExpr,
     DictExpr, SetExpr, NameExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr,
     FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr,
@@ -34,7 +33,7 @@ from mypy import nodes
 from mypy.errors import Errors, CompileError
 from mypy.types import Type, CallableType, AnyType, UnboundType
 from mypy.parsetype import (
-    parse_type, parse_types, parse_signature, TypeParseError, parse_str_as_signature
+    parse_type, parse_types, parse_signature, TypeParseError
 )
 from mypy.options import Options
 
@@ -82,15 +81,23 @@ def parse(source: Union[str, bytes],
 
     The python_version (major, minor) option determines the Python syntax variant.
     """
+    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
     if options.fast_parser:
-        import mypy.fastparse
-        return mypy.fastparse.parse(source,
-                                    fnam=fnam,
-                                    errors=errors,
-                                    pyversion=options.python_version,
-                                    custom_typing_module=options.custom_typing_module)
+        if options.python_version[0] >= 3 or is_stub_file:
+            import mypy.fastparse
+            return mypy.fastparse.parse(source,
+                                        fnam=fnam,
+                                        errors=errors,
+                                        pyversion=options.python_version,
+                                        custom_typing_module=options.custom_typing_module)
+        else:
+            import mypy.fastparse2
+            return mypy.fastparse2.parse(source,
+                                         fnam=fnam,
+                                         errors=errors,
+                                         pyversion=options.python_version,
+                                         custom_typing_module=options.custom_typing_module)
 
-    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
     parser = Parser(fnam,
                     errors,
                     options.python_version,
@@ -152,33 +159,12 @@ class Parser:
             self.errors.raise_error()
         return file
 
-    def weak_opts(self) -> Set[str]:
-        """Do weak typing if any of the first ten tokens is a comment saying so.
-
-        The comment can be one of:
-        # mypy: weak=global
-        # mypy: weak=local
-        # mypy: weak      <- defaults to local
-        """
-        regexp = re.compile(r'^[\s]*# *mypy: *weak(=?)([^\s]*)', re.M)
-        for t in self.tok[:10]:
-            for s in [t.string, t.pre]:
-                m = regexp.search(s)
-                if m:
-                    opts = set(x for x in m.group(2).split(',') if x)
-                    if not opts:
-                        opts.add('local')
-                    return opts
-        return set()
-
     def parse_file(self) -> MypyFile:
         """Parse a mypy source file."""
         is_bom = self.parse_bom()
         defs = self.parse_defs()
-        weak_opts = self.weak_opts()
         self.expect_type(Eof)
-        node = MypyFile(defs, self.imports, is_bom, self.ignored_lines,
-                        weak_opts=weak_opts)
+        node = MypyFile(defs, self.imports, is_bom, self.ignored_lines)
         return node
 
     # Parse the initial part
@@ -227,7 +213,7 @@ class Parser:
             return 'builtins'
         return id
 
-    def parse_import_from(self) -> Node:
+    def parse_import_from(self) -> ImportBase:
         self.expect('from')
 
         # Build the list of beginning relative tokens.
@@ -265,7 +251,7 @@ class Parser:
                     if targets or self.current_str() == ',':
                         self.fail('You cannot import any other modules when you '
                                   'import a custom typing module',
-                                  self.current().line)
+                                  self.current().line, self.current().column)
                     node = Import([('typing', as_id)])
                     self.skip_until_break()
                     break
@@ -310,8 +296,8 @@ class Parser:
 
     # Parsing global definitions
 
-    def parse_defs(self) -> List[Node]:
-        defs = []  # type: List[Node]
+    def parse_defs(self) -> List[Statement]:
+        defs = []  # type: List[Statement]
         while not self.eof():
             try:
                 defn, is_simple = self.parse_statement()
@@ -332,7 +318,7 @@ class Parser:
         metaclass = None
 
         try:
-            base_types = []  # type: List[Node]
+            base_types = []  # type: List[Expression]
             try:
                 name_tok = self.expect_type(Name)
                 name = name_tok.string
@@ -383,10 +369,10 @@ class Parser:
                 break
         return metaclass
 
-    def parse_super_type(self) -> Node:
+    def parse_super_type(self) -> Expression:
         return self.parse_expression(precedence[','])
 
-    def parse_decorated_function_or_class(self) -> Node:
+    def parse_decorated_function_or_class(self) -> Union[Decorator, ClassDef]:
         decorators = []
         no_type_checks = False
         while self.current_str() == '@':
@@ -410,7 +396,7 @@ class Parser:
             cls.decorators = decorators
             return cls
 
-    def is_no_type_check_decorator(self, expr: Node) -> bool:
+    def is_no_type_check_decorator(self, expr: Expression) -> bool:
         if isinstance(expr, NameExpr):
             return expr.name == 'no_type_check'
         elif isinstance(expr, MemberExpr):
@@ -419,7 +405,7 @@ class Parser:
         else:
             return False
 
-    def parse_function(self, no_type_checks: bool=False) -> FuncDef:
+    def parse_function(self, no_type_checks: bool = False) -> FuncDef:
         def_tok = self.expect('def')
         is_method = self.is_class_body
         self.is_class_body = False
@@ -437,7 +423,7 @@ class Parser:
                 # The function has a # type: ... signature.
                 if typ:
                     self.errors.report(
-                        def_tok.line, 'Function has duplicate type signatures')
+                        def_tok.line, def_tok.column, 'Function has duplicate type signatures')
                 sig = cast(CallableType, comment_type)
                 if sig.is_ellipsis_args:
                     # When we encounter an ellipsis, fill in the arg_types with
@@ -449,11 +435,12 @@ class Parser:
                         arg_names,
                         sig.ret_type,
                         None,
-                        line=def_tok.line)
+                        line=def_tok.line,
+                        column=def_tok.column)
                 elif is_method and len(sig.arg_kinds) < len(arg_kinds):
                     self.check_argument_kinds(arg_kinds,
                                               [nodes.ARG_POS] + sig.arg_kinds,
-                                              def_tok.line)
+                                              def_tok.line, def_tok.column)
                     # Add implicit 'self' argument to signature.
                     first_arg = [AnyType()]  # type: List[Type]
                     typ = CallableType(
@@ -462,17 +449,19 @@ class Parser:
                         arg_names,
                         sig.ret_type,
                         None,
-                        line=def_tok.line)
+                        line=def_tok.line,
+                        column=def_tok.column)
                 else:
                     self.check_argument_kinds(arg_kinds, sig.arg_kinds,
-                                              def_tok.line)
+                                              def_tok.line, def_tok.column)
                     typ = CallableType(
                         sig.arg_types,
                         arg_kinds,
                         arg_names,
                         sig.ret_type,
                         None,
-                        line=def_tok.line)
+                        line=def_tok.line,
+                        column=def_tok.column)
 
             # If there was a serious error, we really cannot build a parse tree
             # node.
@@ -496,7 +485,7 @@ class Parser:
             self.is_class_body = is_method
 
     def check_argument_kinds(self, funckinds: List[int], sigkinds: List[int],
-                             line: int) -> None:
+                             line: int, column: int) -> None:
         """Check that arguments are consistent.
 
         This verifies that they have the same number and the kinds correspond.
@@ -507,9 +496,9 @@ class Parser:
         """
         if len(funckinds) != len(sigkinds):
             if len(funckinds) > len(sigkinds):
-                self.fail("Type signature has too few arguments", line)
+                self.fail("Type signature has too few arguments", line, column)
             else:
-                self.fail("Type signature has too many arguments", line)
+                self.fail("Type signature has too many arguments", line, column)
             return
         for kind, token in [(nodes.ARG_STAR, '*'),
                             (nodes.ARG_STAR2, '**')]:
@@ -517,7 +506,7 @@ class Parser:
                     (kind in funckinds and sigkinds.index(kind) != funckinds.index(kind))):
                 self.fail(
                     "Inconsistent use of '{}' in function "
-                    "signature".format(token), line)
+                    "signature".format(token), line, column)
 
     def parse_function_header(
             self, no_type_checks: bool=False) -> Tuple[str,
@@ -580,21 +569,21 @@ class Parser:
             ret_type = None
 
         arg_kinds = [arg.kind for arg in args]
-        self.verify_argument_kinds(arg_kinds, lparen.line)
+        self.verify_argument_kinds(arg_kinds, lparen.line, lparen.column)
 
         annotation = self.build_func_annotation(
-            ret_type, args, lparen.line)
+            ret_type, args, lparen.line, lparen.column)
 
         return args, annotation, extra_stmts
 
     def build_func_annotation(self, ret_type: Type, args: List[Argument],
-            line: int, is_default_ret: bool = False) -> CallableType:
+            line: int, column: int, is_default_ret: bool = False) -> CallableType:
         arg_types = [arg.type_annotation for arg in args]
         # Are there any type annotations?
         if ((ret_type and not is_default_ret)
                 or arg_types != [None] * len(arg_types)):
             # Yes. Construct a type for the function signature.
-            return self.construct_function_type(args, ret_type, line)
+            return self.construct_function_type(args, ret_type, line, column)
         else:
             return None
 
@@ -680,7 +669,7 @@ class Parser:
         for name in names:
             if name in found:
                 self.fail('Duplicate argument name "{}"'.format(name),
-                          self.current().line)
+                          self.current().line, self.current().column)
             found.add(name)
 
     def parse_asterisk_arg(self,
@@ -717,10 +706,11 @@ class Parser:
         However, if the argument is (x,) then it *is* a (singleton) tuple.
         """
         line = self.current().line
+        column = self.current().column
         # Generate a new argument name that is very unlikely to clash with anything.
         arg_name = '__tuple_arg_{}'.format(index + 1)
         if self.pyversion[0] >= 3:
-            self.fail('Tuples in argument lists only supported in Python 2 mode', line)
+            self.fail('Tuples in argument lists only supported in Python 2 mode', line, column)
         paren_arg = self.parse_parentheses()
         self.verify_tuple_arg(paren_arg)
         if isinstance(paren_arg, NameExpr):
@@ -731,7 +721,7 @@ class Parser:
             rvalue = NameExpr(arg_name)
             rvalue.set_line(line)
             decompose = AssignmentStmt([paren_arg], rvalue)
-            decompose.set_line(line)
+            decompose.set_line(line, column)
         kind = nodes.ARG_POS
         initializer = None
         if self.current_str() == '=':
@@ -742,16 +732,16 @@ class Parser:
         arg_names = self.find_tuple_arg_argument_names(paren_arg)
         return Argument(var, None, initializer, kind), decompose, arg_names
 
-    def verify_tuple_arg(self, paren_arg: Node) -> None:
+    def verify_tuple_arg(self, paren_arg: Expression) -> None:
         if isinstance(paren_arg, TupleExpr):
             if not paren_arg.items:
-                self.fail('Empty tuple not valid as an argument', paren_arg.line)
+                self.fail('Empty tuple not valid as an argument', paren_arg.line, paren_arg.column)
             for item in paren_arg.items:
                 self.verify_tuple_arg(item)
         elif not isinstance(paren_arg, NameExpr):
-            self.fail('Invalid item in tuple argument', paren_arg.line)
+            self.fail('Invalid item in tuple argument', paren_arg.line, paren_arg.column)
 
-    def find_tuple_arg_argument_names(self, node: Node) -> List[str]:
+    def find_tuple_arg_argument_names(self, node: Expression) -> List[str]:
         result = []  # type: List[str]
         if isinstance(node, TupleExpr):
             for item in node.items:
@@ -772,7 +762,7 @@ class Parser:
         else:
             type = self.parse_arg_type(allow_signature)
 
-        initializer = None  # type: Node
+        initializer = None  # type: Expression
         if self.current_str() == '=':
             self.expect('=')
             initializer = self.parse_expression(precedence[','])
@@ -788,7 +778,7 @@ class Parser:
 
         return Argument(variable, type, initializer, kind), require_named
 
-    def set_type_optional(self, type: Type, initializer: Node) -> None:
+    def set_type_optional(self, type: Type, initializer: Expression) -> None:
         if not experiments.STRICT_OPTIONAL:
             return
         # Indicate that type should be wrapped in an Optional if arg is initialized to None.
@@ -796,7 +786,7 @@ class Parser:
         if isinstance(type, UnboundType):
             type.optional = optional
 
-    def parse_parameter_annotation(self) -> Node:
+    def parse_parameter_annotation(self) -> Expression:
         if self.current_str() == ':':
             self.skip()
             return self.parse_expression(precedence[','])
@@ -808,21 +798,21 @@ class Parser:
         else:
             return None
 
-    def verify_argument_kinds(self, kinds: List[int], line: int) -> None:
+    def verify_argument_kinds(self, kinds: List[int], line: int, column: int) -> None:
         found = set()  # type: Set[int]
         for i, kind in enumerate(kinds):
             if kind == nodes.ARG_POS and found & set([nodes.ARG_OPT,
                                                       nodes.ARG_STAR,
                                                       nodes.ARG_STAR2]):
-                self.fail('Invalid argument list', line)
+                self.fail('Invalid argument list', line, column)
             elif kind == nodes.ARG_STAR and nodes.ARG_STAR in found:
-                self.fail('Invalid argument list', line)
+                self.fail('Invalid argument list', line, column)
             elif kind == nodes.ARG_STAR2 and i != len(kinds) - 1:
-                self.fail('Invalid argument list', line)
+                self.fail('Invalid argument list', line, column)
             found.add(kind)
 
     def construct_function_type(self, args: List[Argument], ret_type: Type,
-                                line: int) -> CallableType:
+                                line: int, column: int) -> CallableType:
         # Complete the type annotation by replacing omitted types with 'Any'.
         arg_types = [arg.type_annotation for arg in args]
         for i in range(len(arg_types)):
@@ -833,7 +823,7 @@ class Parser:
         arg_kinds = [arg.kind for arg in args]
         arg_names = [arg.variable.name() for arg in args]
         return CallableType(arg_types, arg_kinds, arg_names, ret_type, None, name=None,
-                        variables=None, line=line)
+                        variables=None, line=line, column=column)
 
     # Parsing statements
 
@@ -860,7 +850,7 @@ class Parser:
             brk = self.expect_break()
             type = self.parse_type_comment(brk, signature=True)
             self.expect_indent()
-            stmt_list = []  # type: List[Node]
+            stmt_list = []  # type: List[Statement]
             while (not isinstance(self.current(), Dedent) and
                    not isinstance(self.current(), Eof)):
                 try:
@@ -878,7 +868,7 @@ class Parser:
             node.set_line(colon)
             return node, type
 
-    def try_combine_overloads(self, s: Node, stmt: List[Node]) -> bool:
+    def try_combine_overloads(self, s: Statement, stmt: List[Statement]) -> bool:
         if isinstance(s, Decorator) and stmt:
             fdef = s
             n = fdef.func.name()
@@ -890,8 +880,8 @@ class Parser:
                 return True
         return False
 
-    def parse_statement(self) -> Tuple[Node, bool]:
-        stmt = None  # type: Node
+    def parse_statement(self) -> Tuple[Statement, bool]:
+        stmt = None  # type: Statement
         t = self.current()
         ts = self.current_str()
         is_simple = True  # Is this a non-block statement?
@@ -956,7 +946,9 @@ class Parser:
             stmt.set_line(t)
         return stmt, is_simple
 
-    def parse_expression_or_assignment(self) -> Node:
+    def parse_expression_or_assignment(self) -> Union[AssignmentStmt,
+                                                      OperatorAssignmentStmt,
+                                                      ExpressionStmt]:
         expr = self.parse_expression(star_expr_allowed=True)
         if self.current_str() == '=':
             return self.parse_assignment(expr)
@@ -970,7 +962,7 @@ class Parser:
             # Expression statement.
             return ExpressionStmt(expr)
 
-    def parse_assignment(self, lvalue: Any) -> Node:
+    def parse_assignment(self, lvalue: Expression) -> AssignmentStmt:
         """Parse an assignment statement.
 
         Assume that lvalue has been parsed already, and the current token is '='.
@@ -1120,7 +1112,7 @@ class Parser:
         node = ForStmt(index, expr, body, else_body)
         return node
 
-    def parse_for_index_variables(self) -> Node:
+    def parse_for_index_variables(self) -> Expression:
         # Parse index variables of a 'for' statement.
         index_items = []
         force_tuple = False
@@ -1140,7 +1132,7 @@ class Parser:
             index = index_items[0]
         else:
             index = TupleExpr(index_items)
-            index.set_line(index_items[0].get_line())
+            index.set_line(index_items[0])
 
         return index
 
@@ -1176,19 +1168,21 @@ class Parser:
         else:
             return None
 
-    def parse_try_stmt(self) -> Node:
+    def parse_try_stmt(self) -> TryStmt:
         self.expect('try')
         body, _ = self.parse_block()
         is_error = False
         vars = []  # type: List[NameExpr]
-        types = []  # type: List[Node]
+        types = []  # type: List[Optional[Expression]]
         handlers = []  # type: List[Block]
         while self.current_str() == 'except':
             self.expect('except')
             if not isinstance(self.current(), Colon):
                 try:
                     t = self.current()
-                    types.append(self.parse_expression(precedence[',']).set_line(t))
+                    expr = self.parse_expression(precedence[','])
+                    expr.set_line(t)
+                    types.append(expr)
                     if self.current_str() == 'as':
                         self.expect('as')
                         vars.append(self.parse_name_expr())
@@ -1279,9 +1273,9 @@ class Parser:
 
     # Parsing expressions
 
-    def parse_expression(self, prec: int = 0, star_expr_allowed: bool = False) -> Node:
+    def parse_expression(self, prec: int = 0, star_expr_allowed: bool = False) -> Expression:
         """Parse a subexpression within a specific precedence context."""
-        expr = None  # type: Node
+        expr = None  # type: Expression
         current = self.current()  # Remember token for setting the line number.
 
         # Parse a "value" expression or unary operator expression and store
@@ -1401,18 +1395,18 @@ class Parser:
 
         return expr
 
-    def parse_parentheses(self) -> Node:
+    def parse_parentheses(self) -> Expression:
         self.skip()
         if self.current_str() == ')':
             # Empty tuple ().
-            expr = self.parse_empty_tuple_expr()  # type: Node
+            expr = self.parse_empty_tuple_expr()  # type: Expression
         else:
             # Parenthesised expression.
             expr = self.parse_expression(0, star_expr_allowed=True)
             self.expect(')')
         return expr
 
-    def parse_star_expr(self) -> Node:
+    def parse_star_expr(self) -> StarExpr:
         star = self.expect('*')
         expr = self.parse_expression(precedence['*u'])
         expr = StarExpr(expr)
@@ -1425,7 +1419,7 @@ class Parser:
         node = TupleExpr([])
         return node
 
-    def parse_list_expr(self) -> Node:
+    def parse_list_expr(self) -> Union[ListExpr, ListComprehension]:
         """Parse list literal or list comprehension."""
         items = []
         self.expect('[')
@@ -1443,7 +1437,7 @@ class Parser:
             expr = ListExpr(items)
             return expr
 
-    def parse_generator_expr(self, left_expr: Node) -> GeneratorExpr:
+    def parse_generator_expr(self, left_expr: Expression) -> GeneratorExpr:
         tok = self.current()
         indices, sequences, condlists = self.parse_comp_for()
 
@@ -1451,10 +1445,10 @@ class Parser:
         gen.set_line(tok)
         return gen
 
-    def parse_comp_for(self) -> Tuple[List[Node], List[Node], List[List[Node]]]:
+    def parse_comp_for(self) -> Tuple[List[Expression], List[Expression], List[List[Expression]]]:
         indices = []
         sequences = []
-        condlists = []  # type: List[List[Node]]
+        condlists = []  # type: List[List[Expression]]
         while self.current_str() == 'for':
             conds = []
             self.expect('for')
@@ -1473,24 +1467,27 @@ class Parser:
 
         return indices, sequences, condlists
 
-    def parse_expression_list(self) -> Node:
+    def parse_expression_list(self) -> Expression:
         prec = precedence['<if>']
         expr = self.parse_expression(prec)
         if self.current_str() != ',':
             return expr
         else:
             t = self.current()
-            return self.parse_tuple_expr(expr, prec).set_line(t)
+            tuple_expr = self.parse_tuple_expr(expr, prec)
+            tuple_expr.set_line(t)
+            return tuple_expr
 
-    def parse_conditional_expr(self, left_expr: Node) -> ConditionalExpr:
+    def parse_conditional_expr(self, left_expr: Expression) -> ConditionalExpr:
         self.expect('if')
         cond = self.parse_expression(precedence['<if>'])
         self.expect('else')
         else_expr = self.parse_expression(precedence['<if>'])
         return ConditionalExpr(cond, left_expr, else_expr)
 
-    def parse_dict_or_set_expr(self) -> Node:
-        items = []  # type: List[Tuple[Node, Node]]
+    def parse_dict_or_set_expr(self) -> Union[SetComprehension, SetExpr,
+                                              DictionaryComprehension, DictExpr]:
+        items = []  # type: List[Tuple[Expression, Expression]]
         self.expect('{')
         while self.current_str() != '}' and not self.eol():
             key = self.parse_expression(precedence['<for>'])
@@ -1512,7 +1509,7 @@ class Parser:
         node = DictExpr(items)
         return node
 
-    def parse_set_expr(self, first: Node) -> SetExpr:
+    def parse_set_expr(self, first: Expression) -> SetExpr:
         items = [first]
         while self.current_str() != '}' and not self.eol():
             self.expect(',')
@@ -1523,13 +1520,13 @@ class Parser:
         expr = SetExpr(items)
         return expr
 
-    def parse_set_comprehension(self, expr: Node) -> SetComprehension:
+    def parse_set_comprehension(self, expr: Expression) -> SetComprehension:
         gen = self.parse_generator_expr(expr)
         self.expect('}')
         set_comp = SetComprehension(gen)
         return set_comp
 
-    def parse_dict_comprehension(self, key: Node, value: Node,
+    def parse_dict_comprehension(self, key: Expression, value: Expression,
                                  colon: Token) -> DictionaryComprehension:
         indices, sequences, condlists = self.parse_comp_for()
         dic = DictionaryComprehension(key, value, indices, sequences, condlists)
@@ -1537,7 +1534,7 @@ class Parser:
         self.expect('}')
         return dic
 
-    def parse_tuple_expr(self, expr: Node,
+    def parse_tuple_expr(self, expr: Expression,
                          prec: int = precedence[',']) -> TupleExpr:
         items = [expr]
         while True:
@@ -1574,7 +1571,7 @@ class Parser:
         node = IntExpr(value)
         return node
 
-    def parse_str_expr(self) -> Node:
+    def parse_str_expr(self) -> Union[UnicodeExpr, StrExpr]:
         # XXX \uxxxx literals
         token = self.expect_type(StrLit)
         value = cast(StrLit, token).parsed()
@@ -1587,12 +1584,11 @@ class Parser:
                 value += token.parsed()
                 is_unicode = True
         if is_unicode or (self.pyversion[0] == 2 and 'unicode_literals' in self.future_options):
-            node = UnicodeExpr(value)  # type: Node
+            return UnicodeExpr(value)
         else:
-            node = StrExpr(value)
-        return node
+            return StrExpr(value)
 
-    def parse_bytes_literal(self) -> Node:
+    def parse_bytes_literal(self) -> Union[BytesExpr, StrExpr]:
         # XXX \uxxxx literals
         tok = [self.expect_type(BytesLit)]
         value = (cast(BytesLit, tok[0])).parsed()
@@ -1600,12 +1596,11 @@ class Parser:
             t = cast(BytesLit, self.skip())
             value += t.parsed()
         if self.pyversion[0] >= 3:
-            node = BytesExpr(value)  # type: Node
+            return BytesExpr(value)
         else:
-            node = StrExpr(value)
-        return node
+            return StrExpr(value)
 
-    def parse_unicode_literal(self) -> Node:
+    def parse_unicode_literal(self) -> Union[StrExpr, UnicodeExpr]:
         # XXX \uxxxx literals
         token = self.expect_type(UnicodeLit)
         value = cast(UnicodeLit, token).parsed()
@@ -1614,29 +1609,25 @@ class Parser:
             value += token.parsed()
         if self.pyversion[0] >= 3:
             # Python 3.3 supports u'...' as an alias of '...'.
-            node = StrExpr(value)  # type: Node
+            return StrExpr(value)
         else:
-            node = UnicodeExpr(value)
-        return node
+            return UnicodeExpr(value)
 
     def parse_float_expr(self) -> FloatExpr:
         tok = self.expect_type(FloatLit)
-        node = FloatExpr(float(tok.string))
-        return node
+        return FloatExpr(float(tok.string))
 
     def parse_complex_expr(self) -> ComplexExpr:
         tok = self.expect_type(ComplexLit)
-        node = ComplexExpr(complex(tok.string))
-        return node
+        return ComplexExpr(complex(tok.string))
 
-    def parse_call_expr(self, callee: Any) -> CallExpr:
+    def parse_call_expr(self, callee: Expression) -> CallExpr:
         self.expect('(')
         args, kinds, names = self.parse_arg_expr()
         self.expect(')')
-        node = CallExpr(callee, args, kinds, names)
-        return node
+        return CallExpr(callee, args, kinds, names)
 
-    def parse_arg_expr(self) -> Tuple[List[Node], List[int], List[str]]:
+    def parse_arg_expr(self) -> Tuple[List[Expression], List[int], List[str]]:
         """Parse arguments in a call expression (within '(' and ')').
 
         Return a tuple with these items:
@@ -1644,7 +1635,7 @@ class Parser:
           argument kinds
           argument names (for named arguments; None for ordinary args)
         """
-        args = []   # type: List[Node]
+        args = []   # type: List[Expression]
         kinds = []  # type: List[int]
         names = []  # type: List[str]
         var_arg = False
@@ -1682,18 +1673,17 @@ class Parser:
             self.expect(',')
         return args, kinds, names
 
-    def parse_member_expr(self, expr: Any) -> Node:
+    def parse_member_expr(self, expr: Expression) -> Union[SuperExpr, MemberExpr]:
         self.expect('.')
         name = self.expect_type(Name)
         if (isinstance(expr, CallExpr) and isinstance(expr.callee, NameExpr)
                 and expr.callee.name == 'super'):
             # super() expression
-            node = SuperExpr(name.string)  # type: Node
+            return SuperExpr(name.string)
         else:
-            node = MemberExpr(expr, name.string)
-        return node
+            return MemberExpr(expr, name.string)
 
-    def parse_index_expr(self, base: Any) -> IndexExpr:
+    def parse_index_expr(self, base: Expression) -> IndexExpr:
         self.expect('[')
         index = self.parse_slice_item()
         if self.current_str() == ',':
@@ -1705,12 +1695,12 @@ class Parser:
                     break
                 items.append(self.parse_slice_item())
             index = TupleExpr(items)
-            index.set_line(items[0].line)
+            index.set_line(items[0])
         self.expect(']')
         node = IndexExpr(base, index)
         return node
 
-    def parse_slice_item(self) -> Node:
+    def parse_slice_item(self) -> Expression:
         if self.current_str() != ':':
             if self.current_str() == '...':
                 # Ellipsis is valid here even in Python 2 (but not elsewhere).
@@ -1735,10 +1725,11 @@ class Parser:
                 self.expect(':')
                 if self.current_str() not in (']', ','):
                     stride = self.parse_expression(precedence[','])
-            item = SliceExpr(index, end_index, stride).set_line(colon.line)
+            item = SliceExpr(index, end_index, stride)
+            item.set_line(colon)
         return item
 
-    def parse_bin_op_expr(self, left: Node, prec: int) -> OpExpr:
+    def parse_bin_op_expr(self, left: Expression, prec: int) -> OpExpr:
         op = self.expect_type(Op)
         op_str = op.string
         if op_str == '~':
@@ -1748,7 +1739,7 @@ class Parser:
         node = OpExpr(op_str, left, right)
         return node
 
-    def parse_comparison_expr(self, left: Node, prec: int) -> ComparisonExpr:
+    def parse_comparison_expr(self, left: Expression, prec: int) -> ComparisonExpr:
         operators_str = []
         operands = [left]
 
@@ -1799,13 +1790,15 @@ class Parser:
         # less precise.
         ret_type = UnboundType('__builtins__.object')
         typ = self.build_func_annotation(ret_type, args,
-                                         lambda_tok.line, is_default_ret=True)
+                                         lambda_tok.line, lambda_tok.column, is_default_ret=True)
 
         colon = self.expect(':')
 
         expr = self.parse_expression(precedence[','])
 
-        nodes = [ReturnStmt(expr).set_line(lambda_tok)]
+        return_stmt = ReturnStmt(expr)
+        return_stmt.set_line(lambda_tok)
+        nodes = [return_stmt]  # type: List[Statement]
         # Potentially insert extra assignment statements to the beginning of the
         # body, used to decompose Python 2 tuple arguments.
         nodes[:0] = extra_stmts
@@ -1831,11 +1824,11 @@ class Parser:
         if isinstance(self.current(), Indent):
             return self.expect_type(Indent)
         else:
-            self.fail('Expected an indented block', self.current().line)
+            self.fail('Expected an indented block', self.current().line, self.current().column)
             return none
 
-    def fail(self, msg: str, line: int) -> None:
-        self.errors.report(line, msg)
+    def fail(self, msg: str, line: int, column: int) -> None:
+        self.errors.report(line, column, msg)
 
     def expect_type(self, typ: type) -> Token:
         current = self.current()
@@ -1875,7 +1868,7 @@ class Parser:
             formatted_reason = ": {}".format(reason) if reason else ""
             msg = 'Parse error before {}{}'.format(token_repr(tok), formatted_reason)
 
-        self.errors.report(tok.line, msg)
+        self.errors.report(tok.line, tok.column, msg)
 
         if skip:
             self.skip_until_next_line()
@@ -1928,7 +1921,7 @@ class Parser:
             tokens = lex.lex(type_as_str, token.line)[0]
             if len(tokens) < 2:
                 # Empty annotation (only Eof token)
-                self.errors.report(token.line, 'Empty type annotation')
+                self.errors.report(token.line, token.column, 'Empty type annotation')
                 return None
             try:
                 if not signature:
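
With the parse.py change above, enabling the fast parser no longer routes everything through mypy.fastparse: Python 3 sources and .pyi stubs still use it, while Python 2 sources now go to the new mypy.fastparse2 module, and the classic Parser remains the path when the fast parser is off. A hedged sketch of that dispatch rule follows; choose_parser() is illustrative only and not part of mypy's API.

from typing import Tuple


def choose_parser(fnam: str, python_version: Tuple[int, int],
                  fast_parser: bool) -> str:
    # Mirror the branching added to mypy.parse.parse().
    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
    if not fast_parser:
        return 'mypy.parse.Parser'      # classic hand-written parser
    if python_version[0] >= 3 or is_stub_file:
        return 'mypy.fastparse'
    return 'mypy.fastparse2'


assert choose_parser('m.pyi', (2, 7), True) == 'mypy.fastparse'
assert choose_parser('m.py', (2, 7), True) == 'mypy.fastparse2'
assert choose_parser('m.py', (3, 5), False) == 'mypy.parse.Parser'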
diff --git a/mypy/report.py b/mypy/report.py
index 180bbe6..313fbf4 100644
--- a/mypy/report.py
+++ b/mypy/report.py
@@ -2,11 +2,12 @@
 
 from abc import ABCMeta, abstractmethod
 import cgi
+import json
 import os
 import shutil
 import tokenize
 
-from typing import Callable, Dict, List, Tuple, cast
+from typing import Callable, Dict, List, Optional, Tuple, cast
 
 from mypy.nodes import MypyFile, Node, FuncDef
 from mypy import stats
@@ -105,6 +106,115 @@ class LineCountReporter(AbstractReporter):
 reporter_classes['linecount'] = LineCountReporter
 
 
+class LineCoverageVisitor(TraverserVisitor):
+    def __init__(self, source: List[str]) -> None:
+        self.source = source
+
+        # For each line of source, we maintain a pair of
+        #  * the indentation level of the surrounding function
+        #    (-1 if not inside a function), and
+        #  * whether the surrounding function is typed.
+        # Initially, everything is covered at indentation level -1.
+        self.lines_covered = [(-1, True) for l in source]
+
+    # The Python AST has position information for the starts of
+    # elements, but not for their ends. Fortunately the
+    # indentation-based syntax makes it pretty easy to find where a
+    # block ends without doing any real parsing.
+
+    # TODO: Handle line continuations (explicit and implicit) and
+    # multi-line string literals. (But at least line continuations
+    # are normally more indented than their surrounding block anyways,
+    # by PEP 8.)
+
+    def indentation_level(self, line_number: int) -> Optional[int]:
+        """Return the indentation of a line of the source (specified by
+        zero-indexed line number). Returns None for blank lines or comments."""
+        line = self.source[line_number]
+        indent = 0
+        for char in list(line):
+            if char == ' ':
+                indent += 1
+            elif char == '\t':
+                indent = 8 * ((indent + 8) // 8)
+            elif char == '#':
+                # Line is a comment; ignore it
+                return None
+            elif char == '\n':
+                # Line is entirely whitespace; ignore it
+                return None
+            # TODO line continuation (\)
+            else:
+                # Found a non-whitespace character
+                return indent
+        # Line is entirely whitespace, and at end of file
+        # with no trailing newline; ignore it
+        return None
+
+    def visit_func_def(self, defn: FuncDef) -> None:
+        start_line = defn.get_line() - 1
+        start_indent = self.indentation_level(start_line)
+        cur_line = start_line + 1
+        end_line = cur_line
+        # After this loop, function body will be lines [start_line, end_line)
+        while cur_line < len(self.source):
+            cur_indent = self.indentation_level(cur_line)
+            if cur_indent is None:
+                # Consume the line, but don't mark it as belonging to the function yet.
+                cur_line += 1
+            elif cur_indent > start_indent:
+                # A non-blank line that belongs to the function.
+                cur_line += 1
+                end_line = cur_line
+            else:
+                # We reached a line outside the function definition.
+                break
+
+        is_typed = defn.type is not None
+        for line in range(start_line, end_line):
+            old_indent, _ = self.lines_covered[line]
+            assert start_indent > old_indent
+            self.lines_covered[line] = (start_indent, is_typed)
+
+        # Visit the body, in case there are nested functions
+        super().visit_func_def(defn)
+
+
+class LineCoverageReporter(AbstractReporter):
+    """Exact line coverage reporter.
+
+    This reporter writes a JSON dictionary with one field 'lines' to
+    the file 'coverage.json' in the specified report directory. The
+    value of that field is a dictionary which associates to each
+    source file's absolute pathname the list of line numbers that
+    belong to typed functions in that file.
+    """
+    def __init__(self, reports: Reports, output_dir: str) -> None:
+        super().__init__(reports, output_dir)
+        self.lines_covered = {}  # type: Dict[str, List[int]]
+
+        stats.ensure_dir_exists(output_dir)
+
+    def on_file(self, tree: MypyFile, type_map: Dict[Node, Type]) -> None:
+        tree_source = open(tree.path).readlines()
+
+        coverage_visitor = LineCoverageVisitor(tree_source)
+        tree.accept(coverage_visitor)
+
+        covered_lines = []
+        for line_number, (_, typed) in enumerate(coverage_visitor.lines_covered):
+            if typed:
+                covered_lines.append(line_number + 1)
+
+        self.lines_covered[os.path.abspath(tree.path)] = covered_lines
+
+    def on_finish(self) -> None:
+        with open(os.path.join(self.output_dir, 'coverage.json'), 'w') as f:
+            json.dump({'lines': self.lines_covered}, f)
+
+reporter_classes['linecoverage'] = LineCoverageReporter
+
+
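
The 'linecoverage' reporter registered above writes coverage.json, whose 'lines' field maps each file's absolute path to the 1-based line numbers that fall inside typed functions. A hedged sketch of post-processing that file; the report directory name and the printed summary are examples, not part of the patch:

    import json
    import os.path

    report_dir = 'reports'   # wherever the linecoverage report was generated
    with open(os.path.join(report_dir, 'coverage.json')) as f:
        data = json.load(f)

    for path, typed_lines in sorted(data['lines'].items()):
        # typed_lines lists the line numbers belonging to typed functions.
        print('{}: {} lines in typed functions'.format(path, len(typed_lines)))
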
 class OldHtmlReporter(AbstractReporter):
     """Old HTML reporter.
 
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 9956105..836e56b 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -43,14 +43,13 @@ TODO: Check if the third pass slows down type checking significantly.
   traverse the entire AST.
 """
 
-import sys
 from typing import (
-    List, Dict, Set, Tuple, cast, Any, overload, TypeVar, Union, Optional, Callable
+    List, Dict, Set, Tuple, cast, Any, TypeVar, Union, Optional, Callable
 )
 
 from mypy.nodes import (
     MypyFile, TypeInfo, Node, AssignmentStmt, FuncDef, OverloadedFuncDef,
-    ClassDef, Var, GDEF, MODULE_REF, FuncItem, Import,
+    ClassDef, Var, GDEF, MODULE_REF, FuncItem, Import, Expression, Lvalue,
     ImportFrom, ImportAll, Block, LDEF, NameExpr, MemberExpr,
     IndexExpr, TupleExpr, ListExpr, ExpressionStmt, ReturnStmt,
     RaiseStmt, AssertStmt, OperatorAssignmentStmt, WhileStmt,
@@ -61,10 +60,10 @@ from mypy.nodes import (
     FuncExpr, MDEF, FuncBase, Decorator, SetExpr, TypeVarExpr, NewTypeExpr,
     StrExpr, BytesExpr, PrintStmt, ConditionalExpr, PromoteExpr,
     ComparisonExpr, StarExpr, ARG_POS, ARG_NAMED, MroError, type_aliases,
-    YieldFromExpr, NamedTupleExpr, NonlocalDecl,
+    YieldFromExpr, NamedTupleExpr, NonlocalDecl, SymbolNode,
     SetComprehension, DictionaryComprehension, TYPE_ALIAS, TypeAliasExpr,
     YieldExpr, ExecStmt, Argument, BackquoteExpr, ImportBase, AwaitExpr,
-    IntExpr, FloatExpr, UnicodeExpr,
+    IntExpr, FloatExpr, UnicodeExpr, EllipsisExpr,
     COVARIANT, CONTRAVARIANT, INVARIANT, UNBOUND_IMPORTED, LITERAL_YES,
 )
 from mypy.visitor import NodeVisitor
@@ -72,17 +71,13 @@ from mypy.traverser import TraverserVisitor
 from mypy.errors import Errors, report_internal_error
 from mypy.types import (
     NoneTyp, CallableType, Overloaded, Instance, Type, TypeVarType, AnyType,
-    FunctionLike, UnboundType, TypeList, ErrorType, TypeVarDef, Void,
-    replace_leading_arg_type, TupleType, UnionType, StarType, EllipsisType
-)
+    FunctionLike, UnboundType, TypeList, TypeVarDef,
+    replace_leading_arg_type, TupleType, UnionType, StarType, EllipsisType, TypeType)
 from mypy.nodes import function_type, implicit_module_attrs
 from mypy.typeanal import TypeAnalyser, TypeAnalyserPass3, analyze_type_alias
 from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
-from mypy.lex import lex
-from mypy.parsetype import parse_type
 from mypy.sametypes import is_same_type
 from mypy.erasetype import erase_typevars
-from mypy import defaults
 from mypy.options import Options
 
 
@@ -172,8 +167,8 @@ class SemanticAnalyzer(NodeVisitor):
     bound_tvars = None  # type: List[SymbolTableNode]
     # Stack of type variables that were bound by outer classess
     tvar_stack = None  # type: List[List[SymbolTableNode]]
-    # Do weak type checking in this file
-    weak_opts = set()        # type: Set[str]
+    # Per-file options
+    options = None  # type: Options
 
     # Stack of functions being analyzed
     function_stack = None  # type: List[FuncItem]
@@ -195,9 +190,7 @@ class SemanticAnalyzer(NodeVisitor):
     errors = None  # type: Errors     # Keeps track of generated errors
 
     def __init__(self,
-                 lib_path: List[str],
-                 errors: Errors,
-                 options: Options) -> None:
+                 lib_path: List[str], errors: Errors) -> None:
         """Construct semantic analyzer.
 
         Use lib_path to search for modules, and report analysis errors
@@ -216,18 +209,17 @@ class SemanticAnalyzer(NodeVisitor):
         self.lib_path = lib_path
         self.errors = errors
         self.modules = {}
-        self.options = options
         self.postpone_nested_functions_stack = [FUNCTION_BOTH_PHASES]
         self.postponed_functions_stack = []
         self.all_exports = set()  # type: Set[str]
 
-    def visit_file(self, file_node: MypyFile, fnam: str) -> None:
+    def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None:
+        self.options = options
         self.errors.set_file(fnam)
         self.cur_mod_node = file_node
         self.cur_mod_id = file_node.fullname()
         self.is_stub_file = fnam.lower().endswith('.pyi')
         self.globals = file_node.names
-        self.weak_opts = file_node.weak_opts
 
         if 'builtins' in self.modules:
             self.globals['__builtins__'] = SymbolTableNode(
@@ -251,6 +243,8 @@ class SemanticAnalyzer(NodeVisitor):
                 if name not in self.all_exports:
                     g.module_public = False
 
+        del self.options
+
     def visit_func_def(self, defn: FuncDef) -> None:
         phase_info = self.postpone_nested_functions_stack[-1]
         if phase_info != FUNCTION_SECOND_PHASE:
@@ -319,9 +313,8 @@ class SemanticAnalyzer(NodeVisitor):
                 # A coroutine defined as `async def foo(...) -> T: ...`
                 # has external return type `Awaitable[T]`.
                 defn.type = defn.type.copy_modified(
-                    ret_type=Instance(
-                        self.named_type_or_none('typing.Awaitable').type,
-                        [defn.type.ret_type]))
+                    ret_type = self.named_type_or_none('typing.Awaitable',
+                                                       [defn.type.ret_type]))
             self.errors.pop_function()
 
     def prepare_method_signature(self, func: FuncDef) -> None:
@@ -409,7 +402,7 @@ class SemanticAnalyzer(NodeVisitor):
             assert False, 'Unsupported type %s' % type
         return result
 
-    def is_defined_type_var(self, tvar: str, context: Node) -> bool:
+    def is_defined_type_var(self, tvar: str, context: Context) -> bool:
         return self.lookup_qualified(tvar, context).kind == BOUND_TVAR
 
     def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
@@ -610,7 +603,7 @@ class SemanticAnalyzer(NodeVisitor):
         if self.bound_tvars:
             enable_typevars(self.bound_tvars)
 
-    def analyze_class_decorator(self, defn: ClassDef, decorator: Node) -> None:
+    def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None:
         decorator.accept(self)
 
     def setup_is_builtinclass(self, defn: ClassDef) -> None:
@@ -734,7 +727,7 @@ class SemanticAnalyzer(NodeVisitor):
     def setup_class_def_analysis(self, defn: ClassDef) -> None:
         """Prepare for the analysis of a class definition."""
         if not defn.info:
-            defn.info = TypeInfo(SymbolTable(), defn)
+            defn.info = TypeInfo(SymbolTable(), defn, self.cur_mod_id)
             defn.info._fullname = defn.info.name()
         if self.is_func_scope() or self.type:
             kind = MDEF
@@ -751,38 +744,46 @@ class SemanticAnalyzer(NodeVisitor):
         """
 
         base_types = []  # type: List[Instance]
+        info = defn.info
         for base_expr in defn.base_type_exprs:
             try:
                 base = self.expr_to_analyzed_type(base_expr)
             except TypeTranslationError:
                 self.fail('Invalid base class', base_expr)
-                defn.info.fallback_to_any = True
+                info.fallback_to_any = True
                 continue
 
             if isinstance(base, TupleType):
-                if defn.info.tuple_type:
+                if info.tuple_type:
                     self.fail("Class has two incompatible bases derived from tuple", defn)
+                    defn.has_incompatible_baseclass = True
                 if (not self.is_stub_file
-                        and not defn.info.is_named_tuple
+                        and not info.is_named_tuple
                         and base.fallback.type.fullname() == 'builtins.tuple'):
                     self.fail("Tuple[...] not supported as a base class outside a stub file", defn)
-                defn.info.tuple_type = base
+                info.tuple_type = base
                 base_types.append(base.fallback)
             elif isinstance(base, Instance):
                 if base.type.is_newtype:
                     self.fail("Cannot subclass NewType", defn)
                 base_types.append(base)
             elif isinstance(base, AnyType):
-                defn.info.fallback_to_any = True
+                if self.options.disallow_subclassing_any:
+                    if isinstance(base_expr, (NameExpr, MemberExpr)):
+                        msg = "Class cannot subclass '{}' (has type 'Any')".format(base_expr.name)
+                    else:
+                        msg = "Class cannot subclass value of type 'Any'"
+                    self.fail(msg, base_expr)
+                info.fallback_to_any = True
             else:
                 self.fail('Invalid base class', base_expr)
-                defn.info.fallback_to_any = True
+                info.fallback_to_any = True
 
         # Add 'object' as implicit base if there is no other base class.
         if (not base_types and defn.fullname != 'builtins.object'):
             base_types.append(self.object_type())
 
-        defn.info.bases = base_types
+        info.bases = base_types
 
         # Calculate the MRO. It might be incomplete at this point if
         # the bases of defn include classes imported from other
@@ -794,10 +795,10 @@ class SemanticAnalyzer(NodeVisitor):
         calculate_class_mro(defn, self.fail_blocker)
         # If there are cyclic imports, we may be missing 'object' in
         # the MRO. Fix MRO if needed.
-        if defn.info.mro and defn.info.mro[-1].fullname() != 'builtins.object':
-            defn.info.mro.append(self.object_type().type)
+        if info.mro and info.mro[-1].fullname() != 'builtins.object':
+            info.mro.append(self.object_type().type)
 
-    def expr_to_analyzed_type(self, expr: Node) -> Type:
+    def expr_to_analyzed_type(self, expr: Expression) -> Type:
         if isinstance(expr, CallExpr):
             expr.accept(self)
             info = self.check_namedtuple(expr)
@@ -866,11 +867,11 @@ class SemanticAnalyzer(NodeVisitor):
         sym = self.lookup_qualified(qualified_name, None)
         return Instance(cast(TypeInfo, sym.node), args or [])
 
-    def named_type_or_none(self, qualified_name: str) -> Instance:
+    def named_type_or_none(self, qualified_name: str, args: List[Type] = None) -> Instance:
         sym = self.lookup_fully_qualified_or_none(qualified_name)
         if not sym:
             return None
-        return Instance(cast(TypeInfo, sym.node), [])
+        return Instance(cast(TypeInfo, sym.node), args or [])
 
     def is_instance_type(self, t: Type) -> bool:
         return isinstance(t, Instance)
@@ -894,6 +895,30 @@ class SemanticAnalyzer(NodeVisitor):
                 base = id.split('.')[0]
                 self.add_module_symbol(base, base, module_public=module_public,
                                        context=i)
+                self.add_submodules_to_parent_modules(id, module_public)
+
+    def add_submodules_to_parent_modules(self, id: str, module_public: bool) -> None:
+        """Recursively adds a reference to a newly loaded submodule to its parent.
+
+        When you import a submodule in any way, Python will add a reference to that
+        submodule to its parent. So, if you do something like `import A.B` or
+        `from A import B` or `from A.B import Foo`, Python will add a reference to
+        module A.B to A's namespace.
+
+        Note that this "parent patching" process is completely independent from any
+        changes made to the *importer's* namespace. For example, if you have a file
+        named `foo.py` where you do `from A.B import Bar`, then foo's namespace will
+        be modified to contain a reference to only Bar. Independently, A's namespace
+        will be modified to contain a reference to `A.B`.
+        """
+        while '.' in id:
+            parent, child = id.rsplit('.', 1)
+            modules_loaded = parent in self.modules and id in self.modules
+            if modules_loaded and child not in self.modules[parent].names:
+                sym = SymbolTableNode(MODULE_REF, self.modules[id], parent,
+                        module_public=module_public)
+                self.modules[parent].names[child] = sym
+            id = parent
 
     def add_module_symbol(self, id: str, as_id: str, module_public: bool,
                           context: Context) -> None:
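
The docstring above describes the parent patching that CPython performs whenever a submodule is imported; the new helper reproduces it in mypy's module table. A quick standard-library demonstration of the runtime behaviour being mirrored (the xml package is just a convenient example):

    from xml.etree import ElementTree   # the importer's namespace gets only ElementTree
    import xml                          # ...but the parent packages were patched:

    print(hasattr(xml, 'etree'))              # True: xml.etree was added to xml
    print(hasattr(xml.etree, 'ElementTree'))  # True: and ElementTree to xml.etree
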
@@ -902,14 +927,25 @@ class SemanticAnalyzer(NodeVisitor):
             self.add_symbol(as_id, SymbolTableNode(MODULE_REF, m, self.cur_mod_id,
                                                    module_public=module_public), context)
         else:
-            self.add_unknown_symbol(as_id, context)
+            self.add_unknown_symbol(as_id, context, is_import=True)
 
     def visit_import_from(self, imp: ImportFrom) -> None:
         import_id = self.correct_relative_import(imp)
         if import_id in self.modules:
             module = self.modules[import_id]
+            self.add_submodules_to_parent_modules(import_id, True)
             for id, as_id in imp.names:
                 node = module.names.get(id)
+
+                # If the module does not contain a symbol with the name 'id',
+                # try checking if it's a module instead.
+                if id not in module.names or node.kind == UNBOUND_IMPORTED:
+                    possible_module_id = import_id + '.' + id
+                    mod = self.modules.get(possible_module_id)
+                    if mod is not None:
+                        node = SymbolTableNode(MODULE_REF, mod, import_id)
+                        self.add_submodules_to_parent_modules(possible_module_id, True)
+
                 if node and node.kind != UNBOUND_IMPORTED:
                     node = self.normalize_type_alias(node, imp)
                     if not node:
@@ -929,7 +965,7 @@ class SemanticAnalyzer(NodeVisitor):
                                              module_public=module_public)
                     self.add_symbol(imported_id, symbol, imp)
                 else:
-                    message = "Module has no attribute '{}'".format(id)
+                    message = "Module '{}' has no attribute '{}'".format(import_id, id)
                     extra = self.undefined_name_extra_info('{}.{}'.format(import_id, id))
                     if extra:
                         message += " {}".format(extra)
@@ -937,14 +973,14 @@ class SemanticAnalyzer(NodeVisitor):
         else:
             # Missing module.
             for id, as_id in imp.names:
-                self.add_unknown_symbol(as_id or id, imp)
+                self.add_unknown_symbol(as_id or id, imp, is_import=True)
 
     def process_import_over_existing_name(self,
                                           imported_id: str, existing_symbol: SymbolTableNode,
                                           module_symbol: SymbolTableNode,
                                           import_node: ImportBase) -> bool:
         if (existing_symbol.kind in (LDEF, GDEF, MDEF) and
-                isinstance(existing_symbol.node, (Var, FuncDef))):
+                isinstance(existing_symbol.node, (Var, FuncDef, TypeInfo))):
             # This is a valid import over an existing definition in the file. Construct a dummy
             # assignment that we'll use to type check the import.
             lvalue = NameExpr(imported_id)
@@ -991,6 +1027,7 @@ class SemanticAnalyzer(NodeVisitor):
         i_id = self.correct_relative_import(i)
         if i_id in self.modules:
             m = self.modules[i_id]
+            self.add_submodules_to_parent_modules(i_id, True)
             for name, node in m.names.items():
                 node = self.normalize_type_alias(node, i)
                 if not name.startswith('_') and node.module_public:
@@ -1006,7 +1043,7 @@ class SemanticAnalyzer(NodeVisitor):
             # Don't add any dummy symbols for 'from x import *' if 'x' is unknown.
             pass
 
-    def add_unknown_symbol(self, name: str, context: Context) -> None:
+    def add_unknown_symbol(self, name: str, context: Context, is_import: bool = False) -> None:
         var = Var(name)
         if self.type:
             var._fullname = self.type.fullname() + "." + name
@@ -1014,6 +1051,7 @@ class SemanticAnalyzer(NodeVisitor):
             var._fullname = self.qualified_name(name)
         var.is_ready = True
         var.type = AnyType()
+        var.is_suppressed_import = is_import
         self.add_symbol(name, SymbolTableNode(GDEF, var, self.cur_mod_id), context)
 
     #
@@ -1094,11 +1132,10 @@ class SemanticAnalyzer(NodeVisitor):
                 isinstance(s.rvalue, (ListExpr, TupleExpr))):
             self.add_exports(*s.rvalue.items)
 
-    def analyze_simple_literal_type(self, rvalue: Node) -> Optional[Type]:
+    def analyze_simple_literal_type(self, rvalue: Expression) -> Optional[Type]:
         """Return builtins.int if rvalue is an int literal, etc."""
-        if self.weak_opts or self.options.semantic_analysis_only or self.function_stack:
-            # Skip this if any weak options are set.
-            # Also skip if we're only doing the semantic analysis pass.
+        if self.options.semantic_analysis_only or self.function_stack:
+            # Skip this if we're only doing the semantic analysis pass.
             # This is mostly to avoid breaking unit tests.
             # Also skip inside a function; this is to avoid confusing
             # the code that handles dead code due to isinstance()
@@ -1136,7 +1173,7 @@ class SemanticAnalyzer(NodeVisitor):
                         #       just an alias for the type.
                         self.globals[lvalue.name].node = node
 
-    def analyze_lvalue(self, lval: Node, nested: bool = False,
+    def analyze_lvalue(self, lval: Lvalue, nested: bool = False,
                        add_global: bool = False,
                        explicit_type: bool = False) -> None:
         """Analyze an lvalue or assignment target.
@@ -1259,11 +1296,11 @@ class SemanticAnalyzer(NodeVisitor):
         node = memberexpr.expr.node
         return isinstance(node, Var) and node.is_self
 
-    def check_lvalue_validity(self, node: Node, ctx: Context) -> None:
+    def check_lvalue_validity(self, node: Union[Expression, SymbolNode], ctx: Context) -> None:
         if isinstance(node, (TypeInfo, TypeVarExpr)):
             self.fail('Invalid assignment target', ctx)
 
-    def store_declared_types(self, lvalue: Node, typ: Type) -> None:
+    def store_declared_types(self, lvalue: Lvalue, typ: Type) -> None:
         if isinstance(typ, StarType) and not isinstance(lvalue, StarExpr):
             self.fail('Star type only allowed for starred expressions', lvalue)
         if isinstance(lvalue, RefExpr):
@@ -1300,6 +1337,7 @@ class SemanticAnalyzer(NodeVisitor):
             return
 
         old_type = self.check_newtype_args(name, call, s)
+        call.analyzed = NewTypeExpr(name, old_type, line=call.line)
         if old_type is None:
             return
 
@@ -1321,8 +1359,7 @@ class SemanticAnalyzer(NodeVisitor):
             return
         # TODO: why does NewType work in local scopes despite always being of kind GDEF?
         node.kind = GDEF
-        node.node = newtype_class_info
-        call.analyzed = NewTypeExpr(newtype_class_info).set_line(call.line)
+        call.analyzed.info = node.node = newtype_class_info
 
     def analyze_newtype_declaration(self,
             s: AssignmentStmt) -> Tuple[Optional[str], Optional[CallExpr]]:
@@ -1345,7 +1382,6 @@ class SemanticAnalyzer(NodeVisitor):
             # overwritten later with a fully complete NewTypeExpr if there are no other
             # errors with the NewType() call.
             call = s.rvalue
-            call.analyzed = NewTypeExpr(None).set_line(call.line)
 
         return name, call
 
@@ -1361,7 +1397,8 @@ class SemanticAnalyzer(NodeVisitor):
             self.fail("Argument 1 to NewType(...) must be a string literal", context)
             has_failed = True
         elif cast(StrExpr, call.args[0]).value != name:
-            self.fail("Argument 1 to NewType(...) does not match variable name", context)
+            msg = "String argument 1 '{}' to NewType(...) does not match variable name '{}'"
+            self.fail(msg.format(cast(StrExpr, call.args[0]).value, name), context)
             has_failed = True
 
         # Check second argument
@@ -1379,13 +1416,7 @@ class SemanticAnalyzer(NodeVisitor):
         return None if has_failed else old_type
 
     def build_newtype_typeinfo(self, name: str, old_type: Type, base_type: Instance) -> TypeInfo:
-        class_def = ClassDef(name, Block([]))
-        class_def.fullname = self.qualified_name(name)
-
-        symbols = SymbolTable()
-        info = TypeInfo(symbols, class_def)
-        info.mro = [info] + base_type.type.mro
-        info.bases = [base_type]
+        info = self.basic_new_typeinfo(name, base_type)
         info.is_newtype = True
 
         # Add __init__ method
@@ -1400,7 +1431,7 @@ class SemanticAnalyzer(NodeVisitor):
             name=name)
         init_func = FuncDef('__init__', args, Block([]), typ=signature)
         init_func.info = info
-        symbols['__init__'] = SymbolTableNode(MDEF, init_func)
+        info.names['__init__'] = SymbolTableNode(MDEF, init_func)
 
         return info
 
@@ -1452,7 +1483,8 @@ class SemanticAnalyzer(NodeVisitor):
             self.fail("TypeVar() expects a string literal as first argument", context)
             return False
         if cast(StrExpr, call.args[0]).value != name:
-            self.fail("Unexpected TypeVar() argument value", context)
+            msg = "String argument 1 '{}' to TypeVar(...) does not match variable name '{}'"
+            self.fail(msg.format(cast(StrExpr, call.args[0]).value, name), context)
             return False
         return True
 
@@ -1472,7 +1504,7 @@ class SemanticAnalyzer(NodeVisitor):
             return None
         return call
 
-    def process_typevar_parameters(self, args: List[Node],
+    def process_typevar_parameters(self, args: List[Expression],
                                    names: List[Optional[str]],
                                    kinds: List[int],
                                    has_values: bool,
@@ -1549,7 +1581,7 @@ class SemanticAnalyzer(NodeVisitor):
         # TODO call.analyzed
         node.node = named_tuple
 
-    def check_namedtuple(self, node: Node, var_name: str = None) -> TypeInfo:
+    def check_namedtuple(self, node: Expression, var_name: str = None) -> TypeInfo:
         """Check if a call defines a namedtuple.
 
         The optional var_name argument is the name of the variable to
@@ -1581,7 +1613,8 @@ class SemanticAnalyzer(NodeVisitor):
             info = self.build_namedtuple_typeinfo(name, items, types)
             # Store it as a global just in case it would remain anonymous.
             self.globals[name] = SymbolTableNode(GDEF, info, self.cur_mod_id)
-        call.analyzed = NamedTupleExpr(info).set_line(call.line)
+        call.analyzed = NamedTupleExpr(info)
+        call.analyzed.set_line(call.line, call.column)
         return info
 
     def parse_namedtuple_args(self, call: CallExpr,
@@ -1591,6 +1624,7 @@ class SemanticAnalyzer(NodeVisitor):
         if len(args) < 2:
             return self.fail_namedtuple_arg("Too few arguments for namedtuple()", call)
         if len(args) > 2:
+            # FIXME: this is incorrect; namedtuple() has two additional parameters
             return self.fail_namedtuple_arg("Too many arguments for namedtuple()", call)
         if call.arg_kinds != [ARG_POS, ARG_POS]:
             return self.fail_namedtuple_arg("Unexpected arguments to namedtuple()", call)
@@ -1603,7 +1637,7 @@ class SemanticAnalyzer(NodeVisitor):
             if (fullname == 'collections.namedtuple'
                     and isinstance(args[1], (StrExpr, BytesExpr, UnicodeExpr))):
                 str_expr = cast(StrExpr, args[1])
-                items = str_expr.value.split()
+                items = str_expr.value.replace(',', ' ').split()
             else:
                 return self.fail_namedtuple_arg(
                     "List literal expected as the second argument to namedtuple()", call)
@@ -1621,9 +1655,13 @@ class SemanticAnalyzer(NodeVisitor):
                 items, types, ok = self.parse_namedtuple_fields_with_types(listexpr.items, call)
         if not types:
             types = [AnyType() for _ in items]
+        underscore = [item for item in items if item.startswith('_')]
+        if underscore:
+            self.fail("namedtuple() Field names cannot start with an underscore: "
+                      + ', '.join(underscore), call)
         return items, types, ok
 
-    def parse_namedtuple_fields_with_types(self, nodes: List[Node],
+    def parse_namedtuple_fields_with_types(self, nodes: List[Expression],
                                            context: Context) -> Tuple[List[str], List[Type], bool]:
         items = []  # type: List[str]
         types = []  # type: List[Type]
@@ -1651,51 +1689,84 @@ class SemanticAnalyzer(NodeVisitor):
         self.fail(message, context)
         return [], [], False
 
-    def build_namedtuple_typeinfo(self, name: str, items: List[str],
-                                  types: List[Type]) -> TypeInfo:
-        symbols = SymbolTable()
+    def basic_new_typeinfo(self, name: str, basetype_or_fallback: Instance) -> TypeInfo:
         class_def = ClassDef(name, Block([]))
         class_def.fullname = self.qualified_name(name)
-        info = TypeInfo(symbols, class_def)
-        # Add named tuple items as attributes.
-        # TODO: Make them read-only.
-        for item, typ in zip(items, types):
-            var = Var(item)
-            var.info = info
-            var.type = typ
-            symbols[item] = SymbolTableNode(MDEF, var)
-        # Add a __init__ method.
-        init = self.make_namedtuple_init(info, items, types)
-        symbols['__init__'] = SymbolTableNode(MDEF, init)
-        info.tuple_type = TupleType(types, self.named_type('__builtins__.tuple', [AnyType()]))
+
+        info = TypeInfo(SymbolTable(), class_def, self.cur_mod_id)
+        info.mro = [info] + basetype_or_fallback.type.mro
+        info.bases = [basetype_or_fallback]
+        return info
+
+    def build_namedtuple_typeinfo(self, name: str, items: List[str],
+                                  types: List[Type]) -> TypeInfo:
+        strtype = self.named_type('__builtins__.str')  # type: Type
+        basetuple_type = self.named_type('__builtins__.tuple', [AnyType()])
+        dictype = (self.named_type_or_none('builtins.dict', [strtype, AnyType()])
+                   or self.object_type())
+        # Actual signature should return OrderedDict[str, Union[types]]
+        ordereddictype = (self.named_type_or_none('builtins.dict', [strtype, AnyType()])
+                          or self.object_type())
+        fallback = self.named_type('__builtins__.tuple', types)
+        # Note: actual signature should accept an invariant version of Iterable[UnionType[types]],
+        # but it can't be expressed. 'new' and 'len' should be callable types.
+        iterable_type = self.named_type_or_none('typing.Iterable', [AnyType()])
+        function_type = self.named_type('__builtins__.function')
+
+        info = self.basic_new_typeinfo(name, fallback)
         info.is_named_tuple = True
-        info.mro = [info] + info.tuple_type.fallback.type.mro
-        info.bases = [info.tuple_type.fallback]
+        info.tuple_type = TupleType(types, fallback)
+
+        def add_field(var: Var, is_initialized_in_class: bool = False,
+                      is_property: bool = False) -> None:
+            var.info = info
+            var.is_initialized_in_class = is_initialized_in_class
+            var.is_property = is_property
+            info.names[var.name()] = SymbolTableNode(MDEF, var)
+
+        vars = [Var(item, typ) for item, typ in zip(items, types)]
+        for var in vars:
+            add_field(var, is_property=True)
+
+        tuple_of_strings = TupleType([strtype for _ in items], basetuple_type)
+        add_field(Var('_fields', tuple_of_strings), is_initialized_in_class=True)
+        add_field(Var('_field_types', dictype), is_initialized_in_class=True)
+        add_field(Var('_source', strtype), is_initialized_in_class=True)
+
+        # TODO: SelfType should be bound to the actual 'self'
+        this_type = self_type(info)
+
+        def add_method(funcname: str, ret: Type, args: List[Argument], name=None,
+                       is_classmethod=False) -> None:
+            if not is_classmethod:
+                args = [Argument(Var('self'), this_type, None, ARG_POS)] + args
+            types = [arg.type_annotation for arg in args]
+            items = [arg.variable.name() for arg in args]
+            arg_kinds = [arg.kind for arg in args]
+            signature = CallableType(types, arg_kinds, items, ret, function_type,
+                                     name=name or info.name() + '.' + funcname)
+            signature.is_classmethod_class = is_classmethod
+            func = FuncDef(funcname, args, Block([]), typ=signature)
+            func.info = info
+            func.is_class = is_classmethod
+            info.names[funcname] = SymbolTableNode(MDEF, func)
+
+        add_method('_replace', ret=this_type,
+                   args=[Argument(var, var.type, EllipsisExpr(), ARG_NAMED) for var in vars])
+        add_method('__init__', ret=NoneTyp(), name=info.name(),
+                   args=[Argument(var, var.type, None, ARG_POS) for var in vars])
+        add_method('_asdict', args=[], ret=ordereddictype)
+        # FIXME: make this an actual classmethod
+        add_method('_make', ret=this_type, is_classmethod=True,
+                   args=[Argument(Var('iterable', iterable_type), iterable_type, None, ARG_POS),
+                         Argument(Var('new'), AnyType(), EllipsisExpr(), ARG_NAMED),
+                         Argument(Var('len'), AnyType(), EllipsisExpr(), ARG_NAMED)])
         return info
 
     def make_argument(self, name: str, type: Type) -> Argument:
         return Argument(Var(name), type, None, ARG_POS)
 
-    def make_namedtuple_init(self, info: TypeInfo, items: List[str],
-                             types: List[Type]) -> FuncDef:
-        args = [self.make_argument(item, type) for item, type in zip(items, types)]
-        # TODO: Make sure that the self argument name is not visible?
-        args = [Argument(Var('__self'), NoneTyp(), None, ARG_POS)] + args
-        arg_kinds = [arg.kind for arg in args]
-        signature = CallableType([cast(Type, None)] + types,
-                                 arg_kinds,
-                                 ['__self'] + items,
-                                 NoneTyp(),
-                                 self.named_type('__builtins__.function'),
-                                 name=info.name())
-        func = FuncDef('__init__',
-                       args,
-                       Block([]),
-                       typ=signature)
-        func.info = info
-        return func
-
-    def analyze_types(self, items: List[Node]) -> List[Type]:
+    def analyze_types(self, items: List[Expression]) -> List[Type]:
         result = []  # type: List[Type]
         for node in items:
             try:
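
The rewritten build_namedtuple_typeinfo() above synthesizes the members that collections.namedtuple provides at runtime (_fields, _replace, _make, _asdict and a positional __init__), and parse_namedtuple_args() now also accepts a comma-separated field string. For reference, the runtime behaviour being modelled, standard library only:

    from collections import namedtuple

    Point = namedtuple('Point', 'x, y')   # comma-separated field names are accepted
    p = Point(1, 2)

    print(Point._fields)        # ('x', 'y')
    print(p._replace(x=3))      # Point(x=3, y=2)
    print(Point._make([4, 5]))  # Point(x=4, y=5)
    print(p._asdict())          # {'x': 1, 'y': 2} (an OrderedDict on older Pythons)
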
@@ -1856,7 +1927,7 @@ class SemanticAnalyzer(NodeVisitor):
         if not self.is_valid_del_target(s.expr):
             self.fail('Invalid delete target', s)
 
-    def is_valid_del_target(self, s: Node) -> bool:
+    def is_valid_del_target(self, s: Expression) -> bool:
         if isinstance(s, (IndexExpr, NameExpr, MemberExpr)):
             return True
         elif isinstance(s, TupleExpr):
@@ -2071,6 +2142,9 @@ class SemanticAnalyzer(NodeVisitor):
         base.accept(self)
         # Bind references to module attributes.
         if isinstance(base, RefExpr) and base.kind == MODULE_REF:
+            # This branch handles the case foo.bar where foo is a module.
+            # In this case base.node is the module's MypyFile and we look up
+            # bar in its namespace.  This must be done for all types of bar.
             file = cast(MypyFile, base.node)
             n = file.names.get(expr.name, None) if file is not None else None
             if n:
@@ -2089,9 +2163,25 @@ class SemanticAnalyzer(NodeVisitor):
                 # the build would terminate after semantic analysis
                 # and we wouldn't be able to report any type errors.
                 full_name = '%s.%s' % (file.fullname() if file is not None else None, expr.name)
+                mod_name = " '%s'" % file.fullname() if file is not None else ''
                 if full_name in obsolete_name_mapping:
-                    self.fail("Module has no attribute %r (it's now called %r)" % (
-                        expr.name, obsolete_name_mapping[full_name]), expr)
+                    self.fail("Module%s has no attribute %r (it's now called %r)" % (
+                        mod_name, expr.name, obsolete_name_mapping[full_name]), expr)
+        elif isinstance(base, RefExpr) and isinstance(base.node, TypeInfo):
+            # This branch handles the case C.bar where C is a class
+            # and bar is a module resulting from `import bar` inside
+            # class C.  Here base.node is a TypeInfo, and again we
+            # look up the name in its namespace.  This is done only
+            # when bar is a module; other things (e.g. methods)
+            # are handled by other code in checkmember.
+            n = base.node.names.get(expr.name)
+            if n is not None and n.kind == MODULE_REF:
+                n = self.normalize_type_alias(n, expr)
+                if not n:
+                    return
+                expr.kind = n.kind
+                expr.fullname = n.fullname
+                expr.node = n.node
 
     def visit_op_expr(self, expr: OpExpr) -> None:
         expr.left.accept(self)
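
The new TypeInfo branch above resolves attribute access through a class that imported a module in its body. A small illustration of the pattern it handles (the class and module names are only an example):

    class Config:
        import os    # 'os' becomes an attribute of the class namespace

    # Config.os is a module reference; member access through it now resolves.
    print(Config.os.sep)
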
@@ -2408,7 +2498,7 @@ class SemanticAnalyzer(NodeVisitor):
         node._fullname = name
         self.locals[-1][name] = SymbolTableNode(LDEF, node)
 
-    def add_exports(self, *exps: Node) -> None:
+    def add_exports(self, *exps: Expression) -> None:
         for exp in exps:
             if isinstance(exp, StrExpr):
                 self.all_exports.add(exp.value)
@@ -2441,7 +2531,9 @@ class SemanticAnalyzer(NodeVisitor):
                 self.function_stack and
                 self.function_stack[-1].is_dynamic()):
             return
-        self.errors.report(ctx.get_line(), msg, blocker=blocker)
+        # In case it's a bug and we don't really have context
+        assert ctx is not None, msg
+        self.errors.report(ctx.get_line(), ctx.get_column(), msg, blocker=blocker)
 
     def fail_blocker(self, msg: str, ctx: Context) -> None:
         self.fail(msg, ctx, blocker=True)
@@ -2451,7 +2543,7 @@ class SemanticAnalyzer(NodeVisitor):
                 self.function_stack and
                 self.function_stack[-1].is_dynamic()):
             return
-        self.errors.report(ctx.get_line(), msg, severity='note')
+        self.errors.report(ctx.get_line(), ctx.get_column(), msg, severity='note')
 
     def undefined_name_extra_info(self, fullname: str) -> Optional[str]:
         if fullname in obsolete_name_mapping:
@@ -2463,7 +2555,7 @@ class SemanticAnalyzer(NodeVisitor):
         try:
             node.accept(self)
         except Exception as err:
-            report_internal_error(err, self.errors.file, node.line, self.errors)
+            report_internal_error(err, self.errors.file, node.line, self.errors, self.options)
 
 
 class FirstPass(NodeVisitor):
@@ -2474,10 +2566,8 @@ class FirstPass(NodeVisitor):
 
     def __init__(self, sem: SemanticAnalyzer) -> None:
         self.sem = sem
-        self.pyversion = sem.options.python_version
-        self.platform = sem.options.platform
 
-    def analyze(self, file: MypyFile, fnam: str, mod_id: str) -> None:
+    def visit_file(self, file: MypyFile, fnam: str, mod_id: str, options: Options) -> None:
         """Perform the first analysis pass.
 
         Populate module global table.  Resolve the full names of
@@ -2493,6 +2583,9 @@ class FirstPass(NodeVisitor):
         analysis.
         """
         sem = self.sem
+        self.sem.options = options  # Needed because we sometimes call into it
+        self.pyversion = options.python_version
+        self.platform = options.platform
         sem.cur_mod_id = mod_id
         sem.errors.set_file(fnam)
         sem.globals = SymbolTable()
@@ -2537,6 +2630,8 @@ class FirstPass(NodeVisitor):
                 v._fullname = self.sem.qualified_name(name)
                 self.sem.globals[name] = SymbolTableNode(GDEF, v, self.sem.cur_mod_id)
 
+        del self.sem.options
+
     def visit_block(self, b: Block) -> None:
         if b.is_unreachable:
             return
@@ -2579,8 +2674,8 @@ class FirstPass(NodeVisitor):
     def visit_class_def(self, cdef: ClassDef) -> None:
         self.sem.check_no_global(cdef.name, cdef)
         cdef.fullname = self.sem.qualified_name(cdef.name)
-        info = TypeInfo(SymbolTable(), cdef)
-        info.set_line(cdef.line)
+        info = TypeInfo(SymbolTable(), cdef, self.sem.cur_mod_id)
+        info.set_line(cdef.line, cdef.column)
         cdef.info = info
         self.sem.globals[cdef.name] = SymbolTableNode(GDEF, info,
                                                       self.sem.cur_mod_id)
@@ -2589,11 +2684,12 @@ class FirstPass(NodeVisitor):
     def process_nested_classes(self, outer_def: ClassDef) -> None:
         for node in outer_def.defs.body:
             if isinstance(node, ClassDef):
-                node.info = TypeInfo(SymbolTable(), node)
+                node.info = TypeInfo(SymbolTable(), node, self.sem.cur_mod_id)
                 if outer_def.fullname:
                     node.info._fullname = outer_def.fullname + '.' + node.info.name()
                 else:
                     node.info._fullname = node.info.name()
+                node.fullname = node.info._fullname
                 symbol = SymbolTableNode(MDEF, node.info)
                 outer_def.info.names[node.name] = symbol
                 self.process_nested_classes(node)
@@ -2653,7 +2749,7 @@ class FirstPass(NodeVisitor):
     def visit_try_stmt(self, s: TryStmt) -> None:
         self.sem.analyze_try_stmt(s, self, add_global=True)
 
-    def analyze_lvalue(self, lvalue: Node, explicit_type: bool = False) -> None:
+    def analyze_lvalue(self, lvalue: Lvalue, explicit_type: bool = False) -> None:
         self.sem.analyze_lvalue(lvalue, add_global=True, explicit_type=explicit_type)
 
 
@@ -2668,15 +2764,16 @@ class ThirdPass(TraverserVisitor):
         self.modules = modules
         self.errors = errors
 
-    def visit_file(self, file_node: MypyFile, fnam: str) -> None:
+    def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None:
         self.errors.set_file(fnam)
+        self.options = options
         self.accept(file_node)
 
     def accept(self, node: Node) -> None:
         try:
             node.accept(self)
         except Exception as err:
-            report_internal_error(err, self.errors.file, node.line, self.errors)
+            report_internal_error(err, self.errors.file, node.line, self.errors, self.options)
 
     def visit_block(self, b: Block) -> None:
         if b.is_unreachable:
@@ -2774,7 +2871,7 @@ class ThirdPass(TraverserVisitor):
             type.accept(analyzer)
 
     def fail(self, msg: str, ctx: Context, *, blocker: bool = False) -> None:
-        self.errors.report(ctx.get_line(), msg)
+        self.errors.report(ctx.get_line(), ctx.get_column(), msg)
 
     def fail_blocker(self, msg: str, ctx: Context) -> None:
         self.fail(msg, ctx, blocker=True)
@@ -2796,8 +2893,7 @@ def self_type(typ: TypeInfo) -> Union[Instance, TupleType]:
     inst = Instance(typ, tv)
     if typ.tuple_type is None:
         return inst
-    else:
-        return TupleType(typ.tuple_type.items, inst)
+    return typ.tuple_type.copy_modified(fallback=inst)
 
 
 def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike:
@@ -2820,12 +2916,12 @@ def set_callable_name(sig: Type, fdef: FuncDef) -> Type:
         return sig
 
 
-def refers_to_fullname(node: Node, fullname: str) -> bool:
+def refers_to_fullname(node: Expression, fullname: str) -> bool:
     """Is node a name or member expression with the given full name?"""
     return isinstance(node, RefExpr) and node.fullname == fullname
 
 
-def refers_to_class_or_function(node: Node) -> bool:
+def refers_to_class_or_function(node: Expression) -> bool:
     """Does semantically analyzed node refer to a class?"""
     return (isinstance(node, RefExpr) and
             isinstance(node.node, (TypeInfo, FuncDef, OverloadedFuncDef)))
@@ -2898,7 +2994,7 @@ def infer_reachability_of_if_statement(s: IfStmt,
             break
 
 
-def infer_if_condition_value(expr: Node, pyversion: Tuple[int, int], platform: str) -> int:
+def infer_if_condition_value(expr: Expression, pyversion: Tuple[int, int], platform: str) -> int:
     """Infer whether if condition is always true/false.
 
     Return ALWAYS_TRUE if always true, ALWAYS_FALSE if always false,
@@ -2925,7 +3021,7 @@ def infer_if_condition_value(expr: Node, pyversion: Tuple[int, int], platform: s
             result = ALWAYS_TRUE if pyversion[0] == 2 else ALWAYS_FALSE
         elif name == 'PY3':
             result = ALWAYS_TRUE if pyversion[0] == 3 else ALWAYS_FALSE
-        elif name == 'MYPY':
+        elif name == 'MYPY' or name == 'TYPE_CHECKING':
             result = ALWAYS_TRUE
     if negated:
         if result == ALWAYS_TRUE:
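
With the change above, the name TYPE_CHECKING is treated like the older MYPY constant when deciding whether an if block is reachable during analysis. A sketch of the kind of guard this is meant to recognize, assuming a typing module new enough to export TYPE_CHECKING:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Analyzed by mypy, never executed at runtime.
        from collections import OrderedDict

    def first_key(d):
        # type: (OrderedDict) -> str
        return next(iter(d))

    print(TYPE_CHECKING)   # False at runtime; the analyzer treats it as always true
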
@@ -2935,7 +3031,7 @@ def infer_if_condition_value(expr: Node, pyversion: Tuple[int, int], platform: s
     return result
 
 
-def consider_sys_version_info(expr: Node, pyversion: Tuple[int, ...]) -> int:
+def consider_sys_version_info(expr: Expression, pyversion: Tuple[int, ...]) -> int:
     """Consider whether expr is a comparison involving sys.version_info.
 
     Return ALWAYS_TRUE, ALWAYS_FALSE, or TRUTH_VALUE_UNKNOWN.
@@ -2977,7 +3073,7 @@ def consider_sys_version_info(expr: Node, pyversion: Tuple[int, ...]) -> int:
     return TRUTH_VALUE_UNKNOWN
 
 
-def consider_sys_platform(expr: Node, platform: str) -> int:
+def consider_sys_platform(expr: Expression, platform: str) -> int:
     """Consider whether expr is a comparison involving sys.platform.
 
     Return ALWAYS_TRUE, ALWAYS_FALSE, or TRUTH_VALUE_UNKNOWN.
@@ -3036,7 +3132,8 @@ def fixed_comparison(left: Targ, op: str, right: Targ) -> int:
     return TRUTH_VALUE_UNKNOWN
 
 
-def contains_int_or_tuple_of_ints(expr: Node) -> Union[None, int, Tuple[int], Tuple[int, ...]]:
+def contains_int_or_tuple_of_ints(expr: Expression
+                                  ) -> Union[None, int, Tuple[int], Tuple[int, ...]]:
     if isinstance(expr, IntExpr):
         return expr.value
     if isinstance(expr, TupleExpr):
@@ -3050,7 +3147,8 @@ def contains_int_or_tuple_of_ints(expr: Node) -> Union[None, int, Tuple[int], Tu
     return None
 
 
-def contains_sys_version_info(expr: Node) -> Union[None, int, Tuple[Optional[int], Optional[int]]]:
+def contains_sys_version_info(expr: Expression
+                              ) -> Union[None, int, Tuple[Optional[int], Optional[int]]]:
     if is_sys_attr(expr, 'version_info'):
         return (None, None)  # Same as sys.version_info[:]
     if isinstance(expr, IndexExpr) and is_sys_attr(expr.base, 'version_info'):
@@ -3074,7 +3172,7 @@ def contains_sys_version_info(expr: Node) -> Union[None, int, Tuple[Optional[int
     return None
 
 
-def is_sys_attr(expr: Node, name: str) -> bool:
+def is_sys_attr(expr: Expression, name: str) -> bool:
     # TODO: This currently doesn't work with code like this:
     # - import sys as _sys
     # - from sys import version_info
@@ -3112,7 +3210,7 @@ def is_identity_signature(sig: Type) -> bool:
     return False
 
 
-def returns_any_if_called(expr: Node) -> bool:
+def returns_any_if_called(expr: Expression) -> bool:
     """Return True if we can predict that expr will return Any if called.
 
     This only uses information available during semantic analysis so this
@@ -3135,7 +3233,7 @@ def returns_any_if_called(expr: Node) -> bool:
     return False
 
 
-def find_fixed_callable_return(expr: Node) -> Optional[CallableType]:
+def find_fixed_callable_return(expr: Expression) -> Optional[CallableType]:
     if isinstance(expr, RefExpr):
         if isinstance(expr.node, FuncDef):
             typ = expr.node.type
diff --git a/mypy/stats.py b/mypy/stats.py
index ac914f4..e6c6111 100644
--- a/mypy/stats.py
+++ b/mypy/stats.py
@@ -2,7 +2,6 @@
 
 import cgi
 import os.path
-import re
 
 from typing import Any, Dict, List, cast, Tuple
 
@@ -13,7 +12,7 @@ from mypy.types import (
 )
 from mypy import nodes
 from mypy.nodes import (
-    Node, FuncDef, TypeApplication, AssignmentStmt, NameExpr, CallExpr,
+    Node, FuncDef, TypeApplication, AssignmentStmt, NameExpr, CallExpr, MypyFile,
     MemberExpr, OpExpr, ComparisonExpr, IndexExpr, UnaryExpr, YieldFromExpr
 )
 
@@ -198,7 +197,7 @@ class StatisticsVisitor(TraverserVisitor):
                                   self.line_map.get(line, TYPE_PRECISE))
 
 
-def dump_type_stats(tree: Node, path: str, inferred: bool = False,
+def dump_type_stats(tree: MypyFile, path: str, inferred: bool = False,
                     typemap: Dict[Node, Type] = None) -> None:
     if is_special_module(path):
         return
@@ -266,7 +265,7 @@ def is_complex(t: Type) -> bool:
 html_files = []  # type: List[Tuple[str, str, int, int]]
 
 
-def generate_html_report(tree: Node, path: str, type_map: Dict[Node, Type],
+def generate_html_report(tree: MypyFile, path: str, type_map: Dict[Node, Type],
                          output_dir: str) -> None:
     if is_special_module(path):
         return
diff --git a/mypy/strconv.py b/mypy/strconv.py
index c2461ca..5c8c37b 100644
--- a/mypy/strconv.py
+++ b/mypy/strconv.py
@@ -3,7 +3,7 @@
 import re
 import os
 
-from typing import Any, List
+from typing import Any, List, Tuple, Optional, Union
 
 from mypy.util import dump_tagged, short_type
 import mypy.nodes
@@ -36,11 +36,11 @@ class StrConv(NodeVisitor[str]):
         array with information specific to methods, global functions or
         anonymous functions.
         """
-        args = []
-        init = []
-        extra = []
+        args = []  # type: List[mypy.nodes.Var]
+        init = []  # type: List[Optional[mypy.nodes.AssignmentStmt]]
+        extra = []  # type: List[Tuple[str, List[mypy.nodes.Var]]]
         for i, arg in enumerate(o.arguments):
-            kind = arg.kind
+            kind = arg.kind  # type: int
             if kind == mypy.nodes.ARG_POS:
                 args.append(o.arguments[i].variable)
             elif kind in (mypy.nodes.ARG_OPT, mypy.nodes.ARG_NAMED):
@@ -91,7 +91,7 @@ class StrConv(NodeVisitor[str]):
                 a.append(id)
         return 'Import:{}({})'.format(o.line, ', '.join(a))
 
-    def visit_import_from(self, o):
+    def visit_import_from(self, o: 'mypy.nodes.ImportFrom') -> str:
         a = []
         for name, as_name in o.names:
             if as_name is not None:
@@ -100,12 +100,12 @@ class StrConv(NodeVisitor[str]):
                 a.append(name)
         return 'ImportFrom:{}({}, [{}])'.format(o.line, "." * o.relative + o.id, ', '.join(a))
 
-    def visit_import_all(self, o):
+    def visit_import_all(self, o: 'mypy.nodes.ImportAll') -> str:
         return 'ImportAll:{}({})'.format(o.line, "." * o.relative + o.id)
 
     # Definitions
 
-    def visit_func_def(self, o):
+    def visit_func_def(self, o: 'mypy.nodes.FuncDef') -> str:
         a = self.func_helper(o)
         a.insert(0, o.name())
         if mypy.nodes.ARG_NAMED in [arg.kind for arg in o.arguments]:
@@ -120,13 +120,13 @@ class StrConv(NodeVisitor[str]):
             a.insert(-1, 'Property')
         return self.dump(a, o)
 
-    def visit_overloaded_func_def(self, o):
-        a = o.items[:]
+    def visit_overloaded_func_def(self, o: 'mypy.nodes.OverloadedFuncDef') -> str:
+        a = o.items[:]  # type: Any
         if o.type:
             a.insert(0, o.type)
         return self.dump(a, o)
 
-    def visit_class_def(self, o):
+    def visit_class_def(self, o: 'mypy.nodes.ClassDef') -> str:
         a = [o.name, o.defs.body]
         # Display base types unless they are implicitly just builtins.object
         # (in this case base_type_exprs is empty).
@@ -151,7 +151,7 @@ class StrConv(NodeVisitor[str]):
             a.insert(1, 'FallbackToAny')
         return self.dump(a, o)
 
-    def visit_var(self, o):
+    def visit_var(self, o: 'mypy.nodes.Var') -> str:
         l = ''
         # Add :nil line number tag if no line number is specified to remain
         # compatible with old test case descriptions that assume this.
@@ -159,27 +159,25 @@ class StrConv(NodeVisitor[str]):
             l = ':nil'
         return 'Var' + l + '(' + o.name() + ')'
 
-    def visit_global_decl(self, o):
+    def visit_global_decl(self, o: 'mypy.nodes.GlobalDecl') -> str:
         return self.dump([o.names], o)
 
-    def visit_nonlocal_decl(self, o):
+    def visit_nonlocal_decl(self, o: 'mypy.nodes.NonlocalDecl') -> str:
         return self.dump([o.names], o)
 
-    def visit_decorator(self, o):
+    def visit_decorator(self, o: 'mypy.nodes.Decorator') -> str:
         return self.dump([o.var, o.decorators, o.func], o)
 
-    def visit_annotation(self, o):
-        return 'Type:{}({})'.format(o.line, o.type)
-
     # Statements
 
-    def visit_block(self, o):
+    def visit_block(self, o: 'mypy.nodes.Block') -> str:
         return self.dump(o.body, o)
 
-    def visit_expression_stmt(self, o):
+    def visit_expression_stmt(self, o: 'mypy.nodes.ExpressionStmt') -> str:
         return self.dump([o.expr], o)
 
-    def visit_assignment_stmt(self, o):
+    def visit_assignment_stmt(self, o: 'mypy.nodes.AssignmentStmt') -> str:
+        a = []  # type: List[Any]
         if len(o.lvalues) > 1:
             a = [('Lvalues', o.lvalues)]
         else:
@@ -189,17 +187,17 @@ class StrConv(NodeVisitor[str]):
             a.append(o.type)
         return self.dump(a, o)
 
-    def visit_operator_assignment_stmt(self, o):
+    def visit_operator_assignment_stmt(self, o: 'mypy.nodes.OperatorAssignmentStmt') -> str:
         return self.dump([o.op, o.lvalue, o.rvalue], o)
 
-    def visit_while_stmt(self, o):
-        a = [o.expr, o.body]
+    def visit_while_stmt(self, o: 'mypy.nodes.WhileStmt') -> str:
+        a = [o.expr, o.body]  # type: List[Any]
         if o.else_body:
             a.append(('Else', o.else_body.body))
         return self.dump(a, o)
 
-    def visit_for_stmt(self, o):
-        a = []
+    def visit_for_stmt(self, o: 'mypy.nodes.ForStmt') -> str:
+        a = []  # type: List[Any]
         if o.is_async:
             a.append(('Async', ''))
         a.extend([o.index, o.expr, o.body])
@@ -207,11 +205,11 @@ class StrConv(NodeVisitor[str]):
             a.append(('Else', o.else_body.body))
         return self.dump(a, o)
 
-    def visit_return_stmt(self, o):
+    def visit_return_stmt(self, o: 'mypy.nodes.ReturnStmt') -> str:
         return self.dump([o.expr], o)
 
-    def visit_if_stmt(self, o):
-        a = []
+    def visit_if_stmt(self, o: 'mypy.nodes.IfStmt') -> str:
+        a = []  # type: List[Any]
         for i in range(len(o.expr)):
             a.append(('If', [o.expr[i]]))
             a.append(('Then', o.body[i].body))
@@ -221,38 +219,29 @@ class StrConv(NodeVisitor[str]):
         else:
             return self.dump([a, ('Else', o.else_body.body)], o)
 
-    def visit_break_stmt(self, o):
+    def visit_break_stmt(self, o: 'mypy.nodes.BreakStmt') -> str:
         return self.dump([], o)
 
-    def visit_continue_stmt(self, o):
+    def visit_continue_stmt(self, o: 'mypy.nodes.ContinueStmt') -> str:
         return self.dump([], o)
 
-    def visit_pass_stmt(self, o):
+    def visit_pass_stmt(self, o: 'mypy.nodes.PassStmt') -> str:
         return self.dump([], o)
 
-    def visit_raise_stmt(self, o):
+    def visit_raise_stmt(self, o: 'mypy.nodes.RaiseStmt') -> str:
         return self.dump([o.expr, o.from_expr], o)
 
-    def visit_assert_stmt(self, o):
+    def visit_assert_stmt(self, o: 'mypy.nodes.AssertStmt') -> str:
         return self.dump([o.expr], o)
 
-    def visit_yield_stmt(self, o):
+    def visit_await_expr(self, o: 'mypy.nodes.AwaitExpr') -> str:
         return self.dump([o.expr], o)
 
-    def visit_yield_from_stmt(self, o):
+    def visit_del_stmt(self, o: 'mypy.nodes.DelStmt') -> str:
         return self.dump([o.expr], o)
 
-    def visit_yield_expr(self, o):
-        return self.dump([o.expr], o)
-
-    def visit_await_expr(self, o):
-        return self.dump([o.expr], o)
-
-    def visit_del_stmt(self, o):
-        return self.dump([o.expr], o)
-
-    def visit_try_stmt(self, o):
-        a = [o.body]
+    def visit_try_stmt(self, o: 'mypy.nodes.TryStmt') -> str:
+        a = [o.body]  # type: List[Any]
 
         for i in range(len(o.vars)):
             a.append(o.types[i])
@@ -267,8 +256,8 @@ class StrConv(NodeVisitor[str]):
 
         return self.dump(a, o)
 
-    def visit_with_stmt(self, o):
-        a = []
+    def visit_with_stmt(self, o: 'mypy.nodes.WithStmt') -> str:
+        a = []  # type: List[Any]
         if o.is_async:
             a.append(('Async', ''))
         for i in range(len(o.expr)):
@@ -277,31 +266,31 @@ class StrConv(NodeVisitor[str]):
                 a.append(('Target', [o.target[i]]))
         return self.dump(a + [o.body], o)
 
-    def visit_print_stmt(self, o):
-        a = o.args[:]
+    def visit_print_stmt(self, o: 'mypy.nodes.PrintStmt') -> str:
+        a = o.args[:]  # type: List[Any]
         if o.target:
             a.append(('Target', [o.target]))
         if o.newline:
             a.append('Newline')
         return self.dump(a, o)
 
-    def visit_exec_stmt(self, o):
+    def visit_exec_stmt(self, o: 'mypy.nodes.ExecStmt') -> str:
         return self.dump([o.expr, o.variables1, o.variables2], o)
 
     # Expressions
 
     # Simple expressions
 
-    def visit_int_expr(self, o):
+    def visit_int_expr(self, o: 'mypy.nodes.IntExpr') -> str:
         return 'IntExpr({})'.format(o.value)
 
-    def visit_str_expr(self, o):
+    def visit_str_expr(self, o: 'mypy.nodes.StrExpr') -> str:
         return 'StrExpr({})'.format(self.str_repr(o.value))
 
-    def visit_bytes_expr(self, o):
+    def visit_bytes_expr(self, o: 'mypy.nodes.BytesExpr') -> str:
         return 'BytesExpr({})'.format(self.str_repr(o.value))
 
-    def visit_unicode_expr(self, o):
+    def visit_unicode_expr(self, o: 'mypy.nodes.UnicodeExpr') -> str:
         return 'UnicodeExpr({})'.format(self.str_repr(o.value))
 
     def str_repr(self, s):
@@ -309,24 +298,24 @@ class StrConv(NodeVisitor[str]):
         return re.sub('[^\\x20-\\x7e]',
                       lambda m: r'\u%.4x' % ord(m.group(0)), s)
 
-    def visit_float_expr(self, o):
+    def visit_float_expr(self, o: 'mypy.nodes.FloatExpr') -> str:
         return 'FloatExpr({})'.format(o.value)
 
-    def visit_complex_expr(self, o):
+    def visit_complex_expr(self, o: 'mypy.nodes.ComplexExpr') -> str:
         return 'ComplexExpr({})'.format(o.value)
 
-    def visit_ellipsis(self, o):
+    def visit_ellipsis(self, o: 'mypy.nodes.EllipsisExpr') -> str:
         return 'Ellipsis'
 
-    def visit_star_expr(self, o):
+    def visit_star_expr(self, o: 'mypy.nodes.StarExpr') -> str:
         return self.dump([o.expr], o)
 
-    def visit_name_expr(self, o):
+    def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> str:
         return (short_type(o) + '(' + self.pretty_name(o.name, o.kind,
                                                        o.fullname, o.is_def)
                 + ')')
 
-    def pretty_name(self, name, kind, fullname, is_def):
+    def pretty_name(self, name: str, kind: int, fullname: str, is_def: bool) -> str:
         n = name
         if is_def:
             n += '*'
@@ -342,21 +331,24 @@ class StrConv(NodeVisitor[str]):
             n += ' [m]'
         return n
 
-    def visit_member_expr(self, o):
+    def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> str:
         return self.dump([o.expr, self.pretty_name(o.name, o.kind, o.fullname,
                                                    o.is_def)], o)
 
-    def visit_yield_from_expr(self, o):
+    def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> str:
+        return self.dump([o.expr], o)
+
+    def visit_yield_from_expr(self, o: 'mypy.nodes.YieldFromExpr') -> str:
         if o.expr:
             return self.dump([o.expr.accept(self)], o)
         else:
             return self.dump([], o)
 
-    def visit_call_expr(self, o):
+    def visit_call_expr(self, o: 'mypy.nodes.CallExpr') -> str:
         if o.analyzed:
             return o.analyzed.accept(self)
-        args = []
-        extra = []
+        args = []  # type: List[mypy.nodes.Node]
+        extra = []  # type: List[Union[str, Tuple[str, List[Any]]]]
         for i, kind in enumerate(o.arg_kinds):
             if kind in [mypy.nodes.ARG_POS, mypy.nodes.ARG_STAR]:
                 args.append(o.args[i])
@@ -368,50 +360,50 @@ class StrConv(NodeVisitor[str]):
                 extra.append(('DictVarArg', [o.args[i]]))
             else:
                 raise RuntimeError('unknown kind %d' % kind)
+        a = [o.callee, ('Args', args)]  # type: List[Any]
+        return self.dump(a + extra, o)
 
-        return self.dump([o.callee, ('Args', args)] + extra, o)
-
-    def visit_op_expr(self, o):
+    def visit_op_expr(self, o: 'mypy.nodes.OpExpr') -> str:
         return self.dump([o.op, o.left, o.right], o)
 
-    def visit_comparison_expr(self, o):
+    def visit_comparison_expr(self, o: 'mypy.nodes.ComparisonExpr') -> str:
         return self.dump([o.operators, o.operands], o)
 
-    def visit_cast_expr(self, o):
+    def visit_cast_expr(self, o: 'mypy.nodes.CastExpr') -> str:
         return self.dump([o.expr, o.type], o)
 
-    def visit_reveal_type_expr(self, o):
+    def visit_reveal_type_expr(self, o: 'mypy.nodes.RevealTypeExpr') -> str:
         return self.dump([o.expr], o)
 
-    def visit_unary_expr(self, o):
+    def visit_unary_expr(self, o: 'mypy.nodes.UnaryExpr') -> str:
         return self.dump([o.op, o.expr], o)
 
-    def visit_list_expr(self, o):
+    def visit_list_expr(self, o: 'mypy.nodes.ListExpr') -> str:
         return self.dump(o.items, o)
 
-    def visit_dict_expr(self, o):
+    def visit_dict_expr(self, o: 'mypy.nodes.DictExpr') -> str:
         return self.dump([[k, v] for k, v in o.items], o)
 
-    def visit_set_expr(self, o):
+    def visit_set_expr(self, o: 'mypy.nodes.SetExpr') -> str:
         return self.dump(o.items, o)
 
-    def visit_tuple_expr(self, o):
+    def visit_tuple_expr(self, o: 'mypy.nodes.TupleExpr') -> str:
         return self.dump(o.items, o)
 
-    def visit_index_expr(self, o):
+    def visit_index_expr(self, o: 'mypy.nodes.IndexExpr') -> str:
         if o.analyzed:
             return o.analyzed.accept(self)
         return self.dump([o.base, o.index], o)
 
-    def visit_super_expr(self, o):
+    def visit_super_expr(self, o: 'mypy.nodes.SuperExpr') -> str:
         return self.dump([o.name], o)
 
-    def visit_type_application(self, o):
+    def visit_type_application(self, o: 'mypy.nodes.TypeApplication') -> str:
         return self.dump([o.expr, ('Types', o.types)], o)
 
-    def visit_type_var_expr(self, o):
+    def visit_type_var_expr(self, o: 'mypy.nodes.TypeVarExpr') -> str:
         import mypy.types
-        a = []
+        a = []  # type: List[Any]
         if o.variance == mypy.nodes.COVARIANT:
             a += ['Variance(COVARIANT)']
         if o.variance == mypy.nodes.CONTRAVARIANT:
@@ -422,48 +414,49 @@ class StrConv(NodeVisitor[str]):
             a += ['UpperBound({})'.format(o.upper_bound)]
         return self.dump(a, o)
 
-    def visit_type_alias_expr(self, o):
+    def visit_type_alias_expr(self, o: 'mypy.nodes.TypeAliasExpr') -> str:
         return 'TypeAliasExpr({})'.format(o.type)
 
-    def visit_namedtuple_expr(self, o):
+    def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> str:
         return 'NamedTupleExpr:{}({}, {})'.format(o.line,
                                                   o.info.name(),
                                                   o.info.tuple_type)
 
-    def visit__promote_expr(self, o):
+    def visit__promote_expr(self, o: 'mypy.nodes.PromoteExpr') -> str:
         return 'PromoteExpr:{}({})'.format(o.line, o.type)
 
-    def visit_newtype_expr(self, o):
-        return 'NewTypeExpr:{}({}, {})'.format(o.line, o.fullname(), self.dump([o.value], o))
+    def visit_newtype_expr(self, o: 'mypy.nodes.NewTypeExpr') -> str:
+        return 'NewTypeExpr:{}({}, {})'.format(o.line, o.name,
+                                               self.dump([o.old_type], o))
 
-    def visit_func_expr(self, o):
+    def visit_func_expr(self, o: 'mypy.nodes.FuncExpr') -> str:
         a = self.func_helper(o)
         return self.dump(a, o)
 
-    def visit_generator_expr(self, o):
+    def visit_generator_expr(self, o: 'mypy.nodes.GeneratorExpr') -> str:
         condlists = o.condlists if any(o.condlists) else None
         return self.dump([o.left_expr, o.indices, o.sequences, condlists], o)
 
-    def visit_list_comprehension(self, o):
+    def visit_list_comprehension(self, o: 'mypy.nodes.ListComprehension') -> str:
         return self.dump([o.generator], o)
 
-    def visit_set_comprehension(self, o):
+    def visit_set_comprehension(self, o: 'mypy.nodes.SetComprehension') -> str:
         return self.dump([o.generator], o)
 
-    def visit_dictionary_comprehension(self, o):
+    def visit_dictionary_comprehension(self, o: 'mypy.nodes.DictionaryComprehension') -> str:
         condlists = o.condlists if any(o.condlists) else None
         return self.dump([o.key, o.value, o.indices, o.sequences, condlists], o)
 
-    def visit_conditional_expr(self, o):
+    def visit_conditional_expr(self, o: 'mypy.nodes.ConditionalExpr') -> str:
         return self.dump([('Condition', [o.cond]), o.if_expr, o.else_expr], o)
 
-    def visit_slice_expr(self, o):
-        a = [o.begin_index, o.end_index, o.stride]
+    def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> str:
+        a = [o.begin_index, o.end_index, o.stride]  # type: List[Any]
         if not a[0]:
             a[0] = '<empty>'
         if not a[1]:
             a[1] = '<empty>'
         return self.dump(a, o)
 
-    def visit_backquote_expr(self, o):
+    def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> str:
         return self.dump([o.expr], o)
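
To make the annotation pattern in the strconv.py hunk above concrete, here is a minimal,
self-contained sketch in the same style: typed visit_* methods plus comment-style
annotations for locals. Node, WhileStmt and MiniStrConv are hypothetical stand-ins for
illustration only, not mypy's real classes.

    from typing import Any, List, Optional

    class Node:
        """Hypothetical stand-in for mypy.nodes.Node."""

    class WhileStmt(Node):
        def __init__(self, expr: Node, body: Node,
                     else_body: Optional[Node] = None) -> None:
            self.expr = expr
            self.body = body
            self.else_body = else_body

    class MiniStrConv:
        def dump(self, items: List[Any], obj: Node) -> str:
            # The real StrConv pretty-prints each child; this stub only
            # reports how many items were collected for the node.
            return '{}({} items)'.format(type(obj).__name__, len(items))

        def visit_while_stmt(self, o: WhileStmt) -> str:
            a = [o.expr, o.body]  # type: List[Any]
            if o.else_body:
                a.append(('Else', o.else_body))
            return self.dump(a, o)

    print(MiniStrConv().visit_while_stmt(WhileStmt(Node(), Node())))
    # -> WhileStmt(2 items)
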
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index b765105..2bf7965 100644
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -40,6 +40,7 @@ import imp
 import importlib
 import json
 import os.path
+import pkgutil
 import subprocess
 import sys
 import textwrap
@@ -52,7 +53,7 @@ import mypy.errors
 import mypy.traverser
 from mypy import defaults
 from mypy.nodes import (
-    Node, IntExpr, UnaryExpr, StrExpr, BytesExpr, NameExpr, FloatExpr, MemberExpr, TupleExpr,
+    Expression, IntExpr, UnaryExpr, StrExpr, BytesExpr, NameExpr, FloatExpr, MemberExpr, TupleExpr,
     ListExpr, ComparisonExpr, CallExpr, ClassDef, MypyFile, Decorator, AssignmentStmt,
     IfStmt, ImportAll, ImportFrom, Import, FuncDef, FuncBase, ARG_STAR, ARG_STAR2, ARG_NAMED
 )
@@ -66,7 +67,10 @@ Options = NamedTuple('Options', [('pyversion', Tuple[int, int]),
                                  ('doc_dir', str),
                                  ('search_path', List[str]),
                                  ('interpreter', str),
-                                 ('modules', List[str])])
+                                 ('modules', List[str]),
+                                 ('ignore_errors', bool),
+                                 ('recursive', bool),
+                                 ])
 
 
 def generate_stub_for_module(module: str, output_dir: str, quiet: bool = False,
@@ -239,8 +243,8 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
             self.add('\n')
         if not self.is_top_level():
             self_inits = find_self_initializers(o)
-            for init in self_inits:
-                init_code = self.get_init(init)
+            for init, value in self_inits:
+                init_code = self.get_init(init, value)
                 if init_code:
                     self.add(init_code)
         self.add("%sdef %s(" % (self._indent, o.name()))
@@ -254,22 +258,8 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
             if init_stmt:
                 if kind == ARG_NAMED and '*' not in args:
                     args.append('*')
-                arg = '%s=' % name
-                rvalue = init_stmt.rvalue
-                if isinstance(rvalue, IntExpr):
-                    arg += str(rvalue.value)
-                elif isinstance(rvalue, StrExpr):
-                    arg += "''"
-                elif isinstance(rvalue, BytesExpr):
-                    arg += "b''"
-                elif isinstance(rvalue, FloatExpr):
-                    arg += "0.0"
-                elif isinstance(rvalue, UnaryExpr) and isinstance(rvalue.expr, IntExpr):
-                    arg += '-%s' % rvalue.expr.value
-                elif isinstance(rvalue, NameExpr) and rvalue.name in ('None', 'True', 'False'):
-                    arg += rvalue.name
-                else:
-                    arg += '...'
+                typename = self.get_str_type_of_node(init_stmt.rvalue, True)
+                arg = '{}: {} = ...'.format(name, typename)
             elif kind == ARG_STAR:
                 arg = '*%s' % name
             elif kind == ARG_STAR2:
@@ -277,8 +267,15 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
             else:
                 arg = name
             args.append(arg)
+        retname = None
+        if o.name() == '__init__':
+            retname = 'None'
+        retfield = ''
+        if retname is not None:
+            retfield = ' -> ' + retname
+
         self.add(', '.join(args))
-        self.add("): ...\n")
+        self.add("){}: ...\n".format(retfield))
         self._state = FUNC
 
     def visit_decorator(self, o: Decorator) -> None:
@@ -349,7 +346,7 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
             found = False
             for item in items:
                 if isinstance(item, NameExpr):
-                    init = self.get_init(item.name)
+                    init = self.get_init(item.name, o.rvalue)
                     if init:
                         found = True
                         if not sep and not self._indent and \
@@ -363,7 +360,7 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
         if all(foundl):
             self._state = VAR
 
-    def is_namedtuple(self, expr: Node) -> bool:
+    def is_namedtuple(self, expr: Expression) -> bool:
         if not isinstance(expr, CallExpr):
             return False
         callee = expr.callee
@@ -448,7 +445,7 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
                 self.add_import_line('import %s as %s\n' % (id, target_name))
                 self.record_name(target_name)
 
-    def get_init(self, lvalue: str) -> str:
+    def get_init(self, lvalue: str, rvalue: Expression) -> str:
         """Return initializer for a variable.
 
         Return None if we've generated one already or if the variable is internal.
@@ -460,8 +457,8 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
         if self.is_private_name(lvalue) or self.is_not_in_all(lvalue):
             return None
         self._vars[-1].append(lvalue)
-        self.add_typing_import('Any')
-        return '%s%s = ...  # type: Any\n' % (self._indent, lvalue)
+        typename = self.get_str_type_of_node(rvalue)
+        return '%s%s = ...  # type: %s\n' % (self._indent, lvalue, typename)
 
     def add(self, string: str) -> None:
         """Add text to generated stub."""
@@ -484,7 +481,7 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
         """Return the text for the stub."""
         imports = ''
         if self._imports:
-            imports += 'from typing import %s\n' % ", ".join(self._imports)
+            imports += 'from typing import %s\n' % ", ".join(sorted(self._imports))
         if self._import_lines:
             imports += ''.join(self._import_lines)
         if imports and self._output:
@@ -507,6 +504,28 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
                                                      '__setstate__',
                                                      '__slots__'))
 
+    def get_str_type_of_node(self, rvalue: Expression,
+                             can_infer_optional: bool = False) -> str:
+        if isinstance(rvalue, IntExpr):
+            return 'int'
+        if isinstance(rvalue, StrExpr):
+            return 'str'
+        if isinstance(rvalue, BytesExpr):
+            return 'bytes'
+        if isinstance(rvalue, FloatExpr):
+            return 'float'
+        if isinstance(rvalue, UnaryExpr) and isinstance(rvalue.expr, IntExpr):
+            return 'int'
+        if isinstance(rvalue, NameExpr) and rvalue.name in ('True', 'False'):
+            return 'bool'
+        if can_infer_optional and \
+           isinstance(rvalue, NameExpr) and rvalue.name == 'None':
+            self.add_typing_import('Optional')
+            self.add_typing_import('Any')
+            return 'Optional[Any]'
+        self.add_typing_import('Any')
+        return 'Any'
+
     def is_top_level(self) -> bool:
         """Are we processing the top level of a file?"""
         return self._indent == ''
@@ -524,8 +543,8 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
         return self.is_top_level() and name in self._toplevel_names
 
 
-def find_self_initializers(fdef: FuncBase) -> List[str]:
-    results = []  # type: List[str]
+def find_self_initializers(fdef: FuncBase) -> List[Tuple[str, Expression]]:
+    results = []  # type: List[Tuple[str, Expression]]
 
     class SelfTraverser(mypy.traverser.TraverserVisitor):
         def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
@@ -533,13 +552,13 @@ def find_self_initializers(fdef: FuncBase) -> List[str]:
             if (isinstance(lvalue, MemberExpr) and
                     isinstance(lvalue.expr, NameExpr) and
                     lvalue.expr.name == 'self'):
-                results.append(lvalue.name)
+                results.append((lvalue.name, o.rvalue))
 
     fdef.accept(SelfTraverser())
     return results
 
 
-def find_classes(node: Node) -> Set[str]:
+def find_classes(node: MypyFile) -> Set[str]:
     results = set()  # type: Set[str]
 
     class ClassTraverser(mypy.traverser.TraverserVisitor):
@@ -550,7 +569,7 @@ def find_classes(node: Node) -> Set[str]:
     return results
 
 
-def get_qualified_name(o: Node) -> str:
+def get_qualified_name(o: Expression) -> str:
     if isinstance(o, NameExpr):
         return o.name
     elif isinstance(o, MemberExpr):
@@ -559,10 +578,22 @@ def get_qualified_name(o: Node) -> str:
         return '<ERROR>'
 
 
+def walk_packages(packages: List[str]):
+    for package_name in packages:
+        package = __import__(package_name)
+        yield package.__name__
+        for importer, qualified_name, ispkg in pkgutil.walk_packages(package.__path__,
+                                                                     prefix=package.__name__ + ".",
+                                                                     onerror=lambda r: None):
+            yield qualified_name
+
+
 def main() -> None:
     options = parse_options()
     if not os.path.isdir('out'):
         raise SystemExit('Directory "out" does not exist')
+    if options.recursive and options.no_import:
+        raise SystemExit('recursive stub generation without importing is not currently supported')
     sigs = {}  # type: Any
     class_sigs = {}  # type: Any
     if options.doc_dir:
@@ -574,21 +605,29 @@ def main() -> None:
             all_class_sigs += class_sigs
         sigs = dict(find_unique_signatures(all_sigs))
         class_sigs = dict(find_unique_signatures(all_class_sigs))
-    for module in options.modules:
-        generate_stub_for_module(module, 'out',
-                                 add_header=True,
-                                 sigs=sigs,
-                                 class_sigs=class_sigs,
-                                 pyversion=options.pyversion,
-                                 no_import=options.no_import,
-                                 search_path=options.search_path,
-                                 interpreter=options.interpreter)
+    for module in (options.modules if not options.recursive else walk_packages(options.modules)):
+        try:
+            generate_stub_for_module(module, 'out',
+                                     add_header=True,
+                                     sigs=sigs,
+                                     class_sigs=class_sigs,
+                                     pyversion=options.pyversion,
+                                     no_import=options.no_import,
+                                     search_path=options.search_path,
+                                     interpreter=options.interpreter)
+        except Exception as e:
+            if not options.ignore_errors:
+                raise e
+            else:
+                print("Stub generation failed for", module, file=sys.stderr)
 
 
 def parse_options() -> Options:
     args = sys.argv[1:]
     pyversion = defaults.PYTHON3_VERSION
     no_import = False
+    recursive = False
+    ignore_errors = False
     doc_dir = ''
     search_path = []  # type: List[str]
     interpreter = ''
@@ -604,6 +643,10 @@ def parse_options() -> Options:
         elif args[0] == '-p':
             interpreter = args[1]
             args = args[1:]
+        elif args[0] == '--recursive':
+            recursive = True
+        elif args[0] == '--ignore-errors':
+            ignore_errors = True
         elif args[0] == '--py2':
             pyversion = defaults.PYTHON2_VERSION
         elif args[0] == '--no-import':
@@ -622,7 +665,9 @@ def parse_options() -> Options:
                    doc_dir=doc_dir,
                    search_path=search_path,
                    interpreter=interpreter,
-                   modules=args)
+                   modules=args,
+                   ignore_errors=ignore_errors,
+                   recursive=recursive)
 
 
 def default_python2_interpreter() -> str:
@@ -649,6 +694,8 @@ def usage() -> None:
 
         Options:
           --py2           run in Python 2 mode (default: Python 3 mode)
+          --recursive     traverse listed modules to generate inner package modules as well
+          --ignore-errors ignore errors when trying to generate stubs for modules
           --no-import     don't import the modules, just parse and analyze them
                           (doesn't work with C extension modules and doesn't
                           respect __all__)
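
The new --recursive mode hinges on the walk_packages() helper added in this hunk. Below
is that helper restated as a standalone sketch (same logic as above, with a type-annotated
signature of my own) so it can be tried outside stubgen; it assumes each listed name is an
importable package, i.e. has a __path__.

    import pkgutil
    from typing import Iterator, List

    def walk_packages(packages: List[str]) -> Iterator[str]:
        for package_name in packages:
            package = __import__(package_name)
            # Yield the package itself, then every module found beneath it.
            yield package.__name__
            for _importer, qualified_name, _ispkg in pkgutil.walk_packages(
                    package.__path__,
                    prefix=package.__name__ + '.',
                    onerror=lambda failed: None):
                yield qualified_name

    # For example (stdlib package; exact output depends on the Python version):
    #   print(list(walk_packages(['json'])))
    #   -> ['json', 'json.decoder', 'json.encoder', 'json.scanner', 'json.tool']
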
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 3d9df0c..fff5df0 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -2,8 +2,8 @@ from typing import cast, List, Dict, Callable
 
 from mypy.types import (
     Type, AnyType, UnboundType, TypeVisitor, ErrorType, Void, NoneTyp,
-    Instance, TypeVarType, CallableType, TupleType, UnionType, Overloaded, ErasedType, TypeList,
-    PartialType, DeletedType, UninhabitedType, TypeType, is_named_instance
+    Instance, TypeVarType, CallableType, TupleType, UnionType, Overloaded,
+    ErasedType, TypeList, PartialType, DeletedType, UninhabitedType, TypeType, is_named_instance
 )
 import mypy.applytype
 import mypy.constraints
@@ -163,26 +163,23 @@ class SubtypeVisitor(TypeVisitor[bool]):
     def visit_tuple_type(self, left: TupleType) -> bool:
         right = self.right
         if isinstance(right, Instance):
-            if is_named_instance(right, 'builtins.object'):
+            if is_named_instance(right, 'typing.Sized'):
                 return True
-            if is_named_instance(right, 'builtins.tuple'):
-                target_item_type = right.args[0]
-                return all(is_subtype(item, target_item_type)
-                           for item in left.items)
-            elif is_named_instance(right, 'typing.Sized'):
-                return True
-            elif (is_named_instance(right, 'typing.Iterable') or
+            elif (is_named_instance(right, 'builtins.tuple') or
+                  is_named_instance(right, 'typing.Iterable') or
                   is_named_instance(right, 'typing.Container') or
                   is_named_instance(right, 'typing.Sequence') or
                   is_named_instance(right, 'typing.Reversible')):
                 iter_type = right.args[0]
                 return all(is_subtype(li, iter_type) for li in left.items)
+            elif is_subtype(left.fallback, right, self.check_type_parameter):
+                return True
             return False
         elif isinstance(right, TupleType):
             if len(left.items) != len(right.items):
                 return False
-            for i in range(len(left.items)):
-                if not is_subtype(left.items[i], right.items[i], self.check_type_parameter):
+            for l, r in zip(left.items, right.items):
+                if not is_subtype(l, r, self.check_type_parameter):
                     return False
             if not is_subtype(left.fallback, right.fallback, self.check_type_parameter):
                 return False
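
For reference, here is ordinary annotated Python (no mypy internals) showing what the
reordered tuple rules in visit_tuple_type accept; the comments name the branch that
applies. This is a sketch checked against the logic in the hunk above rather than a
transcript of the type checker's output.

    from typing import Iterable, Sized, Tuple

    def count_items(xs: Sized) -> int:
        return len(xs)

    def total(xs: Iterable[int]) -> int:
        return sum(xs)

    def describe(x: object) -> str:
        return repr(x)

    pair = (1, 2)  # type: Tuple[int, int]

    count_items(pair)    # typing.Sized branch: any tuple type is Sized
    total(pair)          # Iterable/Container/Sequence/Reversible branch:
                         # each item type must be a subtype of the item argument
    describe(pair)       # fallback branch: the tuple's fallback instance
                         # (builtins.tuple, hence object) is a subtype of the target
    # total(('a', 'b'))  # rejected: str items are not a subtype of int
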
diff --git a/mypy/treetransform.py b/mypy/treetransform.py
index 06ea5cb..100ff78 100644
--- a/mypy/treetransform.py
+++ b/mypy/treetransform.py
@@ -16,12 +16,13 @@ from mypy.nodes import (
     UnicodeExpr, FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr,
     SliceExpr, OpExpr, UnaryExpr, FuncExpr, TypeApplication, PrintStmt,
     SymbolTable, RefExpr, TypeVarExpr, NewTypeExpr, PromoteExpr,
-    ComparisonExpr, TempNode, StarExpr,
+    ComparisonExpr, TempNode, StarExpr, Statement, Expression,
     YieldFromExpr, NamedTupleExpr, NonlocalDecl, SetComprehension,
     DictionaryComprehension, ComplexExpr, TypeAliasExpr, EllipsisExpr,
     YieldExpr, ExecStmt, Argument, BackquoteExpr, AwaitExpr,
 )
-from mypy.types import Type, FunctionLike, Instance
+from mypy.types import Type, FunctionLike
+from mypy.traverser import TraverserVisitor
 from mypy.visitor import NodeVisitor
 
 
@@ -36,7 +37,7 @@ class TransformVisitor(NodeVisitor[Node]):
 
      * Do not duplicate TypeInfo nodes. This would generally not be desirable.
      * Only update some name binding cross-references, but only those that
-       refer to Var nodes, not those targeting ClassDef, TypeInfo or FuncDef
+       refer to Var or FuncDef nodes, not those targeting ClassDef or TypeInfo
        nodes.
      * Types are not transformed, but you can override type() to also perform
        type transformation.
@@ -48,10 +49,15 @@ class TransformVisitor(NodeVisitor[Node]):
         # There may be multiple references to a Var node. Keep track of
         # Var translations using a dictionary.
         self.var_map = {}  # type: Dict[Var, Var]
+        # These are uninitialized placeholder nodes used temporarily for nested
+        # functions while we are transforming a top-level function. This maps an
+        # untransformed node to a placeholder (which will later become the
+        # transformed node).
+        self.func_placeholder_map = {}  # type: Dict[FuncDef, FuncDef]
 
-    def visit_mypy_file(self, node: MypyFile) -> Node:
+    def visit_mypy_file(self, node: MypyFile) -> MypyFile:
         # NOTE: The 'names' and 'imports' instance variables will be empty!
-        new = MypyFile(self.nodes(node.defs), [], node.is_bom,
+        new = MypyFile(self.statements(node.defs), [], node.is_bom,
                        ignored_lines=set(node.ignored_lines))
         new._name = node._name
         new._fullname = node._fullname
@@ -59,13 +65,13 @@ class TransformVisitor(NodeVisitor[Node]):
         new.names = SymbolTable()
         return new
 
-    def visit_import(self, node: Import) -> Node:
+    def visit_import(self, node: Import) -> Import:
         return Import(node.ids[:])
 
-    def visit_import_from(self, node: ImportFrom) -> Node:
+    def visit_import_from(self, node: ImportFrom) -> ImportFrom:
         return ImportFrom(node.id, node.relative, node.names[:])
 
-    def visit_import_all(self, node: ImportAll) -> Node:
+    def visit_import_all(self, node: ImportAll) -> ImportAll:
         return ImportAll(node.id, node.relative)
 
     def copy_argument(self, argument: Argument) -> Argument:
@@ -74,12 +80,12 @@ class TransformVisitor(NodeVisitor[Node]):
         if argument.initialization_statement:
             init_lvalue = cast(
                 NameExpr,
-                self.node(argument.initialization_statement.lvalues[0]),
+                self.expr(argument.initialization_statement.lvalues[0]),
             )
             init_lvalue.set_line(argument.line)
             init_stmt = AssignmentStmt(
                 [init_lvalue],
-                self.node(argument.initialization_statement.rvalue),
+                self.expr(argument.initialization_statement.rvalue),
                 self.optional_type(argument.initialization_statement.type),
             )
 
@@ -98,6 +104,18 @@ class TransformVisitor(NodeVisitor[Node]):
 
     def visit_func_def(self, node: FuncDef) -> FuncDef:
         # Note that a FuncDef must be transformed to a FuncDef.
+
+        # These contortions are needed to handle the case of recursive
+        # references inside the function being transformed.
+        # Set up placeholder nodes for references within this function
+        # to other functions defined inside it.
+        # Don't create an entry for this function itself though,
+        # since we want self-references to point to the original
+        # function if this is the top-level node we are transforming.
+        init = FuncMapInitializer(self)
+        for stmt in node.body.body:
+            stmt.accept(init)
+
         new = FuncDef(node.name(),
                       [self.copy_argument(arg) for arg in node.arguments],
                       self.block(node.body),
@@ -113,9 +131,19 @@ class TransformVisitor(NodeVisitor[Node]):
         new.is_class = node.is_class
         new.is_property = node.is_property
         new.original_def = node.original_def
-        return new
 
-    def visit_func_expr(self, node: FuncExpr) -> Node:
+        if node in self.func_placeholder_map:
+            # There is a placeholder definition for this function. Replace
+            # the attributes of the placeholder with those from the transformed
+            # function. We know that the classes will be identical (otherwise
+            # this wouldn't work).
+            result = self.func_placeholder_map[node]
+            result.__dict__ = new.__dict__
+            return result
+        else:
+            return new
+
+    def visit_func_expr(self, node: FuncExpr) -> FuncExpr:
         new = FuncExpr([self.copy_argument(arg) for arg in node.arguments],
                        self.block(node.body),
                        cast(FunctionLike, self.optional_type(node.type)))
@@ -141,7 +169,7 @@ class TransformVisitor(NodeVisitor[Node]):
                 result.append(None)
         return result
 
-    def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> Node:
+    def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> OverloadedFuncDef:
         items = [self.visit_decorator(decorator)
                  for decorator in node.items]
         for newitem, olditem in zip(items, node.items):
@@ -152,33 +180,33 @@ class TransformVisitor(NodeVisitor[Node]):
         new.info = node.info
         return new
 
-    def visit_class_def(self, node: ClassDef) -> Node:
+    def visit_class_def(self, node: ClassDef) -> ClassDef:
         new = ClassDef(node.name,
                        self.block(node.defs),
                        node.type_vars,
-                       self.nodes(node.base_type_exprs),
+                       self.expressions(node.base_type_exprs),
                        node.metaclass)
         new.fullname = node.fullname
         new.info = node.info
-        new.decorators = [decorator.accept(self)
+        new.decorators = [self.expr(decorator)
                           for decorator in node.decorators]
         new.is_builtinclass = node.is_builtinclass
         return new
 
-    def visit_global_decl(self, node: GlobalDecl) -> Node:
+    def visit_global_decl(self, node: GlobalDecl) -> GlobalDecl:
         return GlobalDecl(node.names[:])
 
-    def visit_nonlocal_decl(self, node: NonlocalDecl) -> Node:
+    def visit_nonlocal_decl(self, node: NonlocalDecl) -> NonlocalDecl:
         return NonlocalDecl(node.names[:])
 
     def visit_block(self, node: Block) -> Block:
-        return Block(self.nodes(node.body))
+        return Block(self.statements(node.body))
 
     def visit_decorator(self, node: Decorator) -> Decorator:
         # Note that a Decorator must be transformed to a Decorator.
         func = self.visit_func_def(node.func)
         func.line = node.func.line
-        new = Decorator(func, self.nodes(node.decorators),
+        new = Decorator(func, self.expressions(node.decorators),
                         self.visit_var(node.var))
         new.is_overload = node.is_overload
         return new
@@ -201,123 +229,122 @@ class TransformVisitor(NodeVisitor[Node]):
         self.var_map[node] = new
         return new
 
-    def visit_expression_stmt(self, node: ExpressionStmt) -> Node:
-        return ExpressionStmt(self.node(node.expr))
+    def visit_expression_stmt(self, node: ExpressionStmt) -> ExpressionStmt:
+        return ExpressionStmt(self.expr(node.expr))
 
-    def visit_assignment_stmt(self, node: AssignmentStmt) -> Node:
+    def visit_assignment_stmt(self, node: AssignmentStmt) -> AssignmentStmt:
         return self.duplicate_assignment(node)
 
     def duplicate_assignment(self, node: AssignmentStmt) -> AssignmentStmt:
-        new = AssignmentStmt(self.nodes(node.lvalues),
-                             self.node(node.rvalue),
+        new = AssignmentStmt(self.expressions(node.lvalues),
+                             self.expr(node.rvalue),
                              self.optional_type(node.type))
         new.line = node.line
         return new
 
     def visit_operator_assignment_stmt(self,
-                                       node: OperatorAssignmentStmt) -> Node:
+                                       node: OperatorAssignmentStmt) -> OperatorAssignmentStmt:
         return OperatorAssignmentStmt(node.op,
-                                      self.node(node.lvalue),
-                                      self.node(node.rvalue))
+                                      self.expr(node.lvalue),
+                                      self.expr(node.rvalue))
 
-    def visit_while_stmt(self, node: WhileStmt) -> Node:
-        return WhileStmt(self.node(node.expr),
+    def visit_while_stmt(self, node: WhileStmt) -> WhileStmt:
+        return WhileStmt(self.expr(node.expr),
                          self.block(node.body),
                          self.optional_block(node.else_body))
 
-    def visit_for_stmt(self, node: ForStmt) -> Node:
-        return ForStmt(self.node(node.index),
-                       self.node(node.expr),
+    def visit_for_stmt(self, node: ForStmt) -> ForStmt:
+        return ForStmt(self.expr(node.index),
+                       self.expr(node.expr),
                        self.block(node.body),
                        self.optional_block(node.else_body))
 
-    def visit_return_stmt(self, node: ReturnStmt) -> Node:
-        return ReturnStmt(self.optional_node(node.expr))
+    def visit_return_stmt(self, node: ReturnStmt) -> ReturnStmt:
+        return ReturnStmt(self.optional_expr(node.expr))
 
-    def visit_assert_stmt(self, node: AssertStmt) -> Node:
-        return AssertStmt(self.node(node.expr))
+    def visit_assert_stmt(self, node: AssertStmt) -> AssertStmt:
+        return AssertStmt(self.expr(node.expr))
 
-    def visit_del_stmt(self, node: DelStmt) -> Node:
-        return DelStmt(self.node(node.expr))
+    def visit_del_stmt(self, node: DelStmt) -> DelStmt:
+        return DelStmt(self.expr(node.expr))
 
-    def visit_if_stmt(self, node: IfStmt) -> Node:
-        return IfStmt(self.nodes(node.expr),
+    def visit_if_stmt(self, node: IfStmt) -> IfStmt:
+        return IfStmt(self.expressions(node.expr),
                       self.blocks(node.body),
                       self.optional_block(node.else_body))
 
-    def visit_break_stmt(self, node: BreakStmt) -> Node:
+    def visit_break_stmt(self, node: BreakStmt) -> BreakStmt:
         return BreakStmt()
 
-    def visit_continue_stmt(self, node: ContinueStmt) -> Node:
+    def visit_continue_stmt(self, node: ContinueStmt) -> ContinueStmt:
         return ContinueStmt()
 
-    def visit_pass_stmt(self, node: PassStmt) -> Node:
+    def visit_pass_stmt(self, node: PassStmt) -> PassStmt:
         return PassStmt()
 
-    def visit_raise_stmt(self, node: RaiseStmt) -> Node:
-        return RaiseStmt(self.optional_node(node.expr),
-                         self.optional_node(node.from_expr))
+    def visit_raise_stmt(self, node: RaiseStmt) -> RaiseStmt:
+        return RaiseStmt(self.optional_expr(node.expr),
+                         self.optional_expr(node.from_expr))
 
-    def visit_try_stmt(self, node: TryStmt) -> Node:
+    def visit_try_stmt(self, node: TryStmt) -> TryStmt:
         return TryStmt(self.block(node.body),
                        self.optional_names(node.vars),
-                       self.optional_nodes(node.types),
+                       self.optional_expressions(node.types),
                        self.blocks(node.handlers),
                        self.optional_block(node.else_body),
                        self.optional_block(node.finally_body))
 
-    def visit_with_stmt(self, node: WithStmt) -> Node:
-        return WithStmt(self.nodes(node.expr),
-                        self.optional_nodes(node.target),
+    def visit_with_stmt(self, node: WithStmt) -> WithStmt:
+        return WithStmt(self.expressions(node.expr),
+                        self.optional_expressions(node.target),
                         self.block(node.body))
 
-    def visit_print_stmt(self, node: PrintStmt) -> Node:
-        return PrintStmt(self.nodes(node.args),
+    def visit_print_stmt(self, node: PrintStmt) -> PrintStmt:
+        return PrintStmt(self.expressions(node.args),
                          node.newline,
-                         self.optional_node(node.target))
+                         self.optional_expr(node.target))
 
-    def visit_exec_stmt(self, node: ExecStmt) -> Node:
-        return ExecStmt(self.node(node.expr),
-                        self.optional_node(node.variables1),
-                        self.optional_node(node.variables2))
+    def visit_exec_stmt(self, node: ExecStmt) -> ExecStmt:
+        return ExecStmt(self.expr(node.expr),
+                        self.optional_expr(node.variables1),
+                        self.optional_expr(node.variables2))
 
-    def visit_star_expr(self, node: StarExpr) -> Node:
+    def visit_star_expr(self, node: StarExpr) -> StarExpr:
         return StarExpr(node.expr)
 
-    def visit_int_expr(self, node: IntExpr) -> Node:
+    def visit_int_expr(self, node: IntExpr) -> IntExpr:
         return IntExpr(node.value)
 
-    def visit_str_expr(self, node: StrExpr) -> Node:
+    def visit_str_expr(self, node: StrExpr) -> StrExpr:
         return StrExpr(node.value)
 
-    def visit_bytes_expr(self, node: BytesExpr) -> Node:
+    def visit_bytes_expr(self, node: BytesExpr) -> BytesExpr:
         return BytesExpr(node.value)
 
-    def visit_unicode_expr(self, node: UnicodeExpr) -> Node:
+    def visit_unicode_expr(self, node: UnicodeExpr) -> UnicodeExpr:
         return UnicodeExpr(node.value)
 
-    def visit_float_expr(self, node: FloatExpr) -> Node:
+    def visit_float_expr(self, node: FloatExpr) -> FloatExpr:
         return FloatExpr(node.value)
 
-    def visit_complex_expr(self, node: ComplexExpr) -> Node:
+    def visit_complex_expr(self, node: ComplexExpr) -> ComplexExpr:
         return ComplexExpr(node.value)
 
-    def visit_ellipsis(self, node: EllipsisExpr) -> Node:
+    def visit_ellipsis(self, node: EllipsisExpr) -> EllipsisExpr:
         return EllipsisExpr()
 
-    def visit_name_expr(self, node: NameExpr) -> Node:
+    def visit_name_expr(self, node: NameExpr) -> NameExpr:
         return self.duplicate_name(node)
 
     def duplicate_name(self, node: NameExpr) -> NameExpr:
         # This method is used when the transform result must be a NameExpr.
         # visit_name_expr() is used when there is no such restriction.
         new = NameExpr(node.name)
-        new.info = node.info
         self.copy_ref(new, node)
         return new
 
-    def visit_member_expr(self, node: MemberExpr) -> Node:
-        member = MemberExpr(self.node(node.expr),
+    def visit_member_expr(self, node: MemberExpr) -> MemberExpr:
+        member = MemberExpr(self.expr(node.expr),
                             node.name)
         if node.def_var:
             member.def_var = self.visit_var(node.def_var)
@@ -330,67 +357,70 @@ class TransformVisitor(NodeVisitor[Node]):
         target = original.node
         if isinstance(target, Var):
             target = self.visit_var(target)
+        elif isinstance(target, FuncDef):
+            # Use a placeholder node for the function if it exists.
+            target = self.func_placeholder_map.get(target, target)
         new.node = target
         new.is_def = original.is_def
 
-    def visit_yield_from_expr(self, node: YieldFromExpr) -> Node:
-        return YieldFromExpr(self.node(node.expr))
+    def visit_yield_from_expr(self, node: YieldFromExpr) -> YieldFromExpr:
+        return YieldFromExpr(self.expr(node.expr))
 
-    def visit_yield_expr(self, node: YieldExpr) -> Node:
-        return YieldExpr(self.node(node.expr))
+    def visit_yield_expr(self, node: YieldExpr) -> YieldExpr:
+        return YieldExpr(self.expr(node.expr))
 
-    def visit_await_expr(self, node: AwaitExpr) -> Node:
-        return AwaitExpr(self.node(node.expr))
+    def visit_await_expr(self, node: AwaitExpr) -> AwaitExpr:
+        return AwaitExpr(self.expr(node.expr))
 
-    def visit_call_expr(self, node: CallExpr) -> Node:
-        return CallExpr(self.node(node.callee),
-                        self.nodes(node.args),
+    def visit_call_expr(self, node: CallExpr) -> CallExpr:
+        return CallExpr(self.expr(node.callee),
+                        self.expressions(node.args),
                         node.arg_kinds[:],
                         node.arg_names[:],
-                        self.optional_node(node.analyzed))
+                        self.optional_expr(node.analyzed))
 
-    def visit_op_expr(self, node: OpExpr) -> Node:
-        new = OpExpr(node.op, self.node(node.left), self.node(node.right))
+    def visit_op_expr(self, node: OpExpr) -> OpExpr:
+        new = OpExpr(node.op, self.expr(node.left), self.expr(node.right))
         new.method_type = self.optional_type(node.method_type)
         return new
 
-    def visit_comparison_expr(self, node: ComparisonExpr) -> Node:
-        new = ComparisonExpr(node.operators, self.nodes(node.operands))
+    def visit_comparison_expr(self, node: ComparisonExpr) -> ComparisonExpr:
+        new = ComparisonExpr(node.operators, self.expressions(node.operands))
         new.method_types = [self.optional_type(t) for t in node.method_types]
         return new
 
-    def visit_cast_expr(self, node: CastExpr) -> Node:
-        return CastExpr(self.node(node.expr),
+    def visit_cast_expr(self, node: CastExpr) -> CastExpr:
+        return CastExpr(self.expr(node.expr),
                         self.type(node.type))
 
-    def visit_reveal_type_expr(self, node: RevealTypeExpr) -> Node:
-        return RevealTypeExpr(self.node(node.expr))
+    def visit_reveal_type_expr(self, node: RevealTypeExpr) -> RevealTypeExpr:
+        return RevealTypeExpr(self.expr(node.expr))
 
-    def visit_super_expr(self, node: SuperExpr) -> Node:
+    def visit_super_expr(self, node: SuperExpr) -> SuperExpr:
         new = SuperExpr(node.name)
         new.info = node.info
         return new
 
-    def visit_unary_expr(self, node: UnaryExpr) -> Node:
-        new = UnaryExpr(node.op, self.node(node.expr))
+    def visit_unary_expr(self, node: UnaryExpr) -> UnaryExpr:
+        new = UnaryExpr(node.op, self.expr(node.expr))
         new.method_type = self.optional_type(node.method_type)
         return new
 
-    def visit_list_expr(self, node: ListExpr) -> Node:
-        return ListExpr(self.nodes(node.items))
+    def visit_list_expr(self, node: ListExpr) -> ListExpr:
+        return ListExpr(self.expressions(node.items))
 
-    def visit_dict_expr(self, node: DictExpr) -> Node:
-        return DictExpr([(self.node(key), self.node(value))
+    def visit_dict_expr(self, node: DictExpr) -> DictExpr:
+        return DictExpr([(self.expr(key), self.expr(value))
                          for key, value in node.items])
 
-    def visit_tuple_expr(self, node: TupleExpr) -> Node:
-        return TupleExpr(self.nodes(node.items))
+    def visit_tuple_expr(self, node: TupleExpr) -> TupleExpr:
+        return TupleExpr(self.expressions(node.items))
 
-    def visit_set_expr(self, node: SetExpr) -> Node:
-        return SetExpr(self.nodes(node.items))
+    def visit_set_expr(self, node: SetExpr) -> SetExpr:
+        return SetExpr(self.expressions(node.items))
 
-    def visit_index_expr(self, node: IndexExpr) -> Node:
-        new = IndexExpr(self.node(node.base), self.node(node.index))
+    def visit_index_expr(self, node: IndexExpr) -> IndexExpr:
+        new = IndexExpr(self.expr(node.base), self.expr(node.index))
         if node.method_type:
             new.method_type = self.type(node.method_type)
         if node.analyzed:
@@ -402,50 +432,51 @@ class TransformVisitor(NodeVisitor[Node]):
         return new
 
     def visit_type_application(self, node: TypeApplication) -> TypeApplication:
-        return TypeApplication(self.node(node.expr),
+        return TypeApplication(self.expr(node.expr),
                                self.types(node.types))
 
-    def visit_list_comprehension(self, node: ListComprehension) -> Node:
+    def visit_list_comprehension(self, node: ListComprehension) -> ListComprehension:
         generator = self.duplicate_generator(node.generator)
         generator.set_line(node.generator.line)
         return ListComprehension(generator)
 
-    def visit_set_comprehension(self, node: SetComprehension) -> Node:
+    def visit_set_comprehension(self, node: SetComprehension) -> SetComprehension:
         generator = self.duplicate_generator(node.generator)
         generator.set_line(node.generator.line)
         return SetComprehension(generator)
 
-    def visit_dictionary_comprehension(self, node: DictionaryComprehension) -> Node:
-        return DictionaryComprehension(self.node(node.key), self.node(node.value),
-                             [self.node(index) for index in node.indices],
-                             [self.node(s) for s in node.sequences],
-                             [[self.node(cond) for cond in conditions]
+    def visit_dictionary_comprehension(self, node: DictionaryComprehension
+                                       ) -> DictionaryComprehension:
+        return DictionaryComprehension(self.expr(node.key), self.expr(node.value),
+                             [self.expr(index) for index in node.indices],
+                             [self.expr(s) for s in node.sequences],
+                             [[self.expr(cond) for cond in conditions]
                               for conditions in node.condlists])
 
-    def visit_generator_expr(self, node: GeneratorExpr) -> Node:
+    def visit_generator_expr(self, node: GeneratorExpr) -> GeneratorExpr:
         return self.duplicate_generator(node)
 
     def duplicate_generator(self, node: GeneratorExpr) -> GeneratorExpr:
-        return GeneratorExpr(self.node(node.left_expr),
-                             [self.node(index) for index in node.indices],
-                             [self.node(s) for s in node.sequences],
-                             [[self.node(cond) for cond in conditions]
+        return GeneratorExpr(self.expr(node.left_expr),
+                             [self.expr(index) for index in node.indices],
+                             [self.expr(s) for s in node.sequences],
+                             [[self.expr(cond) for cond in conditions]
                               for conditions in node.condlists])
 
-    def visit_slice_expr(self, node: SliceExpr) -> Node:
-        return SliceExpr(self.optional_node(node.begin_index),
-                         self.optional_node(node.end_index),
-                         self.optional_node(node.stride))
+    def visit_slice_expr(self, node: SliceExpr) -> SliceExpr:
+        return SliceExpr(self.optional_expr(node.begin_index),
+                         self.optional_expr(node.end_index),
+                         self.optional_expr(node.stride))
 
-    def visit_conditional_expr(self, node: ConditionalExpr) -> Node:
-        return ConditionalExpr(self.node(node.cond),
-                               self.node(node.if_expr),
-                               self.node(node.else_expr))
+    def visit_conditional_expr(self, node: ConditionalExpr) -> ConditionalExpr:
+        return ConditionalExpr(self.expr(node.cond),
+                               self.expr(node.if_expr),
+                               self.expr(node.else_expr))
 
-    def visit_backquote_expr(self, node: BackquoteExpr) -> Node:
-        return BackquoteExpr(self.node(node.expr))
+    def visit_backquote_expr(self, node: BackquoteExpr) -> BackquoteExpr:
+        return BackquoteExpr(self.expr(node.expr))
 
-    def visit_type_var_expr(self, node: TypeVarExpr) -> Node:
+    def visit_type_var_expr(self, node: TypeVarExpr) -> TypeVarExpr:
         return TypeVarExpr(node.name(), node.fullname(),
                            self.types(node.values),
                            self.type(node.upper_bound), variance=node.variance)
@@ -454,15 +485,17 @@ class TransformVisitor(NodeVisitor[Node]):
         return TypeAliasExpr(node.type)
 
     def visit_newtype_expr(self, node: NewTypeExpr) -> NewTypeExpr:
-        return NewTypeExpr(node.info)
+        res = NewTypeExpr(node.name, node.old_type, line=node.line)
+        res.info = node.info
+        return res
 
-    def visit_namedtuple_expr(self, node: NamedTupleExpr) -> Node:
+    def visit_namedtuple_expr(self, node: NamedTupleExpr) -> NamedTupleExpr:
         return NamedTupleExpr(node.info)
 
-    def visit__promote_expr(self, node: PromoteExpr) -> Node:
+    def visit__promote_expr(self, node: PromoteExpr) -> PromoteExpr:
         return PromoteExpr(node.type)
 
-    def visit_temp_node(self, node: TempNode) -> Node:
+    def visit_temp_node(self, node: TempNode) -> TempNode:
         return TempNode(self.type(node.type))
 
     def node(self, node: Node) -> Node:
@@ -470,13 +503,31 @@ class TransformVisitor(NodeVisitor[Node]):
         new.set_line(node.line)
         return new
 
+    def mypyfile(self, node: MypyFile) -> MypyFile:
+        new = node.accept(self)
+        assert isinstance(new, MypyFile)
+        new.set_line(node.line)
+        return new
+
+    def expr(self, expr: Expression) -> Expression:
+        new = expr.accept(self)
+        assert isinstance(new, Expression)
+        new.set_line(expr.line)
+        return new
+
+    def stmt(self, stmt: Statement) -> Statement:
+        new = stmt.accept(self)
+        assert isinstance(new, Statement)
+        new.set_line(stmt.line)
+        return new
+
     # Helpers
     #
     # All the node helpers also propagate line numbers.
 
-    def optional_node(self, node: Node) -> Node:
-        if node:
-            return self.node(node)
+    def optional_expr(self, expr: Expression) -> Expression:
+        if expr:
+            return self.expr(expr)
         else:
             return None
 
@@ -491,11 +542,14 @@ class TransformVisitor(NodeVisitor[Node]):
         else:
             return None
 
-    def nodes(self, nodes: List[Node]) -> List[Node]:
-        return [self.node(node) for node in nodes]
+    def statements(self, statements: List[Statement]) -> List[Statement]:
+        return [self.stmt(stmt) for stmt in statements]
 
-    def optional_nodes(self, nodes: List[Node]) -> List[Node]:
-        return [self.optional_node(node) for node in nodes]
+    def expressions(self, expressions: List[Expression]) -> List[Expression]:
+        return [self.expr(expr) for expr in expressions]
+
+    def optional_expressions(self, expressions: List[Expression]) -> List[Expression]:
+        return [self.optional_expr(expr) for expr in expressions]
 
     def blocks(self, blocks: List[Block]) -> List[Block]:
         return [self.block(block) for block in blocks]
@@ -527,3 +581,20 @@ class TransformVisitor(NodeVisitor[Node]):
 
     def optional_types(self, types: List[Type]) -> List[Type]:
         return [self.optional_type(type) for type in types]
+
+
+class FuncMapInitializer(TraverserVisitor):
+    """This traverser creates mappings from nested FuncDefs to placeholder FuncDefs.
+
+    The placeholders will later be replaced with transformed nodes.
+    """
+
+    def __init__(self, transformer: TransformVisitor) -> None:
+        self.transformer = transformer
+
+    def visit_func_def(self, node: FuncDef) -> None:
+        if node not in self.transformer.func_placeholder_map:
+            # Haven't seen this FuncDef before, so create a placeholder node.
+            self.transformer.func_placeholder_map[node] = FuncDef(
+                node.name(), node.arguments, node.body, None)
+        super().visit_func_def(node)
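
The placeholder machinery above is easiest to see in isolation. The sketch below uses a
hypothetical FuncDef stand-in and the same two-pass flow: register placeholders first
(FuncMapInitializer's job), resolve references against the map while transforming (as
copy_ref now does), then graft the finished attributes onto the placeholder via __dict__
so references taken earlier end up pointing at the transformed node.

    from typing import Dict

    class FuncDef:
        """Hypothetical stand-in for mypy.nodes.FuncDef."""
        def __init__(self, name: str) -> None:
            self.name = name

    func_placeholder_map = {}  # type: Dict[FuncDef, FuncDef]

    def resolve_reference(target: FuncDef) -> FuncDef:
        # Use the placeholder node for the function if one exists (cf. copy_ref).
        return func_placeholder_map.get(target, target)

    def transform_func_def(node: FuncDef) -> FuncDef:
        new = FuncDef(node.name)                 # the actual transformation
        placeholder = func_placeholder_map.get(node)
        if placeholder is not None:
            placeholder.__dict__ = new.__dict__  # fill in the placeholder
            return placeholder
        return new

    inner = FuncDef('inner')
    func_placeholder_map[inner] = FuncDef(inner.name)   # pass 1: placeholder
    early_ref = resolve_reference(inner)                # reference resolved first
    result = transform_func_def(inner)                  # nested def transformed later
    assert early_ref is result                          # both name the finished node
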
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 447d1f0..931cf7c 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -1,15 +1,15 @@
 """Semantic analysis of types"""
 
-from typing import Callable, cast, List, Tuple
+from typing import Callable, cast, List
 
 from mypy.types import (
-    Type, UnboundType, TypeVarType, TupleType, UnionType, Instance, AnyType, CallableType,
-    Void, NoneTyp, DeletedType, TypeList, TypeVarDef, TypeVisitor, StarType, PartialType,
-    EllipsisType, UninhabitedType, TypeType
+    Type, UnboundType, TypeVarType, TupleType, UnionType, Instance,
+    AnyType, CallableType, Void, NoneTyp, DeletedType, TypeList, TypeVarDef, TypeVisitor,
+    StarType, PartialType, EllipsisType, UninhabitedType, TypeType
 )
 from mypy.nodes import (
     BOUND_TVAR, TYPE_ALIAS, UNBOUND_IMPORTED,
-    TypeInfo, Context, SymbolTableNode, TypeVarExpr, Var, Node,
+    TypeInfo, Context, SymbolTableNode, Var, Expression,
     IndexExpr, RefExpr
 )
 from mypy.sametypes import is_same_type
@@ -28,7 +28,7 @@ type_constructors = {
 }
 
 
-def analyze_type_alias(node: Node,
+def analyze_type_alias(node: Expression,
                        lookup_func: Callable[[str, Context], SymbolTableNode],
                        lookup_fqn_func: Callable[[str], SymbolTableNode],
                        fail_func: Callable[[str, Context], None]) -> Type:
@@ -107,6 +107,9 @@ class TypeAnalyser(TypeVisitor[Type]):
             elif fullname == 'typing.Any':
                 return AnyType()
             elif fullname == 'typing.Tuple':
+                if len(t.args) == 0 and not t.empty_tuple_index:
+                    # Bare 'Tuple' is same as 'tuple'
+                    return self.builtin_type('builtins.tuple')
                 if len(t.args) == 2 and isinstance(t.args[1], EllipsisType):
                     # Tuple[T, ...] (uniform, variable-length tuple)
                     node = self.lookup_fqn_func('builtins.tuple')
@@ -164,7 +167,8 @@ class TypeAnalyser(TypeVisitor[Type]):
                 # valid count at this point. Thus we may construct an
                 # Instance with an invalid number of type arguments.
                 instance = Instance(info, self.anal_array(t.args), t.line)
-                if info.tuple_type is None:
+                tup = info.tuple_type
+                if tup is None:
                     return instance
                 else:
                     # The class has a Tuple[...] base class so it will be
@@ -172,9 +176,8 @@ class TypeAnalyser(TypeVisitor[Type]):
                     if t.args:
                         self.fail('Generic tuple types not supported', t)
                         return AnyType()
-                    return TupleType(self.anal_array(info.tuple_type.items),
-                                     fallback=instance,
-                                     line=t.line)
+                    return tup.copy_modified(items=self.anal_array(tup.items),
+                                             fallback=instance)
         else:
             return AnyType()
 
diff --git a/mypy/typefixture.py b/mypy/typefixture.py
index 59ffeea..a5fac17 100644
--- a/mypy/typefixture.py
+++ b/mypy/typefixture.py
@@ -193,6 +193,7 @@ class TypeFixture:
                             a[-1], self.function)
 
     def make_type_info(self, name: str,
+                       module_name: str = None,
                        is_abstract: bool = False,
                        mro: List[TypeInfo] = None,
                        bases: List[Instance] = None,
@@ -203,6 +204,12 @@ class TypeFixture:
         class_def = ClassDef(name, Block([]), None, [])
         class_def.fullname = name
 
+        if module_name is None:
+            if '.' in name:
+                module_name = name.rsplit('.', 1)[0]
+            else:
+                module_name = '__main__'
+
         if typevars:
             v = []  # type: List[TypeVarDef]
             for id, n in enumerate(typevars, 1):
@@ -213,7 +220,7 @@ class TypeFixture:
                 v.append(TypeVarDef(n, id, None, self.o, variance=variance))
             class_def.type_vars = v
 
-        info = TypeInfo(SymbolTable(), class_def)
+        info = TypeInfo(SymbolTable(), class_def, module_name)
         if mro is None:
             mro = []
             if name != 'builtins.object':
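
The default module name above is derived from the fully qualified class name. A small sketch of the rule (the helper name is illustrative):

    def derive_module(name):
        # 'builtins.tuple' -> 'builtins'; names without a dot map to '__main__'
        return name.rsplit('.', 1)[0] if '.' in name else '__main__'

    assert derive_module('builtins.tuple') == 'builtins'
    assert derive_module('A') == '__main__'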
diff --git a/mypy/types.py b/mypy/types.py
index d582079..09e473d 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -21,15 +21,20 @@ class Type(mypy.nodes.Context):
     """Abstract base class for all types."""
 
     line = 0
+    column = 0
     can_be_true = True
     can_be_false = True
 
-    def __init__(self, line: int = -1) -> None:
+    def __init__(self, line: int = -1, column: int = -1) -> None:
         self.line = line
+        self.column = column
 
     def get_line(self) -> int:
         return self.line
 
+    def get_column(self) -> int:
+        return self.column
+
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         raise RuntimeError('Not implemented')
 
@@ -110,9 +115,11 @@ class TypeVarDef(mypy.nodes.Context):
     upper_bound = None  # type: Type
     variance = INVARIANT  # type: int
     line = 0
+    column = 0
 
     def __init__(self, name: str, id: Union[TypeVarId, int], values: Optional[List[Type]],
-                 upper_bound: Type, variance: int = INVARIANT, line: int = -1) -> None:
+                 upper_bound: Type, variance: int = INVARIANT, line: int = -1,
+                 column: int = -1) -> None:
         self.name = name
         if isinstance(id, int):
             id = TypeVarId(id)
@@ -121,16 +128,20 @@ class TypeVarDef(mypy.nodes.Context):
         self.upper_bound = upper_bound
         self.variance = variance
         self.line = line
+        self.column = column
 
     @staticmethod
     def new_unification_variable(old: 'TypeVarDef') -> 'TypeVarDef':
         new_id = TypeVarId.new(meta_level=1)
         return TypeVarDef(old.name, new_id, old.values,
-                          old.upper_bound, old.variance, old.line)
+                          old.upper_bound, old.variance, old.line, old.column)
 
     def get_line(self) -> int:
         return self.line
 
+    def get_column(self) -> int:
+        return self.column
+
     def __repr__(self) -> str:
         if self.values:
             return '{} in {}'.format(self.name, tuple(self.values))
@@ -170,20 +181,25 @@ class UnboundType(Type):
     optional = False
     # is this type a return type?
     is_ret_type = False
+    # special case for X[()]
+    empty_tuple_index = False
 
     def __init__(self,
                  name: str,
                  args: List[Type] = None,
                  line: int = -1,
+                 column: int = -1,
                  optional: bool = False,
-                 is_ret_type: bool = False) -> None:
+                 is_ret_type: bool = False,
+                 empty_tuple_index: bool = False) -> None:
         if not args:
             args = []
         self.name = name
         self.args = args
         self.optional = optional
         self.is_ret_type = is_ret_type
-        super().__init__(line)
+        self.empty_tuple_index = empty_tuple_index
+        super().__init__(line, column)
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_unbound_type(self)
@@ -218,8 +234,8 @@ class TypeList(Type):
 
     items = None  # type: List[Type]
 
-    def __init__(self, items: List[Type], line: int = -1) -> None:
-        super().__init__(line)
+    def __init__(self, items: List[Type], line: int = -1, column: int = -1) -> None:
+        super().__init__(line, column)
         self.items = items
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
@@ -239,8 +255,8 @@ class TypeList(Type):
 class AnyType(Type):
     """The type 'Any'."""
 
-    def __init__(self, implicit: bool = False, line: int = -1) -> None:
-        super().__init__(line)
+    def __init__(self, implicit: bool = False, line: int = -1, column: int = -1) -> None:
+        super().__init__(line, column)
         self.implicit = implicit
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
@@ -265,15 +281,15 @@ class Void(Type):
     can_be_true = False
     source = ''   # May be None; function that generated this value
 
-    def __init__(self, source: str = None, line: int = -1) -> None:
+    def __init__(self, source: str = None, line: int = -1, column: int = -1) -> None:
         self.source = source
-        super().__init__(line)
+        super().__init__(line, column)
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_void(self)
 
     def with_source(self, source: str) -> 'Void':
-        return Void(source, self.line)
+        return Void(source, self.line, self.column)
 
     def serialize(self) -> JsonDict:
         return {'.class': 'Void'}
@@ -301,8 +317,8 @@ class UninhabitedType(Type):
     can_be_true = False
     can_be_false = False
 
-    def __init__(self, line: int = -1) -> None:
-        super().__init__(line)
+    def __init__(self, line: int = -1, column: int = -1) -> None:
+        super().__init__(line, column)
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_uninhabited_type(self)
@@ -336,8 +352,8 @@ class NoneTyp(Type):
 
     can_be_true = False
 
-    def __init__(self, is_ret_type: bool = False, line: int = -1) -> None:
-        super().__init__(line)
+    def __init__(self, is_ret_type: bool = False, line: int = -1, column: int = -1) -> None:
+        super().__init__(line, column)
         self.is_ret_type = is_ret_type
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
@@ -373,9 +389,9 @@ class DeletedType(Type):
 
     source = ''   # May be None; name that generated this value
 
-    def __init__(self, source: str = None, line: int = -1) -> None:
+    def __init__(self, source: str = None, line: int = -1, column: int = -1) -> None:
         self.source = source
-        super().__init__(line)
+        super().__init__(line, column)
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_deleted_type(self)
@@ -401,11 +417,11 @@ class Instance(Type):
     erased = False      # True if result of type variable substitution
 
     def __init__(self, typ: mypy.nodes.TypeInfo, args: List[Type],
-                 line: int = -1, erased: bool = False) -> None:
+                 line: int = -1, column: int = -1, erased: bool = False) -> None:
         self.type = typ
         self.args = args
         self.erased = erased
-        super().__init__(line)
+        super().__init__(line, column)
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_instance(self)
@@ -448,13 +464,13 @@ class TypeVarType(Type):
     # See comments in TypeVarDef for more about variance.
     variance = INVARIANT  # type: int
 
-    def __init__(self, binder: TypeVarDef, line: int = -1) -> None:
+    def __init__(self, binder: TypeVarDef, line: int = -1, column: int = -1) -> None:
         self.name = binder.name
         self.id = binder.id
         self.values = binder.values
         self.upper_bound = binder.upper_bound
         self.variance = binder.variance
-        super().__init__(line)
+        super().__init__(line, column)
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_type_var(self)
@@ -544,13 +560,14 @@ class CallableType(FunctionLike):
     def __init__(self,
                  arg_types: List[Type],
                  arg_kinds: List[int],
-                 arg_names: List[str],
+                 arg_names: List[Optional[str]],
                  ret_type: Type,
                  fallback: Instance,
                  name: str = None,
                  definition: SymbolNode = None,
                  variables: List[TypeVarDef] = None,
                  line: int = -1,
+                 column: int = -1,
                  is_ellipsis_args: bool = False,
                  implicit: bool = False,
                  is_classmethod_class: bool = False,
@@ -572,7 +589,7 @@ class CallableType(FunctionLike):
         self.is_ellipsis_args = is_ellipsis_args
         self.implicit = implicit
         self.special_sig = special_sig
-        super().__init__(line)
+        super().__init__(line, column)
 
     def copy_modified(self,
                       arg_types: List[Type] = _dummy,
@@ -584,6 +601,7 @@ class CallableType(FunctionLike):
                       definition: SymbolNode = _dummy,
                       variables: List[TypeVarDef] = _dummy,
                       line: int = _dummy,
+                      column: int = _dummy,
                       is_ellipsis_args: bool = _dummy,
                       special_sig: Optional[str] = _dummy) -> 'CallableType':
         return CallableType(
@@ -596,6 +614,7 @@ class CallableType(FunctionLike):
             definition=definition if definition is not _dummy else self.definition,
             variables=variables if variables is not _dummy else self.variables,
             line=line if line is not _dummy else self.line,
+            column=column if column is not _dummy else self.column,
             is_ellipsis_args=(
                 is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args),
             implicit=self.implicit,
@@ -694,7 +713,7 @@ class Overloaded(FunctionLike):
     def __init__(self, items: List[CallableType]) -> None:
         self._items = items
         self.fallback = items[0].fallback
-        super().__init__(items[0].line)
+        super().__init__(items[0].line, items[0].column)
 
     def items(self) -> List[CallableType]:
         return self._items
@@ -748,13 +767,13 @@ class TupleType(Type):
     implicit = False
 
     def __init__(self, items: List[Type], fallback: Instance, line: int = -1,
-                 implicit: bool = False) -> None:
+                 column: int = -1, implicit: bool = False) -> None:
         self.items = items
         self.fallback = fallback
         self.implicit = implicit
         self.can_be_true = len(self.items) > 0
         self.can_be_false = len(self.items) == 0
-        super().__init__(line)
+        super().__init__(line, column)
 
     def length(self) -> int:
         return len(self.items)
@@ -776,6 +795,18 @@ class TupleType(Type):
                          Instance.deserialize(data['fallback']),
                          implicit=data['implicit'])
 
+    def copy_modified(self, *, fallback: Instance = None,
+                  items: List[Type] = None) -> 'TupleType':
+        if fallback is None:
+            fallback = self.fallback
+        if items is None:
+            items = self.items
+        return TupleType(items, fallback, self.line, self.column)
+
+    def slice(self, begin: int, stride: int, end: int) -> 'TupleType':
+        return TupleType(self.items[begin:end:stride], self.fallback,
+                         self.line, self.column, self.implicit)
+
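
Note the argument order of the new slice(begin, stride, end): it maps onto the usual Python slice items[begin:end:stride] while preserving line and column. A plain-list sketch of the same slicing, using strings as stand-ins for the real Type objects:

    items = ['int', 'str', 'bool']   # stand-ins for the tuple item types
    begin, stride, end = 0, 2, 3
    assert items[begin:end:stride] == ['int', 'bool']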
 
 class StarType(Type):
     """The star type *type_parameter.
@@ -785,9 +816,9 @@ class StarType(Type):
 
     type = None  # type: Type
 
-    def __init__(self, type: Type, line: int = -1) -> None:
+    def __init__(self, type: Type, line: int = -1, column: int = -1) -> None:
         self.type = type
-        super().__init__(line)
+        super().__init__(line, column)
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_star_type(self)
@@ -798,16 +829,16 @@ class UnionType(Type):
 
     items = None  # type: List[Type]
 
-    def __init__(self, items: List[Type], line: int = -1) -> None:
+    def __init__(self, items: List[Type], line: int = -1, column: int = -1) -> None:
         self.items = items
         self.can_be_true = any(item.can_be_true for item in items)
         self.can_be_false = any(item.can_be_false for item in items)
-        super().__init__(line)
+        super().__init__(line, column)
 
     @staticmethod
-    def make_union(items: List[Type], line: int = -1) -> Type:
+    def make_union(items: List[Type], line: int = -1, column: int = -1) -> Type:
         if len(items) > 1:
-            return UnionType(items, line)
+            return UnionType(items, line, column)
         elif len(items) == 1:
             return items[0]
         else:
@@ -817,7 +848,7 @@ class UnionType(Type):
                 return Void()
 
     @staticmethod
-    def make_simplified_union(items: List[Type], line: int = -1) -> Type:
+    def make_simplified_union(items: List[Type], line: int = -1, column: int = -1) -> Type:
         while any(isinstance(typ, UnionType) for typ in items):
             all_items = []  # type: List[Type]
             for typ in items:
@@ -961,8 +992,8 @@ class TypeType(Type):
     # a generic class instance, a union, Any, a type variable...
     item = None  # type: Type
 
-    def __init__(self, item: Type, *, line: int = -1) -> None:
-        super().__init__(line)
+    def __init__(self, item: Type, *, line: int = -1, column: int = -1) -> None:
+        super().__init__(line, column)
         self.item = item
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
@@ -1100,7 +1131,7 @@ class TypeTranslator(TypeVisitor[Type]):
         return t
 
     def visit_instance(self, t: Instance) -> Type:
-        return Instance(t.type, self.translate_types(t.args), t.line)
+        return Instance(t.type, self.translate_types(t.args), t.line, t.column)
 
     def visit_type_var(self, t: TypeVarType) -> Type:
         return t
@@ -1116,13 +1147,13 @@ class TypeTranslator(TypeVisitor[Type]):
     def visit_tuple_type(self, t: TupleType) -> Type:
         return TupleType(self.translate_types(t.items),
                          cast(Any, t.fallback.accept(self)),
-                         t.line)
+                         t.line, t.column)
 
     def visit_star_type(self, t: StarType) -> Type:
-        return StarType(t.type.accept(self), t.line)
+        return StarType(t.type.accept(self), t.line, t.column)
 
     def visit_union_type(self, t: UnionType) -> Type:
-        return UnionType(self.translate_types(t.items), t.line)
+        return UnionType(self.translate_types(t.items), t.line, t.column)
 
     def visit_ellipsis_type(self, t: EllipsisType) -> Type:
         return t
@@ -1145,7 +1176,7 @@ class TypeTranslator(TypeVisitor[Type]):
         return Overloaded(items=items)
 
     def visit_type_type(self, t: TypeType) -> Type:
-        return TypeType(t.item.accept(self), line=t.line)
+        return TypeType(t.item.accept(self), line=t.line, column=t.column)
 
 
 class TypeStrVisitor(TypeVisitor[str]):
@@ -1434,14 +1465,14 @@ def true_only(t: Type) -> Type:
     """
     if not t.can_be_true:
         # All values of t are False-ish, so there are no true values in it
-        return UninhabitedType(line=t.line)
+        return UninhabitedType(line=t.line, column=t.column)
     elif not t.can_be_false:
         # All values of t are already True-ish, so true_only is idempotent in this case
         return t
     elif isinstance(t, UnionType):
         # The true version of a union type is the union of the true versions of its components
         new_items = [true_only(item) for item in t.items]
-        return UnionType.make_simplified_union(new_items, line=t.line)
+        return UnionType.make_simplified_union(new_items, line=t.line, column=t.column)
     else:
         new_t = copy_type(t)
         new_t.can_be_false = False
@@ -1461,7 +1492,7 @@ def false_only(t: Type) -> Type:
     elif isinstance(t, UnionType):
         # The false version of a union type is the union of the false versions of its components
         new_items = [false_only(item) for item in t.items]
-        return UnionType.make_simplified_union(new_items, line=t.line)
+        return UnionType.make_simplified_union(new_items, line=t.line, column=t.column)
     else:
         new_t = copy_type(t)
         new_t.can_be_true = False
diff --git a/mypy/util.py b/mypy/util.py
index d8b10b8..c5b635e 100644
--- a/mypy/util.py
+++ b/mypy/util.py
@@ -12,6 +12,19 @@ ENCODING_RE = re.compile(br'([ \t\v]*#.*(\r\n?|\n))??[ \t\v]*#.*coding[:=][ \t]*
 default_python2_interpreter = ['python2', 'python', '/usr/bin/python']
 
 
+def split_module_names(mod_name: str) -> List[str]:
+    """Return the module and all parent module names.
+
+    So, if `mod_name` is 'a.b.c', this function will return
+    ['a.b.c', 'a.b', 'a'].
+    """
+    out = [mod_name]
+    while '.' in mod_name:
+        mod_name = mod_name.rsplit('.', 1)[0]
+        out.append(mod_name)
+    return out
+
+
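
For reference, the new helper simply walks up the dotted name; the following copy (annotations dropped) shows the behaviour promised by its docstring:

    def split_module_names(mod_name):
        out = [mod_name]
        while '.' in mod_name:
            mod_name = mod_name.rsplit('.', 1)[0]
            out.append(mod_name)
        return out

    assert split_module_names('a.b.c') == ['a.b.c', 'a.b', 'a']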
 def short_type(obj: object) -> str:
     """Return the last component of the type name of an object.
 
diff --git a/mypy/version.py b/mypy/version.py
index 9a8e054..68eb9b6 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -1 +1 @@
-__version__ = '0.4.4'
+__version__ = '0.4.5'
diff --git a/setup.py b/setup.py
index f344cce..3733f72 100644
--- a/setup.py
+++ b/setup.py
@@ -90,7 +90,7 @@ classifiers = [
     'Intended Audience :: Developers',
     'License :: OSI Approved :: MIT License',
     'Operating System :: POSIX',
-    'Programming Language :: Python :: 3.2',
+    'Programming Language :: Python :: 3',
     'Programming Language :: Python :: 3.3',
     'Programming Language :: Python :: 3.4',
     'Programming Language :: Python :: 3.5',
diff --git a/typeshed/stdlib/2.7/HTMLParser.pyi b/typeshed/stdlib/2.7/HTMLParser.pyi
index ae6c309..c6b8a96 100644
--- a/typeshed/stdlib/2.7/HTMLParser.pyi
+++ b/typeshed/stdlib/2.7/HTMLParser.pyi
@@ -2,7 +2,7 @@ from typing import List, Tuple, AnyStr
 from markupbase import ParserBase
 
 class HTMLParser(ParserBase):
-    def __init__(self, *args, convert_charrefs: bool) -> None: ...
+    def __init__(self) -> None: ...
     def feed(self, feed: AnyStr) -> None: ...
     def close(self) -> None: ...
     def reset(self) -> None: ...
@@ -25,4 +25,7 @@ class HTMLParser(ParserBase):
 
     def unescape(self, s: AnyStr) -> AnyStr: ...
 
-class HTMLParseError(Exception): ...
+class HTMLParseError(Exception):
+    msg = ...  # type: str
+    lineno = ...  # type: int
+    offset = ...  # type: int
\ No newline at end of file
diff --git a/typeshed/stdlib/2.7/UserDict.pyi b/typeshed/stdlib/2.7/UserDict.pyi
index e5cfedc..10525fe 100644
--- a/typeshed/stdlib/2.7/UserDict.pyi
+++ b/typeshed/stdlib/2.7/UserDict.pyi
@@ -1,4 +1,5 @@
-from typing import Dict, Generic, Mapping, TypeVar
+from typing import (Any, Container, Dict, Generic, Iterable, Iterator, List,
+                    Mapping, Sized, Tuple, TypeVar, overload)
 
 _KT = TypeVar('_KT')
 _VT = TypeVar('_VT')
@@ -8,4 +9,30 @@ class UserDict(Dict[_KT, _VT], Generic[_KT, _VT]):
 
     def __init__(self, initialdata: Mapping[_KT, _VT] = ...) -> None: ...
 
-    # TODO: DictMixin
+    # TODO: __iter__ is not available for UserDict
+
+class IterableUserDict(UserDict[_KT, _VT], Generic[_KT, _VT]):
+    ...
+
+class DictMixin(Sized, Iterable[_KT], Container[_KT], Generic[_KT, _VT]):
+    def has_key(self, key: _KT) -> bool: ...
+
+    # From  typing.Mapping[_KT, _VT]
+    # (can't inherit because of keys())
+    def get(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def values(self) -> List[_VT]: ...
+    def items(self) -> List[Tuple[_KT, _VT]]: ...
+    def iterkeys(self) -> Iterator[_KT]: ...
+    def itervalues(self) -> Iterator[_VT]: ...
+    def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ...
+    def __contains__(self, o: Any) -> bool: ...
+
+    # From typing.MutableMapping[_KT, _VT]
+    def clear(self) -> None: ...
+    def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def popitem(self) -> Tuple[_KT, _VT]: ...
+    def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
+    @overload
+    def update(self, m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
+    @overload
+    def update(self, m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
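
The DictMixin stub above matches the classic Python 2 pattern: implement __getitem__, __setitem__, __delitem__ and keys(), and the mixin derives get(), items(), update() and friends. A minimal sketch (class name and behaviour are illustrative):

    from UserDict import DictMixin  # Python 2 only

    class LowerDict(DictMixin):
        # Case-insensitive mapping; DictMixin fills in the rest of the dict API.
        def __init__(self):
            self._data = {}
        def __getitem__(self, key):
            return self._data[key.lower()]
        def __setitem__(self, key, value):
            self._data[key.lower()] = value
        def __delitem__(self, key):
            del self._data[key.lower()]
        def keys(self):
            return self._data.keys()

    d = LowerDict()
    d['Name'] = 'mypy'
    assert d.get('NAME') == 'mypy'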
diff --git a/typeshed/stdlib/2.7/__builtin__.pyi b/typeshed/stdlib/2.7/__builtin__.pyi
index 0ef4f3b..9e3dd33 100644
--- a/typeshed/stdlib/2.7/__builtin__.pyi
+++ b/typeshed/stdlib/2.7/__builtin__.pyi
@@ -160,6 +160,7 @@ class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     def __float__(self) -> float: ...
     def __abs__(self) -> float: ...
     def __hash__(self) -> int: ...
+    def __format__(self, format_spec: AnyStr) -> str: ...
 
 class complex(SupportsAbs[float]):
     @overload
@@ -238,9 +239,9 @@ class unicode(basestring, Sequence[unicode]):
     def rindex(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
     def rjust(self, width: int, fillchar: unicode = u' ') -> unicode: ...
     def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
-    def rsplit(self, sep: unicode = ..., maxsplit: int = ...) -> List[unicode]: ...
+    def rsplit(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ...
     def rstrip(self, chars: unicode = ...) -> unicode: ...
-    def split(self, sep: unicode = ..., maxsplit: int = ...) -> List[unicode]: ...
+    def split(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ...
     def splitlines(self, keepends: bool = ...) -> List[unicode]: ...
     def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = 0,
                    end: int = ...) -> bool: ...
@@ -318,7 +319,7 @@ class str(basestring, Sequence[str]):
     @overload
     def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
     @overload
-    def rsplit(self, sep: str = ..., maxsplit: int = ...) -> List[str]: ...
+    def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
     @overload
     def rsplit(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ...
     @overload
@@ -326,7 +327,7 @@ class str(basestring, Sequence[str]):
     @overload
     def rstrip(self, chars: unicode) -> unicode: ...
     @overload
-    def split(self, sep: str = ..., maxsplit: int = ...) -> List[str]: ...
+    def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
     @overload
     def split(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ...
     def splitlines(self, keepends: bool = ...) -> List[str]: ...
@@ -337,7 +338,7 @@ class str(basestring, Sequence[str]):
     def strip(self, chars: unicode) -> unicode: ...
     def swapcase(self) -> str: ...
     def title(self) -> str: ...
-    def translate(self, table: AnyStr, deletechars: AnyStr = None) -> AnyStr: ...
+    def translate(self, table: Optional[AnyStr], deletechars: AnyStr = ...) -> AnyStr: ...
     def upper(self) -> str: ...
     def zfill(self, width: int) -> str: ...
 
@@ -401,9 +402,9 @@ class bytearray(MutableSequence[int]):
     def rindex(self, sub: str, start: int = 0, end: int = ...) -> int: ...
     def rjust(self, width: int, fillchar: str = ...) -> bytearray: ...
     def rpartition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ...
-    def rsplit(self, sep: str = ..., maxsplit: int = ...) -> List[bytearray]: ...
+    def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[bytearray]: ...
     def rstrip(self, chars: str = ...) -> bytearray: ...
-    def split(self, sep: str = ..., maxsplit: int = ...) -> List[bytearray]: ...
+    def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[bytearray]: ...
     def splitlines(self, keepends: bool = ...) -> List[bytearray]: ...
     def startswith(self, prefix: Union[str, Tuple[str, ...]]) -> bool: ...
     def strip(self, chars: str = ...) -> bytearray: ...
@@ -564,7 +565,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def __str__(self) -> str: ...
 
 class set(MutableSet[_T], Generic[_T]):
-    def __init__(self, iterable: Iterable[_T]=None) -> None: ...
+    def __init__(self, iterable: Iterable[_T] = ...) -> None: ...
     def add(self, element: _T) -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> set[_T]: ...
@@ -702,13 +703,13 @@ def map(func: Callable[[_T1, _T2], _S],
         iter1: Iterable[_T1],
         iter2: Iterable[_T2]) -> List[_S]: ...  # TODO more than two iterables
 @overload
-def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
-def max(iterable: Iterable[_T], key: Callable[[_T], Any] = None) -> _T: ...
+def max(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
-def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
-def min(iterable: Iterable[_T], key: Callable[[_T], Any] = None) -> _T: ...
+def min(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
 def next(i: Iterator[_T]) -> _T: ...
 @overload
diff --git a/typeshed/stdlib/2.7/_weakrefset.pyi b/typeshed/stdlib/2.7/_weakrefset.pyi
index d0689f1..27aade6 100644
--- a/typeshed/stdlib/2.7/_weakrefset.pyi
+++ b/typeshed/stdlib/2.7/_weakrefset.pyi
@@ -1,5 +1,14 @@
-from typing import Iterator, Any
+from typing import Iterator, Any, Iterable, MutableSet, TypeVar, Generic
 
-class WeakSet:
-    def __iter__(self) -> Iterator[Any]: ...
-    def add(self, *args: Any, **kwargs: Any) -> Any: ...
+_T = TypeVar('_T')
+
+class WeakSet(MutableSet[_T], Generic[_T]):
+    def __init__(self, data: Iterable[_T] = ...) -> None: ...
+
+    def add(self, x: _T) -> None: ...
+    def discard(self, x: _T) -> None: ...
+    def __contains__(self, x: Any) -> bool: ...
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[_T]: ...
+
+    # TODO: difference, difference_update, ...
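
With the stub above, WeakSet is generic and exposes the usual set protocol. A minimal usage sketch:

    import weakref

    class Node(object):
        pass

    live = weakref.WeakSet()  # can now be annotated as WeakSet[Node]
    n = Node()
    live.add(n)
    assert n in live and len(live) == 1
    del n  # entries disappear once their referents are garbage-collected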
diff --git a/typeshed/stdlib/2.7/builtins.pyi b/typeshed/stdlib/2.7/builtins.pyi
index 0ef4f3b..9e3dd33 100644
--- a/typeshed/stdlib/2.7/builtins.pyi
+++ b/typeshed/stdlib/2.7/builtins.pyi
@@ -160,6 +160,7 @@ class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     def __float__(self) -> float: ...
     def __abs__(self) -> float: ...
     def __hash__(self) -> int: ...
+    def __format__(self, format_spec: AnyStr) -> str: ...
 
 class complex(SupportsAbs[float]):
     @overload
@@ -238,9 +239,9 @@ class unicode(basestring, Sequence[unicode]):
     def rindex(self, sub: unicode, start: int = 0, end: int = 0) -> int: ...
     def rjust(self, width: int, fillchar: unicode = u' ') -> unicode: ...
     def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
-    def rsplit(self, sep: unicode = ..., maxsplit: int = ...) -> List[unicode]: ...
+    def rsplit(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ...
     def rstrip(self, chars: unicode = ...) -> unicode: ...
-    def split(self, sep: unicode = ..., maxsplit: int = ...) -> List[unicode]: ...
+    def split(self, sep: Optional[unicode] = ..., maxsplit: int = ...) -> List[unicode]: ...
     def splitlines(self, keepends: bool = ...) -> List[unicode]: ...
     def startswith(self, prefix: Union[unicode, Tuple[unicode, ...]], start: int = 0,
                    end: int = ...) -> bool: ...
@@ -318,7 +319,7 @@ class str(basestring, Sequence[str]):
     @overload
     def rpartition(self, sep: unicode) -> Tuple[unicode, unicode, unicode]: ...
     @overload
-    def rsplit(self, sep: str = ..., maxsplit: int = ...) -> List[str]: ...
+    def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
     @overload
     def rsplit(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ...
     @overload
@@ -326,7 +327,7 @@ class str(basestring, Sequence[str]):
     @overload
     def rstrip(self, chars: unicode) -> unicode: ...
     @overload
-    def split(self, sep: str = ..., maxsplit: int = ...) -> List[str]: ...
+    def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
     @overload
     def split(self, sep: unicode, maxsplit: int = ...) -> List[unicode]: ...
     def splitlines(self, keepends: bool = ...) -> List[str]: ...
@@ -337,7 +338,7 @@ class str(basestring, Sequence[str]):
     def strip(self, chars: unicode) -> unicode: ...
     def swapcase(self) -> str: ...
     def title(self) -> str: ...
-    def translate(self, table: AnyStr, deletechars: AnyStr = None) -> AnyStr: ...
+    def translate(self, table: Optional[AnyStr], deletechars: AnyStr = ...) -> AnyStr: ...
     def upper(self) -> str: ...
     def zfill(self, width: int) -> str: ...
 
@@ -401,9 +402,9 @@ class bytearray(MutableSequence[int]):
     def rindex(self, sub: str, start: int = 0, end: int = ...) -> int: ...
     def rjust(self, width: int, fillchar: str = ...) -> bytearray: ...
     def rpartition(self, sep: str) -> Tuple[bytearray, bytearray, bytearray]: ...
-    def rsplit(self, sep: str = ..., maxsplit: int = ...) -> List[bytearray]: ...
+    def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[bytearray]: ...
     def rstrip(self, chars: str = ...) -> bytearray: ...
-    def split(self, sep: str = ..., maxsplit: int = ...) -> List[bytearray]: ...
+    def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[bytearray]: ...
     def splitlines(self, keepends: bool = ...) -> List[bytearray]: ...
     def startswith(self, prefix: Union[str, Tuple[str, ...]]) -> bool: ...
     def strip(self, chars: str = ...) -> bytearray: ...
@@ -564,7 +565,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def __str__(self) -> str: ...
 
 class set(MutableSet[_T], Generic[_T]):
-    def __init__(self, iterable: Iterable[_T]=None) -> None: ...
+    def __init__(self, iterable: Iterable[_T] = ...) -> None: ...
     def add(self, element: _T) -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> set[_T]: ...
@@ -702,13 +703,13 @@ def map(func: Callable[[_T1, _T2], _S],
         iter1: Iterable[_T1],
         iter2: Iterable[_T2]) -> List[_S]: ...  # TODO more than two iterables
 @overload
-def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
-def max(iterable: Iterable[_T], key: Callable[[_T], Any] = None) -> _T: ...
+def max(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
-def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
-def min(iterable: Iterable[_T], key: Callable[[_T], Any] = None) -> _T: ...
+def min(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
 def next(i: Iterator[_T]) -> _T: ...
 @overload
diff --git a/typeshed/stdlib/2.7/calendar.pyi b/typeshed/stdlib/2.7/calendar.pyi
index cb458ac..045e912 100644
--- a/typeshed/stdlib/2.7/calendar.pyi
+++ b/typeshed/stdlib/2.7/calendar.pyi
@@ -73,3 +73,14 @@ def setfirstweekday(firstweekday: int) -> None: ...
 def format(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ...
 def formatstring(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ...
 def timegm(tuple: Tuple[int, ...]) -> int: ...
+
+# The constants below are not in the docs or __all__, but enough people have
+# used them that they are now effectively public.
+
+MONDAY = ...  # type: int
+TUESDAY = ...  # type: int
+WEDNESDAY = ...  # type: int
+THURSDAY = ...  # type: int
+FRIDAY = ...  # type: int
+SATURDAY = ...  # type: int
+SUNDAY = ...  # type: int
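
These are the module's plain integer weekday constants, for example:

    import calendar

    assert calendar.MONDAY == 0 and calendar.SUNDAY == 6
    calendar.setfirstweekday(calendar.SUNDAY)  # calendars now start weeks on Sunday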
diff --git a/typeshed/stdlib/2.7/collections.pyi b/typeshed/stdlib/2.7/collections.pyi
index 9d46243..4d004fd 100644
--- a/typeshed/stdlib/2.7/collections.pyi
+++ b/typeshed/stdlib/2.7/collections.pyi
@@ -7,7 +7,7 @@
 # NOTE: These are incomplete!
 
 from typing import (
-    Any, Dict, Generic, TypeVar, Iterable, Tuple, Callable, Mapping, overload, Iterator,
+    Any, Dict, Generic, TypeVar, Iterable, Tuple, Callable, Mapping, overload, Iterator, Type,
     Sized, Optional, List, Set, Sequence, Union, Reversible, MutableMapping, MutableSequence
 )
 import typing
@@ -17,7 +17,8 @@ _KT = TypeVar('_KT')
 _VT = TypeVar('_VT')
 
 # namedtuple is special-cased in the type checker; the initializer is ignored.
-namedtuple = ...  # type: Any
+def namedtuple(typename: str, field_names: Union[str, Iterable[Any]], *,
+               verbose: bool = ..., rename: bool = ...) -> Type[tuple]: ...
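
With a real signature in place of 'Any', calls such as the following now type-check as producing a tuple subclass (the field names are illustrative):

    from collections import namedtuple

    Point = namedtuple('Point', ['x', 'y'])  # field_names may also be the string 'x y'
    p = Point(1, 2)
    assert (p.x, p.y) == (1, 2) and isinstance(p, tuple)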
 
 class deque(Sized, Iterable[_T], Reversible[_T], Generic[_T]):
     def __init__(self, iterable: Iterable[_T] = ...,
diff --git a/typeshed/stdlib/2.7/datetime.pyi b/typeshed/stdlib/2.7/datetime.pyi
index ef4f9a6..e7f4b44 100644
--- a/typeshed/stdlib/2.7/datetime.pyi
+++ b/typeshed/stdlib/2.7/datetime.pyi
@@ -14,16 +14,7 @@ class tzinfo(object):
     def dst(self, dt: Optional[datetime]) -> Optional[timedelta]: ...
     def fromutc(self, dt: datetime) -> datetime: ...
 
-class timezone(tzinfo):
-    utc = ...  # type: tzinfo
-    min = ...  # type: tzinfo
-    max = ...  # type: tzinfo
-
-    def __init__(self, offset: timedelta, name: str = ...) -> None: ...
-    def __hash__(self) -> int: ...
-
 _tzinfo = tzinfo
-_timezone = timezone
 
 class date(object):
     min = ...  # type: date
@@ -94,7 +85,7 @@ class time:
     def isoformat(self) -> str: ...
     def strftime(self, fmt: str) -> str: ...
     def __format__(self, fmt: str) -> str: ...
-    def utcoffset(self) -> Optional[int]: ...
+    def utcoffset(self) -> Optional[timedelta]: ...
     def tzname(self) -> Optional[str]: ...
     def dst(self) -> Optional[int]: ...
     def replace(self, hour: int = ..., minute: int = ..., second: int = ...,
@@ -173,7 +164,7 @@ class datetime(object):
     def tzinfo(self) -> _tzinfo: ...
 
     @classmethod
-    def fromtimestamp(cls, t: float, tz: timezone = ...) -> datetime: ...
+    def fromtimestamp(cls, t: float, tz: _tzinfo = ...) -> datetime: ...
     @classmethod
     def utcfromtimestamp(cls, t: float) -> datetime: ...
     @classmethod
@@ -203,7 +194,7 @@ class datetime(object):
     def isoformat(self, sep: str = ...) -> str: ...
     @classmethod
     def strptime(cls, date_string: str, format: str) -> datetime: ...
-    def utcoffset(self) -> Optional[int]: ...
+    def utcoffset(self) -> Optional[timedelta]: ...
     def tzname(self) -> Optional[str]: ...
     def dst(self) -> Optional[int]: ...
     def __le__(self, other: datetime) -> bool: ...
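
The corrected utcoffset() return type matches runtime behaviour: a timedelta, or None on naive objects. Python 2 has no datetime.timezone, which is also why that class is dropped from this stub; a quick check with an illustrative tzinfo subclass:

    from datetime import datetime, timedelta, tzinfo

    class UTC(tzinfo):
        def utcoffset(self, dt):
            return timedelta(0)
        def dst(self, dt):
            return timedelta(0)
        def tzname(self, dt):
            return 'UTC'

    assert datetime.now().utcoffset() is None                            # naive: no offset
    assert datetime(2016, 10, 8, tzinfo=UTC()).utcoffset() == timedelta(0)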
diff --git a/typeshed/stdlib/2.7/decimal.pyi b/typeshed/stdlib/2.7/decimal.pyi
index ebb6635..134505c 100644
--- a/typeshed/stdlib/2.7/decimal.pyi
+++ b/typeshed/stdlib/2.7/decimal.pyi
@@ -1,17 +1,26 @@
 # Stubs for decimal (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
 
-from typing import Any, SupportsAbs, SupportsFloat, SupportsInt
+from typing import (
+    Any, Dict, NamedTuple, Optional, Sequence, Tuple, Union,
+    SupportsAbs, SupportsFloat, SupportsInt,
+)
 
-ROUND_DOWN = ...  # type: Any
-ROUND_HALF_UP = ...  # type: Any
-ROUND_HALF_EVEN = ...  # type: Any
-ROUND_CEILING = ...  # type: Any
-ROUND_FLOOR = ...  # type: Any
-ROUND_UP = ...  # type: Any
-ROUND_HALF_DOWN = ...  # type: Any
-ROUND_05UP = ...  # type: Any
+_Decimal = Union[Decimal, int]
+_ComparableNum = Union[Decimal, int, float]
+
+DecimalTuple = NamedTuple('DecimalTuple',
+                          [('sign', int),
+                           ('digits', Sequence[int]), # TODO: Use Tuple[int, ...]
+                           ('exponent', int)])
+
+ROUND_DOWN = ...  # type: str
+ROUND_HALF_UP = ...  # type: str
+ROUND_HALF_EVEN = ...  # type: str
+ROUND_CEILING = ...  # type: str
+ROUND_FLOOR = ...  # type: str
+ROUND_UP = ...  # type: str
+ROUND_HALF_DOWN = ...  # type: str
+ROUND_05UP = ...  # type: str
 
 class DecimalException(ArithmeticError):
     def handle(self, context, *args): ...
@@ -40,120 +49,125 @@ class Overflow(Inexact, Rounded): ...
 
 class Underflow(Inexact, Rounded, Subnormal): ...
 
-def setcontext(context): ...
-def getcontext(): ...
-def localcontext(ctx=None): ...
+def setcontext(context: Context): ...
+def getcontext() -> Context: ...
+def localcontext(ctx: Optional[Context] = None) -> _ContextManager: ...
 
 class Decimal(SupportsAbs[Decimal], SupportsFloat, SupportsInt):
-    def __new__(cls, value=..., context=None): ...
+    def __init__(self, value: Union[_Decimal, float, str,
+                                    Tuple[int, Sequence[int], int]] = ...,
+                  context: Context = ...) -> None: ...
     @classmethod
-    def from_float(cls, f): ...
-    def __nonzero__(self): ...
-    def __eq__(self, other, context=None): ...
-    def __ne__(self, other, context=None): ...
-    def __lt__(self, other, context=None): ...
-    def __le__(self, other, context=None): ...
-    def __gt__(self, other, context=None): ...
-    def __ge__(self, other, context=None): ...
-    def compare(self, other, context=None): ...
-    def __hash__(self): ...
-    def as_tuple(self): ...
-    def to_eng_string(self, context=None): ...
-    def __neg__(self, context=None): ...
-    def __pos__(self, context=None): ...
-    def __abs__(self, round=True, context=None): ...
-    def __add__(self, other, context=None): ...
-    def __radd__(self, other, context=None): ...
-    def __sub__(self, other, context=None): ...
-    def __rsub__(self, other, context=None): ...
-    def __mul__(self, other, context=None): ...
-    def __rmul__(self, other, context=None): ...
-    def __truediv__(self, other, context=None): ...
-    def __rtruediv__(self, other, context=None): ...
-    def __div__(self, other, context=None): ...
-    def __rdiv__(self, other, context=None): ...
-    def __divmod__(self, other, context=None): ...
-    def __rdivmod__(self, other, context=None): ...
-    def __mod__(self, other, context=None): ...
-    def __rmod__(self, other, context=None): ...
-    def remainder_near(self, other, context=None): ...
-    def __floordiv__(self, other, context=None): ...
-    def __rfloordiv__(self, other, context=None): ...
-    def __float__(self): ...
-    def __int__(self): ...
-    def __trunc__(self): ...
-    real = ...  # type: property
-    imag = ...  # type: property
-    def conjugate(self): ...
-    def __complex__(self): ...
-    def __long__(self): ...
-    def fma(self, other, third, context=None): ...
-    def __pow__(self, other, modulo=None, context=None): ...
-    def __rpow__(self, other, context=None): ...
-    def normalize(self, context=None): ...
-    def quantize(self, exp, rounding=None, context=None, watchexp=True): ...
-    def same_quantum(self, other): ...
-    def to_integral_exact(self, rounding=None, context=None): ...
-    def to_integral_value(self, rounding=None, context=None): ...
-    def to_integral(self, rounding=None, context=None): ...
-    def sqrt(self, context=None): ...
-    def max(self, other, context=None): ...
-    def min(self, other, context=None): ...
-    def adjusted(self): ...
-    def canonical(self, context=None): ...
-    def compare_signal(self, other, context=None): ...
-    def compare_total(self, other): ...
-    def compare_total_mag(self, other): ...
-    def copy_abs(self): ...
-    def copy_negate(self): ...
-    def copy_sign(self, other): ...
-    def exp(self, context=None): ...
-    def is_canonical(self): ...
-    def is_finite(self): ...
-    def is_infinite(self): ...
-    def is_nan(self): ...
-    def is_normal(self, context=None): ...
-    def is_qnan(self): ...
-    def is_signed(self): ...
-    def is_snan(self): ...
-    def is_subnormal(self, context=None): ...
-    def is_zero(self): ...
-    def ln(self, context=None): ...
-    def log10(self, context=None): ...
-    def logb(self, context=None): ...
-    def logical_and(self, other, context=None): ...
-    def logical_invert(self, context=None): ...
-    def logical_or(self, other, context=None): ...
-    def logical_xor(self, other, context=None): ...
-    def max_mag(self, other, context=None): ...
-    def min_mag(self, other, context=None): ...
-    def next_minus(self, context=None): ...
-    def next_plus(self, context=None): ...
-    def next_toward(self, other, context=None): ...
-    def number_class(self, context=None): ...
-    def radix(self): ...
-    def rotate(self, other, context=None): ...
-    def scaleb(self, other, context=None): ...
-    def shift(self, other, context=None): ...
+    def from_float(cls, f: float) -> Decimal: ...
+    def __nonzero__(self) -> bool: ...
+    def __eq__(self, other: object) -> bool: ...
+    def __ne__(self, other: object) -> bool: ...
+    def __lt__(self, other: _ComparableNum) -> bool: ...
+    def __le__(self, other: _ComparableNum) -> bool: ...
+    def __gt__(self, other: _ComparableNum) -> bool: ...
+    def __ge__(self, other: _ComparableNum) -> bool: ...
+    def compare(self, other: _Decimal) -> Decimal: ...
+    def __hash__(self) -> int: ...
+    def as_tuple(self) -> DecimalTuple: ...
+    def to_eng_string(self, context: Context = ...) -> str: ...
+    def __neg__(self) -> Decimal: ...
+    def __pos__(self) -> Decimal: ...
+    def __abs__(self, round: bool = True) -> Decimal: ...
+    def __add__(self, other: _Decimal) -> Decimal: ...
+    def __radd__(self, other: int) -> Decimal: ...
+    def __sub__(self, other: _Decimal) -> Decimal: ...
+    def __rsub__(self, other: int) -> Decimal: ...
+    def __mul__(self, other: _Decimal) -> Decimal: ...
+    def __rmul__(self, other: int) -> Decimal: ...
+    def __truediv__(self, other: _Decimal) -> Decimal: ...
+    def __rtruediv__(self, other: int) -> Decimal: ...
+    def __div__(self, other: _Decimal) -> Decimal: ...
+    def __rdiv__(self, other: int) -> Decimal: ...
+    def __divmod__(self, other: _Decimal) -> Tuple[Decimal, Decimal]: ...
+    def __rdivmod__(self, other: int) -> Tuple[Decimal, Decimal]: ...
+    def __mod__(self, other: _Decimal) -> Decimal: ...
+    def __rmod__(self, other: int) -> Decimal: ...
+    def remainder_near(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def __floordiv__(self, other: _Decimal) -> Decimal: ...
+    def __rfloordiv__(self, other: int) -> Decimal: ...
+    def __float__(self) -> float: ...
+    def __int__(self) -> int: ...
+    def __trunc__(self) -> int: ...
+    @property
+    def imag(self) -> Decimal: ...
+    @property
+    def real(self) -> Decimal: ...
+    def conjugate(self) -> Decimal: ...
+    def __complex__(self) -> complex: ...
+    def __long__(self) -> long: ...
+    def fma(self, other: _Decimal, third: _Decimal, context: Context = ...) -> Decimal: ...
+    def __pow__(self, other: _Decimal) -> Decimal: ...
+    def __rpow__(self, other: int) -> Decimal: ...
+    def normalize(self, context: Context = ...) -> Decimal: ...
+    def quantize(self, exp: _Decimal, rounding: str = ...,
+                 context: Context = ...) -> Decimal: ...
+    def same_quantum(self, other: Decimal) -> bool: ...
+    def to_integral(self, rounding: str = ..., context: Context = ...) -> Decimal: ...
+    def to_integral_exact(self, rounding: str = ..., context: Context = ...) -> Decimal: ...
+    def to_integral_value(self, rounding: str = ..., context: Context = ...) -> Decimal: ...
+    def sqrt(self, context: Context = ...) -> Decimal: ...
+    def max(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def min(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def adjusted(self) -> int: ...
+    def canonical(self, context: Context = ...) -> Decimal: ...
+    def compare_signal(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def compare_total(self, other: _Decimal) -> Decimal: ...
+    def compare_total_mag(self, other: _Decimal) -> Decimal: ...
+    def copy_abs(self) -> Decimal: ...
+    def copy_negate(self) -> Decimal: ...
+    def copy_sign(self, other: _Decimal) -> Decimal: ...
+    def exp(self, context: Context = ...) -> Decimal: ...
+    def is_canonical(self) -> bool: ...
+    def is_finite(self) -> bool: ...
+    def is_infinite(self) -> bool: ...
+    def is_nan(self) -> bool: ...
+    def is_normal(self, context: Context = ...) -> bool: ...
+    def is_qnan(self) -> bool: ...
+    def is_signed(self) -> bool: ...
+    def is_snan(self) -> bool: ...
+    def is_subnormal(self, context: Context = ...) -> bool: ...
+    def is_zero(self) -> bool: ...
+    def ln(self, context: Context = ...) -> Decimal: ...
+    def log10(self, context: Context = ...) -> Decimal: ...
+    def logb(self, context: Context = ...) -> Decimal: ...
+    def logical_and(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def logical_invert(self, context: Context = ...) -> Decimal: ...
+    def logical_or(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def logical_xor(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def max_mag(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def min_mag(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def next_minus(self, context: Context = ...) -> Decimal: ...
+    def next_plus(self, context: Context = ...) -> Decimal: ...
+    def next_toward(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def number_class(self, context: Context = ...) -> str: ...
+    def radix(self) -> Decimal: ...
+    def rotate(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def scaleb(self, other: _Decimal, context: Context = ...) -> Decimal: ...
+    def shift(self, other: _Decimal, context: Context = ...) -> Decimal: ...
     def __reduce__(self): ...
     def __copy__(self): ...
     def __deepcopy__(self, memo): ...
-    def __format__(self, specifier, context=None, _localeconv=None): ...
+    def __format__(self, specifier, context=None, _localeconv=None) -> str: ...
 
 class _ContextManager:
-    new_context = ...  # type: Any
-    def __init__(self, new_context): ...
-    saved_context = ...  # type: Any
+    new_context = ...  # type: Context
+    saved_context = ...  # type: Context
+    def __init__(self, new_context: Context) -> None: ...
     def __enter__(self): ...
     def __exit__(self, t, v, tb): ...
 
 class Context:
-    prec = ...  # type: Any
-    rounding = ...  # type: Any
-    Emin = ...  # type: Any
-    Emax = ...  # type: Any
-    capitals = ...  # type: Any
-    traps = ...  # type: Any
+    prec = ...  # type: int
+    rounding = ...  # type: str
+    Emin = ...  # type: int
+    Emax = ...  # type: int
+    capitals = ...  # type: int
+    traps = ...  # type: Dict[type, bool]
     flags = ...  # type: Any
     def __init__(self, prec=None, rounding=None, traps=None, flags=None, Emin=None, Emax=None, capitals=None, _clamp=0, _ignored_flags=None): ...
     def clear_flags(self): ...
@@ -226,6 +240,6 @@ class Context:
     def to_integral_value(self, a): ...
     def to_integral(self, a): ...
 
-DefaultContext = ...  # type: Any
-BasicContext = ...  # type: Any
-ExtendedContext = ...  # type: Any
+DefaultContext = ...  # type: Context
+BasicContext = ...  # type: Context
+ExtendedContext = ...  # type: Context
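
The Context-related annotations above correspond to the usual usage pattern:

    from decimal import Decimal, localcontext

    with localcontext() as ctx:  # ctx is a decimal.Context
        ctx.prec = 4             # prec is an int, per the updated stub
        third = Decimal(1) / Decimal(3)
    assert str(third) == '0.3333'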
diff --git a/typeshed/stdlib/2.7/difflib.pyi b/typeshed/stdlib/2.7/difflib.pyi
index 5580ce6..e0809f8 100644
--- a/typeshed/stdlib/2.7/difflib.pyi
+++ b/typeshed/stdlib/2.7/difflib.pyi
@@ -6,13 +6,13 @@
 
 from typing import (
     TypeVar, Callable, Iterable, Iterator, List, NamedTuple, Sequence, Tuple,
-    Generic
+    Generic, Optional
 )
 
 _T = TypeVar('_T')
 
 class SequenceMatcher(Generic[_T]):
-    def __init__(self, isjunk: Callable[[_T], bool] = ...,
+    def __init__(self, isjunk: Optional[Callable[[_T], bool]] = ...,
                  a: Sequence[_T] = ..., b: Sequence[_T] = ...,
                  autojunk: bool = ...) -> None: ...
     def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ...
diff --git a/typeshed/stdlib/2.7/fileinput.pyi b/typeshed/stdlib/2.7/fileinput.pyi
new file mode 100644
index 0000000..e61b41f
--- /dev/null
+++ b/typeshed/stdlib/2.7/fileinput.pyi
@@ -0,0 +1,46 @@
+from typing import Iterable, Callable, IO, Optional, Union, Iterator
+
+class FileInput(Iterable[str]):
+    def __init__(
+        self,
+        files: Optional[Union[str, Iterable[str]]] = None,
+        inplace: bool = ...,
+        backup: str = ...,
+        bufsize: int = ...,
+        mode: str = ...,
+        openhook: Callable[[str, str], IO[str]] = ...
+        ) -> None: ...
+
+    def __del__(self) -> None: ...
+    def close(self) -> None: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def __getitem__(self, i: Union[int, slice]) -> str: ...
+    def next(self) -> str: ...
+    def nextfile(self) -> None: ...
+    def readline(self) -> str: ...
+    def filename(self) -> Optional[str]: ...
+    def lineno(self) -> int: ...
+    def filelineno(self) -> int: ...
+    def fileno(self) -> int: ...
+    def isfirstline(self) -> bool: ...
+    def isstdin(self) -> bool: ...
+
+def input(
+    files: Optional[Union[str, Iterable[str]]] = None,
+    inplace: bool = ...,
+    backup: str = ...,
+    bufsize: int = ...,
+    mode: str = ...,
+    openhook: Callable[[str, str], IO[str]] = ...) -> FileInput: ...
+
+
+def filename() -> Optional[str]: ...
+def lineno() -> int: ...
+def filelineno() -> int: ...
+def isfirstline() -> bool: ...
+def isstdin() -> bool: ...
+def nextfile() -> None: ...
+def close() -> None: ...
+
+def hook_compressed(filename: str, mode: str) -> IO[str]: ...
+def hook_encoded(encoding: str) -> Callable[[str, str], IO[str]]: ...
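
A short usage sketch matching the new stub; it reads the files named on the command line, or stdin when none are given:

    import fileinput

    for line in fileinput.input():
        # filename() and lineno() report the current file and cumulative line number
        print('%s:%d: %s' % (fileinput.filename(), fileinput.lineno(), line.rstrip()))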
diff --git a/typeshed/stdlib/2.7/httplib.pyi b/typeshed/stdlib/2.7/httplib.pyi
index c0d1178..6607954 100644
--- a/typeshed/stdlib/2.7/httplib.pyi
+++ b/typeshed/stdlib/2.7/httplib.pyi
@@ -6,8 +6,6 @@
 from typing import Any, Dict
 import mimetools
 
-responses = ... # type: Dict[int, str]
-
 class HTTPMessage(mimetools.Message):
     def addheader(self, key: str, value: str) -> None: ...
     def addcontinue(self, key: str, more: str) -> None: ...
@@ -122,3 +120,70 @@ class LineAndFileWrapper:
     def read(self, amt=None): ...
     def readline(self): ...
     def readlines(self, size=None): ...
+
+# Constants
+
+responses = ... # type: Dict[int, str]
+
+HTTP_PORT = ... # type: int
+HTTPS_PORT = ... # type: int
+
+# status codes
+# informational
+CONTINUE = ... # type: int
+SWITCHING_PROTOCOLS = ... # type: int
+PROCESSING = ... # type: int
+
+# successful
+OK = ... # type: int
+CREATED = ... # type: int
+ACCEPTED = ... # type: int
+NON_AUTHORITATIVE_INFORMATION = ... # type: int
+NO_CONTENT = ... # type: int
+RESET_CONTENT = ... # type: int
+PARTIAL_CONTENT = ... # type: int
+MULTI_STATUS = ... # type: int
+IM_USED = ... # type: int
+
+# redirection
+MULTIPLE_CHOICES = ... # type: int
+MOVED_PERMANENTLY = ... # type: int
+FOUND = ... # type: int
+SEE_OTHER = ... # type: int
+NOT_MODIFIED = ... # type: int
+USE_PROXY = ... # type: int
+TEMPORARY_REDIRECT = ... # type: int
+
+# client error
+BAD_REQUEST = ... # type: int
+UNAUTHORIZED = ... # type: int
+PAYMENT_REQUIRED = ... # type: int
+FORBIDDEN = ... # type: int
+NOT_FOUND = ... # type: int
+METHOD_NOT_ALLOWED = ... # type: int
+NOT_ACCEPTABLE = ... # type: int
+PROXY_AUTHENTICATION_REQUIRED = ... # type: int
+REQUEST_TIMEOUT = ... # type: int
+CONFLICT = ... # type: int
+GONE = ... # type: int
+LENGTH_REQUIRED = ... # type: int
+PRECONDITION_FAILED = ... # type: int
+REQUEST_ENTITY_TOO_LARGE = ... # type: int
+REQUEST_URI_TOO_LONG = ... # type: int
+UNSUPPORTED_MEDIA_TYPE = ... # type: int
+REQUESTED_RANGE_NOT_SATISFIABLE = ... # type: int
+EXPECTATION_FAILED = ... # type: int
+UNPROCESSABLE_ENTITY = ... # type: int
+LOCKED = ... # type: int
+FAILED_DEPENDENCY = ... # type: int
+UPGRADE_REQUIRED = ... # type: int
+
+# server error
+INTERNAL_SERVER_ERROR = ... # type: int
+NOT_IMPLEMENTED = ... # type: int
+BAD_GATEWAY = ... # type: int
+SERVICE_UNAVAILABLE = ... # type: int
+GATEWAY_TIMEOUT = ... # type: int
+HTTP_VERSION_NOT_SUPPORTED = ... # type: int
+INSUFFICIENT_STORAGE = ... # type: int
+NOT_EXTENDED = ... # type: int
diff --git a/typeshed/stdlib/2.7/inspect.pyi b/typeshed/stdlib/2.7/inspect.pyi
index 4800b29..8abb2a0 100644
--- a/typeshed/stdlib/2.7/inspect.pyi
+++ b/typeshed/stdlib/2.7/inspect.pyi
@@ -10,7 +10,7 @@ ModuleInfo = NamedTuple('ModuleInfo', [('name', str),
                                        ])
 def getmembers(object: object,
                predicate: Callable[[Any], bool] = ...
-              ) -> List[Tuple[str, object]]: ...
+              ) -> List[Tuple[str, Any]]: ...
 def getmoduleinfo(path: str) -> Optional[ModuleInfo]: ...
 def getmodulename(path: str) -> Optional[str]: ...
 
diff --git a/typeshed/stdlib/2.7/io.pyi b/typeshed/stdlib/2.7/io.pyi
index 68a1b2b..3763463 100644
--- a/typeshed/stdlib/2.7/io.pyi
+++ b/typeshed/stdlib/2.7/io.pyi
@@ -41,6 +41,7 @@ class BytesIO(BinaryIO):
     def read1(self) -> str: ...
 
     def __iter__(self) -> Iterator[str]: ...
+    def next(self) -> str: ...
     def __enter__(self) -> 'BytesIO': ...
     def __exit__(self, type, value, traceback) -> bool: ...
 
@@ -67,6 +68,7 @@ class StringIO(TextIO):
     def getvalue(self) -> unicode: ...
 
     def __iter__(self) -> Iterator[unicode]: ...
+    def next(self) -> unicode: ...
     def __enter__(self) -> 'StringIO': ...
     def __exit__(self, type, value, traceback) -> bool: ...
 
@@ -95,6 +97,7 @@ class TextIOWrapper(TextIO):
     def writelines(self, lines: Iterable[unicode]) -> None: ...
 
     def __iter__(self) -> Iterator[unicode]: ...
+    def next(self) -> unicode: ...
     def __enter__(self) -> StringIO: ...
     def __exit__(self, type, value, traceback) -> bool: ...
 
diff --git a/typeshed/stdlib/2.7/numbers.pyi b/typeshed/stdlib/2.7/numbers.pyi
deleted file mode 100644
index f55611a..0000000
--- a/typeshed/stdlib/2.7/numbers.pyi
+++ /dev/null
@@ -1,77 +0,0 @@
-# Stubs for numbers (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-class Number:
-    __metaclass__ = ... # type: Any
-    __hash__ = ... # type: Any
-
-class Complex(Number):
-    def __complex__(self): ...
-    def __nonzero__(self): ...
-    def real(self): ...
-    def imag(self): ...
-    def __add__(self, other): ...
-    def __radd__(self, other): ...
-    def __neg__(self): ...
-    def __pos__(self): ...
-    def __sub__(self, other): ...
-    def __rsub__(self, other): ...
-    def __mul__(self, other): ...
-    def __rmul__(self, other): ...
-    def __div__(self, other): ...
-    def __rdiv__(self, other): ...
-    def __truediv__(self, other): ...
-    def __rtruediv__(self, other): ...
-    def __pow__(self, exponent): ...
-    def __rpow__(self, base): ...
-    def __abs__(self): ...
-    def conjugate(self): ...
-    def __eq__(self, other): ...
-    def __ne__(self, other): ...
-
-class Real(Complex):
-    def __float__(self): ...
-    def __trunc__(self): ...
-    def __divmod__(self, other): ...
-    def __rdivmod__(self, other): ...
-    def __floordiv__(self, other): ...
-    def __rfloordiv__(self, other): ...
-    def __mod__(self, other): ...
-    def __rmod__(self, other): ...
-    def __lt__(self, other): ...
-    def __le__(self, other): ...
-    def __complex__(self): ...
-    @property
-    def real(self): ...
-    @property
-    def imag(self): ...
-    def conjugate(self): ...
-
-class Rational(Real):
-    def numerator(self): ...
-    def denominator(self): ...
-    def __float__(self): ...
-
-class Integral(Rational):
-    def __long__(self): ...
-    def __index__(self): ...
-    def __pow__(self, exponent, modulus=...): ...
-    def __lshift__(self, other): ...
-    def __rlshift__(self, other): ...
-    def __rshift__(self, other): ...
-    def __rrshift__(self, other): ...
-    def __and__(self, other): ...
-    def __rand__(self, other): ...
-    def __xor__(self, other): ...
-    def __rxor__(self, other): ...
-    def __or__(self, other): ...
-    def __ror__(self, other): ...
-    def __invert__(self): ...
-    def __float__(self): ...
-    @property
-    def numerator(self): ...
-    @property
-    def denominator(self): ...
diff --git a/typeshed/stdlib/2.7/os/__init__.pyi b/typeshed/stdlib/2.7/os/__init__.pyi
index 4d8e014..40f5ed0 100644
--- a/typeshed/stdlib/2.7/os/__init__.pyi
+++ b/typeshed/stdlib/2.7/os/__init__.pyi
@@ -197,8 +197,22 @@ def kill(pid: int, sig: int) -> None: ...
 def killpg(pgid: int, sig: int) -> None: ...
 def nice(increment: int) -> int: ...
 
-# TODO: plock, popen*, spawn*, P_*
-
+# TODO: plock, popen*, P_*
+
+def spawnl(mode: int, path: AnyStr, arg0: AnyStr, *args: AnyStr) -> int: ...
+def spawnle(mode: int, path: AnyStr, arg0: AnyStr,
+            *args: Any) -> int: ... # Imprecise sig
+def spawnlp(mode: int, file: AnyStr, arg0: AnyStr,
+            *args: AnyStr) -> int: ...  # Unix only TODO
+def spawnlpe(mode: int, file: AnyStr, arg0: AnyStr, *args: Any) -> int:
+    ... # Imprecise signature; Unix only TODO
+def spawnv(mode: int, path: AnyStr, args: List[AnyStr]) -> int: ...
+def spawnve(mode: int, path: AnyStr, args: List[AnyStr],
+            env: Mapping[str, str]) -> int: ...
+def spawnvp(mode: int, file: AnyStr, args: List[AnyStr]) -> int: ...  # Unix only
+def spawnvpe(mode: int, file: AnyStr, args: List[AnyStr],
+             env: Mapping[str, str]) -> int:
+    ...  # Unix only
 def startfile(path: unicode, operation: str = ...) -> None: ... # Windows only
 def system(command: unicode) -> int: ...
 def times() -> Tuple[float, float, float, float, float]: ...
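
As a quick check of the spawn* signatures added above, this is how they are called at runtime (a sketch assuming a Unix box; os.P_WAIT is a real os constant, but per the TODO above the P_* values themselves are not stubbed yet):

import os

# spawnl passes the argument list inline; by convention arg0 repeats the program name
rc = os.spawnl(os.P_WAIT, '/bin/echo', 'echo', 'hello')

# spawnve takes an argument vector plus an explicit environment mapping
rc = os.spawnve(os.P_WAIT, '/usr/bin/env', ['env'], {'GREETING': 'hi'})

print(rc)  # exit status of the child process
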
diff --git a/typeshed/stdlib/2.7/socket.pyi b/typeshed/stdlib/2.7/socket.pyi
index d99a127..4975c85 100644
--- a/typeshed/stdlib/2.7/socket.pyi
+++ b/typeshed/stdlib/2.7/socket.pyi
@@ -278,7 +278,7 @@ class socket:
     proto = 0
 
     def __init__(self, family: int = ..., type: int = ...,
-                 proto: int = ..., fileno: int = ...) -> None: ...
+                 proto: int = ...) -> None: ...
 
     # --- methods ---
     # second tuple item is an address
@@ -322,7 +322,7 @@ class socket:
     def sendto(self, data: str, address: Union[tuple, str], flags: int = ...) -> int: ...
     def setblocking(self, flag: bool) -> None: ...
     def settimeout(self, value: Union[float, None]) -> None: ...
-    def setsockopt(self, level: int, optname: int, value: Union[int, str]) -> None: ...
+    def setsockopt(self, level: int, optname: int, value: Union[int, bytes]) -> None: ...
     def shutdown(self, how: int) -> None: ...
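
The widened setsockopt value type mirrors how the call is used in practice: plain ints for boolean options, packed byte strings for struct-valued ones. A small sketch:

import socket
import struct

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)      # int form
s.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER,
             struct.pack('ii', 1, 0))                        # packed struct form
s.close()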
 
 
diff --git a/typeshed/stdlib/2.7/string.pyi b/typeshed/stdlib/2.7/string.pyi
index 09fcc5d..4bbfb48 100644
--- a/typeshed/stdlib/2.7/string.pyi
+++ b/typeshed/stdlib/2.7/string.pyi
@@ -52,8 +52,8 @@ class Template(object):
     template = ...  # type: str
 
     def __init__(self, template: str) -> None: ...
-    def substitute(self, mapping: Mapping[str, str], **kwds: str) -> str: ...
-    def safe_substitute(self, mapping: Mapping[str, str],
+    def substitute(self, mapping: Mapping[str, str] = ..., **kwds: str) -> str: ...
+    def safe_substitute(self, mapping: Mapping[str, str] = ...,
                         **kwds: str) -> str: ...
 
 # TODO(MichalPokorny): This is probably badly and/or loosely typed.
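
With mapping now optional, Template can be driven purely by keyword arguments, matching the runtime behaviour; for example:

from string import Template

t = Template('$who likes $what')
print(t.substitute(who='tina', what='kung pao'))       # keywords only
print(t.substitute({'who': 'tina'}, what='kung pao'))  # mapping plus keywords
print(t.safe_substitute())                             # placeholders left intact, no KeyError
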
diff --git a/typeshed/stdlib/2.7/subprocess.pyi b/typeshed/stdlib/2.7/subprocess.pyi
index f3b1eb4..8286e83 100644
--- a/typeshed/stdlib/2.7/subprocess.pyi
+++ b/typeshed/stdlib/2.7/subprocess.pyi
@@ -87,8 +87,7 @@ class Popen:
 
     def poll(self) -> int: ...
     def wait(self) -> int: ...
-    # Return str/bytes
-    def communicate(self, input: Union[str, unicode] = ...) -> Tuple[str, str]: ...
+    def communicate(self, input: Union[bytes, unicode] = ...) -> Tuple[Optional[bytes], Optional[bytes]]: ...
     def send_signal(self, signal: int) -> None: ...
     def terminate(self) -> None: ...
     def kill(self) -> None: ...
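
The new communicate() signature reflects that each element of the returned pair is None for a stream that was not redirected to PIPE, and a byte string otherwise; e.g.:

import subprocess

p = subprocess.Popen(['echo', 'hello'], stdout=subprocess.PIPE)
out, err = p.communicate()
assert err is None          # stderr was not captured
print(out.strip())          # byte string containing 'hello'
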
diff --git a/typeshed/stdlib/2.7/tarfile.pyi b/typeshed/stdlib/2.7/tarfile.pyi
deleted file mode 100644
index 6672135..0000000
--- a/typeshed/stdlib/2.7/tarfile.pyi
+++ /dev/null
@@ -1,239 +0,0 @@
-# Stubs for tarfile (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-class TarError(Exception): ...
-class ExtractError(TarError): ...
-class ReadError(TarError): ...
-class CompressionError(TarError): ...
-class StreamError(TarError): ...
-class HeaderError(TarError): ...
-class EmptyHeaderError(HeaderError): ...
-class TruncatedHeaderError(HeaderError): ...
-class EOFHeaderError(HeaderError): ...
-class InvalidHeaderError(HeaderError): ...
-class SubsequentHeaderError(HeaderError): ...
-
-class _LowLevelFile:
-    fd = ... # type: Any
-    def __init__(self, name, mode) -> None: ...
-    def close(self): ...
-    def read(self, size): ...
-    def write(self, s): ...
-
-class _Stream:
-    name = ... # type: Any
-    mode = ... # type: Any
-    comptype = ... # type: Any
-    fileobj = ... # type: Any
-    bufsize = ... # type: Any
-    buf = ... # type: Any
-    pos = ... # type: Any
-    closed = ... # type: Any
-    zlib = ... # type: Any
-    crc = ... # type: Any
-    dbuf = ... # type: Any
-    cmp = ... # type: Any
-    def __init__(self, name, mode, comptype, fileobj, bufsize) -> None: ...
-    def __del__(self): ...
-    def write(self, s): ...
-    def close(self): ...
-    def tell(self): ...
-    def seek(self, pos=...): ...
-    def read(self, size=...): ...
-
-class _StreamProxy:
-    fileobj = ... # type: Any
-    buf = ... # type: Any
-    def __init__(self, fileobj) -> None: ...
-    def read(self, size): ...
-    def getcomptype(self): ...
-    def close(self): ...
-
-class _BZ2Proxy:
-    blocksize = ... # type: Any
-    fileobj = ... # type: Any
-    mode = ... # type: Any
-    name = ... # type: Any
-    def __init__(self, fileobj, mode) -> None: ...
-    pos = ... # type: Any
-    bz2obj = ... # type: Any
-    buf = ... # type: Any
-    def init(self): ...
-    def read(self, size): ...
-    def seek(self, pos): ...
-    def tell(self): ...
-    def write(self, data): ...
-    def close(self): ...
-
-class _FileInFile:
-    fileobj = ... # type: Any
-    offset = ... # type: Any
-    size = ... # type: Any
-    sparse = ... # type: Any
-    position = ... # type: Any
-    def __init__(self, fileobj, offset, size, sparse=...) -> None: ...
-    def tell(self): ...
-    def seek(self, position): ...
-    def read(self, size=...): ...
-    def readnormal(self, size): ...
-    def readsparse(self, size): ...
-    def readsparsesection(self, size): ...
-
-class ExFileObject:
-    blocksize = ... # type: Any
-    fileobj = ... # type: Any
-    name = ... # type: Any
-    mode = ... # type: Any
-    closed = ... # type: Any
-    size = ... # type: Any
-    position = ... # type: Any
-    buffer = ... # type: Any
-    def __init__(self, tarfile, tarinfo) -> None: ...
-    def read(self, size=...): ...
-    def readline(self, size=...): ...
-    def readlines(self): ...
-    def tell(self): ...
-    def seek(self, pos, whence=...): ...
-    def close(self): ...
-    def __iter__(self): ...
-
-class TarInfo:
-    name = ... # type: Any
-    mode = ... # type: Any
-    uid = ... # type: Any
-    gid = ... # type: Any
-    size = ... # type: Any
-    mtime = ... # type: Any
-    chksum = ... # type: Any
-    type = ... # type: Any
-    linkname = ... # type: Any
-    uname = ... # type: Any
-    gname = ... # type: Any
-    devmajor = ... # type: Any
-    devminor = ... # type: Any
-    offset = ... # type: Any
-    offset_data = ... # type: Any
-    pax_headers = ... # type: Any
-    def __init__(self, name=...) -> None: ...
-    path = ... # type: Any
-    linkpath = ... # type: Any
-    def get_info(self, encoding, errors): ...
-    def tobuf(self, format=..., encoding=..., errors=...): ...
-    def create_ustar_header(self, info): ...
-    def create_gnu_header(self, info): ...
-    def create_pax_header(self, info, encoding, errors): ...
-    @classmethod
-    def create_pax_global_header(cls, pax_headers): ...
-    @classmethod
-    def frombuf(cls, buf): ...
-    @classmethod
-    def fromtarfile(cls, tarfile): ...
-    def isreg(self): ...
-    def isfile(self): ...
-    def isdir(self): ...
-    def issym(self): ...
-    def islnk(self): ...
-    def ischr(self): ...
-    def isblk(self): ...
-    def isfifo(self): ...
-    def issparse(self): ...
-    def isdev(self): ...
-
-class TarFile:
-    debug = ... # type: Any
-    dereference = ... # type: Any
-    ignore_zeros = ... # type: Any
-    errorlevel = ... # type: Any
-    format = ... # type: Any
-    encoding = ... # type: Any
-    errors = ... # type: Any
-    tarinfo = ... # type: Any
-    fileobject = ... # type: Any
-    mode = ... # type: Any
-    name = ... # type: Any
-    fileobj = ... # type: Any
-    pax_headers = ... # type: Any
-    closed = ... # type: Any
-    members = ... # type: Any
-    offset = ... # type: Any
-    inodes = ... # type: Any
-    firstmember = ... # type: Any
-    def __init__(self, name=..., mode=..., fileobj=..., format=..., tarinfo=..., dereference=..., ignore_zeros=..., encoding=..., errors=..., pax_headers=..., debug=..., errorlevel=...) -> None: ...
-    posix = ... # type: Any
-    @classmethod
-    def open(cls, name=..., mode=..., fileobj=..., bufsize=..., **kwargs): ...
-    @classmethod
-    def taropen(cls, name, mode=..., fileobj=..., **kwargs): ...
-    @classmethod
-    def gzopen(cls, name, mode=..., fileobj=..., compresslevel=..., **kwargs): ...
-    @classmethod
-    def bz2open(cls, name, mode=..., fileobj=..., compresslevel=..., **kwargs): ...
-    OPEN_METH = ... # type: Any
-    def close(self): ...
-    def getmember(self, name): ...
-    def getmembers(self): ...
-    def getnames(self): ...
-    def gettarinfo(self, name=..., arcname=..., fileobj=...): ...
-    def list(self, verbose=...): ...
-    def add(self, name, arcname=..., recursive=..., exclude=..., filter=...): ...
-    def addfile(self, tarinfo, fileobj=...): ...
-    def extractall(self, path=..., members=...): ...
-    def extract(self, member, path=...): ...
-    def extractfile(self, member): ...
-    def makedir(self, tarinfo, targetpath): ...
-    def makefile(self, tarinfo, targetpath): ...
-    def makeunknown(self, tarinfo, targetpath): ...
-    def makefifo(self, tarinfo, targetpath): ...
-    def makedev(self, tarinfo, targetpath): ...
-    def makelink(self, tarinfo, targetpath): ...
-    def chown(self, tarinfo, targetpath): ...
-    def chmod(self, tarinfo, targetpath): ...
-    def utime(self, tarinfo, targetpath): ...
-    def next(self): ...
-    def __iter__(self): ...
-    def __enter__(self): ...
-    def __exit__(self, type, value, traceback): ...
-
-class TarIter:
-    tarfile = ... # type: Any
-    index = ... # type: Any
-    def __init__(self, tarfile) -> None: ...
-    def __iter__(self): ...
-    def next(self): ...
-
-class _section:
-    offset = ... # type: Any
-    size = ... # type: Any
-    def __init__(self, offset, size) -> None: ...
-    def __contains__(self, offset): ...
-
-class _data(_section):
-    realpos = ... # type: Any
-    def __init__(self, offset, size, realpos) -> None: ...
-
-class _hole(_section): ...
-
-class _ringbuffer(list):
-    idx = ... # type: Any
-    def __init__(self) -> None: ...
-    def find(self, offset): ...
-
-class TarFileCompat:
-    tarfile = ... # type: Any
-    def __init__(self, file, mode=..., compression=...) -> None: ...
-    def namelist(self): ...
-    def infolist(self): ...
-    def printdir(self): ...
-    def testzip(self): ...
-    def getinfo(self, name): ...
-    def read(self, name): ...
-    def write(self, filename, arcname=..., compress_type=...): ...
-    def writestr(self, zinfo, bytes): ...
-    def close(self): ...
-
-def is_tarfile(name): ...
-
-open = TarFile.open
diff --git a/typeshed/stdlib/2.7/typing.pyi b/typeshed/stdlib/2.7/typing.pyi
index dc1497c..90a2229 100644
--- a/typeshed/stdlib/2.7/typing.pyi
+++ b/typeshed/stdlib/2.7/typing.pyi
@@ -5,7 +5,6 @@ from abc import abstractmethod, ABCMeta
 # Definitions of special type checking related constructs.  Their definition
 # are not used, so their value does not matter.
 
-cast = object()
 overload = object()
 Any = object()
 TypeVar = object()
@@ -15,8 +14,6 @@ Callable = object()
 Type = object()
 builtinclass = object()
 _promote = object()
-NamedTuple = object()
-NewType = object()
 
 # Type aliases
 
@@ -176,17 +173,21 @@ class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]):
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_VT_co]: ...
 
-class Mapping(Sized, Iterable[_KT], Container[_KT], Generic[_KT, _VT]):
+class Mapping(Sized, Iterable[_KT], Container[_KT], Generic[_KT, _VT_co]):
+    # TODO: We wish the key type could also be covariant, but that doesn't work,
+    # see discussion in https://github.com/python/typing/pull/273.
     @abstractmethod
-    def __getitem__(self, k: _KT) -> _VT: ...
+    def __getitem__(self, k: _KT) -> _VT_co:
+        ...
     # Mixin methods
-    def get(self, k: _KT, default: _VT = ...) -> _VT: ...
+    def get(self, k: _KT, default: _VT_co = ...) -> _VT_co:  # type: ignore
+        ...
     def keys(self) -> list[_KT]: ...
-    def values(self) -> list[_VT]: ...
-    def items(self) -> list[Tuple[_KT, _VT]]: ...
+    def values(self) -> list[_VT_co]: ...
+    def items(self) -> list[Tuple[_KT, _VT_co]]: ...
     def iterkeys(self) -> Iterator[_KT]: ...
-    def itervalues(self) -> Iterator[_VT]: ...
-    def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ...
+    def itervalues(self) -> Iterator[_VT_co]: ...
+    def iteritems(self) -> Iterator[Tuple[_KT, _VT_co]]: ...
     def __contains__(self, o: object) -> bool: ...
 
 class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
@@ -208,7 +209,7 @@ Text = unicode
 
 TYPE_CHECKING = True
 
-class IO(Iterable[AnyStr], Generic[AnyStr]):
+class IO(Iterator[AnyStr], Generic[AnyStr]):
     # TODO detach
     # TODO use abstract properties
     @property
@@ -251,11 +252,15 @@ class IO(Iterable[AnyStr], Generic[AnyStr]):
     def writelines(self, lines: Iterable[AnyStr]) -> None: ...
 
     @abstractmethod
+    def next(self) -> AnyStr: ...
+    @abstractmethod
     def __iter__(self) -> Iterator[AnyStr]: ...
     @abstractmethod
     def __enter__(self) -> 'IO[AnyStr]': ...
     @abstractmethod
-    def __exit__(self, t: type, value: Any, traceback: Any) -> bool: ...
+    def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException],
+                 # TODO: traceback should be TracebackType but that's defined in types
+                 traceback: Optional[Any]) -> bool: ...
 
 class BinaryIO(IO[str]):
     # TODO readinto
@@ -342,3 +347,14 @@ class Pattern(Generic[AnyStr]):
 # Functions
 
 def get_type_hints(obj: Callable) -> dict[str, Any]: ...
+
+def cast(tp: Type[_T], obj: Any) -> _T: ...
+
+# Type constructors
+
+# NamedTuple is special-cased in the type checker; the initializer is ignored.
+def NamedTuple(typename: str, fields: Iterable[Tuple[str, Any]], *,
+               verbose: bool = ..., rename: bool = ...) -> Type[tuple]: ...
+
+def NewType(name: str, tp: Type[_T]) -> Type[_T]: ...
+
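
Declaring cast, NamedTuple and NewType as functions (rather than bare objects) lets ordinary call sites type check against the stub. A sketch of the calls the new signatures describe, using Python 2 style type comments:

from typing import List, NamedTuple, NewType, cast

UserId = NewType('UserId', int)                       # distinct for the checker, plain int at runtime
Point = NamedTuple('Point', [('x', int), ('y', int)])

def first_point(raw):
    # type: (object) -> Point
    points = cast(List[Point], raw)                   # no runtime effect, only informs the checker
    return points[0]

print(first_point([Point(1, 2)]))
print(UserId(42) + 1)                                 # still an int at runtime
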
diff --git a/typeshed/stdlib/2.7/unittest.pyi b/typeshed/stdlib/2.7/unittest.pyi
index 9c9ba90..318405a 100644
--- a/typeshed/stdlib/2.7/unittest.pyi
+++ b/typeshed/stdlib/2.7/unittest.pyi
@@ -69,23 +69,33 @@ class TestCase(Testable):
     def assertTrue(self, expr: Any, msg: object = ...) -> None: ...
     def assertEqual(self, first: Any, second: Any,
                     msg: object = ...) -> None: ...
+    def assertEquals(self, first: Any, second: Any,
+                     msg: object = ...) -> None: ...
     def failUnlessEqual(self, first: Any, second: Any,
                         msg: object = ...) -> None: ...
     def assertNotEqual(self, first: Any, second: Any,
                        msg: object = ...) -> None: ...
+    def assertNotEquals(self, first: Any, second: Any,
+                        msg: object = ...) -> None: ...
     def failIfEqual(self, first: Any, second: Any,
                     msg: object = ...) -> None: ...
     def assertAlmostEqual(self, first: float, second: float, places: int = ...,
                           msg: object = ...,
                           delta: float = ...) -> None: ...
-    def failUnlessAlmostEqual(self, first: float, second: float,
-                              places: int = ...,
+    def assertAlmostEquals(self, first: float, second: float, places: int = ...,
+                           msg: object = ...,
+                           delta: float = ...) -> None: ...
+    def failUnlessAlmostEqual(self, first: float, second: float, places: int = ...,
                               msg: object = ...) -> None: ...
-    def assertNotAlmostEqual(self, first: float, second: float,
-                             places: int = ..., msg: object = ...,
+    def assertNotAlmostEqual(self, first: float, second: float, places: int = ...,
+                             msg: object = ...,
                              delta: float = ...) -> None: ...
+    def assertNotAlmostEquals(self, first: float, second: float, places: int = ...,
+                              msg: object = ...,
+                              delta: float = ...) -> None: ...
     def failIfAlmostEqual(self, first: float, second: float, places: int = ...,
-                          msg: object = ...) -> None: ...
+                          msg: object = ...,
+                          delta: float = ...) -> None: ...
     def assertGreater(self, first: Any, second: Any,
                       msg: object = ...) -> None: ...
     def assertGreaterEqual(self, first: Any, second: Any,
@@ -93,8 +103,7 @@ class TestCase(Testable):
     def assertMultiLineEqual(self, first: str, second: str,
                              msg: object = ...) -> None: ...
     def assertSequenceEqual(self, first: Sequence[Any], second: Sequence[Any],
-                            msg: object = ...,
-                            seq_type: type = ...) -> None: ...
+                            msg: object = ..., seq_type: type = ...) -> None: ...
     def assertListEqual(self, first: List[Any], second: List[Any],
                         msg: object = ...) -> None: ...
     def assertTupleEqual(self, first: Tuple[Any, ...], second: Tuple[Any, ...],
@@ -108,6 +117,7 @@ class TestCase(Testable):
     def assertLessEqual(self, first: Any, second: Any,
                         msg: object = ...) -> None: ...
     def assertRaises(self, expected_exception: type, *args: Any, **kwargs: Any) -> Any: ...
+    def failUnlessRaises(self, expected_exception: type, *args: Any, **kwargs: Any) -> Any: ...
     def failIf(self, expr: Any, msg: object = ...) -> None: ...
     def assertFalse(self, expr: Any, msg: object = ...) -> None: ...
     def assertIs(self, first: object, second: object,
@@ -134,20 +144,6 @@ class TestCase(Testable):
     def addCleanup(function: Any, *args: Any, **kwargs: Any) -> None: ...
     def skipTest(self, reason: Any) -> None: ...
 
-    assertEquals = assertEqual
-    assertNotEquals = assertNotEqual
-    assertAlmostEquals = assertAlmostEqual
-    assertNotAlmostEquals = assertNotAlmostEqual
-    assert_ = assertTrue
-
-    failUnlessEqual = assertEqual
-    failIfEqual = assertNotEqual
-    failUnlessAlmostEqual = assertAlmostEqual
-    failIfAlmostEqual = assertNotAlmostEqual
-    failUnless = assertTrue
-    failUnlessRaises = assertRaises
-    failIf = assertFalse
-
 class CallableTestCase(Testable):
     def __init__(self, testFunc: Callable[[], None],
                  setUp: Callable[[], None] = ...,
@@ -184,3 +180,6 @@ def skip(reason: str) -> Any: ...
 def main(module: str = ..., defaultTest: str = ...,
          argv: List[str] = ..., testRunner: Any = ...,
          testLoader: Any = ...) -> None: ... # TODO types
+
+# private but occasionally used
+util = ...  # type: module
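
Spelling out the deprecated camelCase/fail* aliases as explicit methods keeps legacy test code type checking; for example:

import unittest

class LegacySpelling(unittest.TestCase):
    def test_aliases(self):
        self.assertEquals(2 + 2, 4)                      # alias of assertEqual
        self.failUnlessRaises(ValueError, int, 'nope')   # alias of assertRaises

if __name__ == '__main__':
    unittest.main()
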
diff --git a/typeshed/stdlib/2.7/weakref.pyi b/typeshed/stdlib/2.7/weakref.pyi
index 93fd9e8..a68242b 100644
--- a/typeshed/stdlib/2.7/weakref.pyi
+++ b/typeshed/stdlib/2.7/weakref.pyi
@@ -1,74 +1,47 @@
 # Stubs for weakref (Python 2)
 #
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
+# Based on stub automatically generated by stubgen.
 
-from typing import Any
-import UserDict
-from _weakref import (
-    getweakrefcount as getweakrefcount,
-    getweakrefs as getweakrefs,
-    ref as ref,
-    proxy as proxy,
-    CallableProxyType as CallableProxyType,
-    ProxyType as ProxyType,
-    ReferenceType as ReferenceType
-)
-from _weakrefset import WeakSet as WeakSet
-from exceptions import ReferenceError as ReferenceError
+from typing import Any, MutableMapping, Generic, Iterator, List, TypeVar
+from _weakref import (getweakrefcount, getweakrefs, ref, proxy,
+                      CallableProxyType, ProxyType, ReferenceType)
+from _weakrefset import WeakSet
 
 ProxyTypes = ... # type: Any
 
-class WeakValueDictionary(UserDict.UserDict):
-    def __init__(self, *args, **kw): ...
-    def __getitem__(self, key): ...
-    def __delitem__(self, key): ...
-    def __contains__(self, key): ...
-    def has_key(self, key): ...
-    def __setitem__(self, key, value): ...
-    def clear(self): ...
-    def copy(self): ...
-    __copy__ = ... # type: Any
-    def __deepcopy__(self, memo): ...
-    def get(self, key, default=None): ...
-    def items(self): ...
-    def iteritems(self): ...
-    def iterkeys(self): ...
-    __iter__ = ... # type: Any
-    def itervaluerefs(self): ...
-    def itervalues(self): ...
-    def popitem(self): ...
-    def pop(self, key, *args): ...
-    def setdefault(self, key, default=None): ...
-    def update(self, dict=None, **kwargs): ...
-    def valuerefs(self): ...
-    def values(self): ...
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
 
+# Don't inherit from typing.Dict since
+# isinstance(weakref.WeakValueDictionary(), dict) is False
+class WeakValueDictionary(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+    def itervaluerefs(self) -> Iterator[ReferenceType[_VT]]: ...
+    def valuerefs(self) -> List[ReferenceType[_VT]]: ...
+
+    def __setitem__(self, k: _KT, v: _VT) -> None: ...
+    def __delitem__(self, v: _KT) -> None: ...
+    def __getitem__(self, k: _KT) -> _VT: ...
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[_KT]: ...
+
+    def has_key(self, key: _KT) -> bool: ...
+    def copy(self) -> WeakValueDictionary[_KT, _VT]: ...
+
+class WeakKeyDictionary(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+    def iterkeyrefs(self) -> Iterator[ReferenceType[_KT]]: ...
+    def keyrefs(self) -> List[ReferenceType[_KT]]: ...
+
+    def __setitem__(self, k: _KT, v: _VT) -> None: ...
+    def __delitem__(self, v: _KT) -> None: ...
+    def __getitem__(self, k: _KT) -> _VT: ...
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[_KT]: ...
+
+    def has_key(self, key: _KT) -> bool: ...
+    def copy(self) -> WeakKeyDictionary[_KT, _VT]: ...
+
+# TODO: make generic
 class KeyedRef(ReferenceType):
     key = ... # type: Any
     def __new__(type, ob, callback, key): ...
     def __init__(self, ob, callback, key): ...
-
-class WeakKeyDictionary(UserDict.UserDict):
-    data = ... # type: Any
-    def __init__(self, dict=None): ...
-    def __delitem__(self, key): ...
-    def __getitem__(self, key): ...
-    def __setitem__(self, key, value): ...
-    def copy(self): ...
-    __copy__ = ... # type: Any
-    def __deepcopy__(self, memo): ...
-    def get(self, key, default=None): ...
-    def has_key(self, key): ...
-    def __contains__(self, key): ...
-    def items(self): ...
-    def iteritems(self): ...
-    def iterkeyrefs(self): ...
-    def iterkeys(self): ...
-    __iter__ = ... # type: Any
-    def itervalues(self): ...
-    def keyrefs(self): ...
-    def keys(self): ...
-    def popitem(self): ...
-    def pop(self, key, *args): ...
-    def setdefault(self, key, default=None): ...
-    def update(self, dict=None, **kwargs): ...
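
With WeakValueDictionary now a generic MutableMapping, key and value types can be declared while keeping the weak semantics (a sketch; the subscripted type goes in a comment because the class is not subscriptable at runtime):

import weakref

class Handle(object):
    pass

registry = weakref.WeakValueDictionary()  # type: weakref.WeakValueDictionary[str, Handle]
h = Handle()
registry['main'] = h
print(list(registry.keys()))   # ['main'] while a strong reference exists
del h
print(list(registry.keys()))   # [] once the value has been collected (CPython)
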
diff --git a/typeshed/stdlib/2and3/argparse.pyi b/typeshed/stdlib/2and3/argparse.pyi
index 32b734c..627426d 100644
--- a/typeshed/stdlib/2and3/argparse.pyi
+++ b/typeshed/stdlib/2and3/argparse.pyi
@@ -143,6 +143,7 @@ class _ArgumentGroup:
                      metavar: Union[str, Tuple[str, ...]] = ...,
                      dest: str = ...,
                      version: str = ...) -> None: ...
+    def add_mutually_exclusive_group(self, required: bool = ...) -> _MutuallyExclusiveGroup: ...
 
 class _MutuallyExclusiveGroup(_ArgumentGroup): ...
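
add_mutually_exclusive_group is also available on argument groups at runtime, which is what the added method captures; e.g.:

import argparse

parser = argparse.ArgumentParser()
output = parser.add_argument_group('output')
mode = output.add_mutually_exclusive_group(required=False)
mode.add_argument('--json', action='store_true')
mode.add_argument('--csv', action='store_true')
print(parser.parse_args(['--json']))   # Namespace(csv=False, json=True)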
 
diff --git a/typeshed/stdlib/2and3/asynchat.pyi b/typeshed/stdlib/2and3/asynchat.pyi
new file mode 100644
index 0000000..f4a57a4
--- /dev/null
+++ b/typeshed/stdlib/2and3/asynchat.pyi
@@ -0,0 +1,41 @@
+from typing import Union, Tuple, Sequence
+from abc import abstractmethod
+
+import asyncore
+import socket
+
+class simple_producer:
+    def __init__(self, data: str, buffer_size: int = ...) -> None: ...
+    def more(self) -> str: ...
+
+class async_chat (asyncore.dispatcher):
+    ac_in_buffer_size = ...  # type: int
+    ac_out_buffer_size = ...  # type: int
+    def __init__(self, sock: socket.socket = None, map: asyncore._maptype = None) -> None: ...
+
+    @abstractmethod
+    def collect_incoming_data(self, data: str) -> None: ...
+    @abstractmethod
+    def found_terminator(self) -> None: ...
+    def set_terminator(self, term: Union[str, int, None]) -> None: ...
+    def get_terminator(self) -> Union[str, int, None]: ...
+    def handle_read(self) -> None: ...
+    def handle_write(self) -> None: ...
+    def handle_close(self) -> None: ...
+    def push(self, data: str) -> None: ...
+    def push_with_producer(self, producer: simple_producer) -> None: ...
+    def readable(self) -> bool: ...
+    def writable(self) -> bool: ...
+    def close_when_done(self) -> None: ...
+    def initiate_send(self) -> None: ...
+    def discard_buffers(self) -> None: ...
+
+import sys
+if sys.version_info < (3, 0, 0):
+    class fifo:
+        def __init__(self, list: Sequence[Union[str, simple_producer]] = ...) -> None: ...
+        def __len__(self) -> int: ...
+        def is_empty(self) -> bool: ...
+        def first(self) -> str: ...
+        def push(self, data: Union[str, simple_producer]) -> None: ...
+        def pop(self) -> Tuple[int, str]: ...
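
A minimal async_chat subclass exercising the two abstract hooks declared above (Python 2 flavoured, matching the str-based signatures; found_terminator fires when the terminator set via set_terminator is seen):

import asynchat

class LineEcho(asynchat.async_chat):
    def __init__(self, sock):
        asynchat.async_chat.__init__(self, sock)
        self.buffer = []
        self.set_terminator('\r\n')

    def collect_incoming_data(self, data):
        self.buffer.append(data)

    def found_terminator(self):
        line = ''.join(self.buffer)
        self.buffer = []
        self.push('echo: ' + line + '\r\n')
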
diff --git a/typeshed/stdlib/2and3/asyncore.pyi b/typeshed/stdlib/2and3/asyncore.pyi
new file mode 100644
index 0000000..86af4a5
--- /dev/null
+++ b/typeshed/stdlib/2and3/asyncore.pyi
@@ -0,0 +1,127 @@
+from typing import Tuple, Union, Optional, Any, Dict, overload
+
+import select, socket, sys, time, warnings, os
+
+from errno import (EALREADY, EINPROGRESS, EWOULDBLOCK, ECONNRESET, EINVAL,
+                   ENOTCONN, ESHUTDOWN, EINTR, EISCONN, EBADF, ECONNABORTED,
+                   EPIPE, EAGAIN, errorcode)
+
+# cyclic dependence with asynchat
+_maptype = Dict[str, Any]
+
+
+class ExitNow(Exception): pass
+
+def read(obj: Any) -> None: ...
+def write(obj: Any) -> None: ...
+def readwrite(obj: Any, flags: int) -> None: ...
+def poll(timeout: float = ..., map: _maptype = ...) -> None: ...
+def poll2(timeout: float = ..., map: _maptype = ...) -> None: ...
+
+poll3 = poll2
+
+def loop(timeout: float = ..., use_poll: bool = ..., map: _maptype = ..., count: int = None) -> None: ...
+
+
+# Not really subclass of socket.socket; it's only delegation.
+# It is not covariant to it.
+class dispatcher:
+
+    debug = ...  # type: bool
+    connected = ...  # type: bool
+    accepting = ...  # type: bool
+    connecting = ...  # type: bool
+    closing = ...  # type: bool
+    ignore_log_types = ...  # type: frozenset[str]
+
+    def __init__(self, sock: socket.socket = None, map: _maptype = ...) -> None: ...
+    def add_channel(self, map: _maptype = ...) -> None: ...
+    def del_channel(self, map: _maptype = ...) -> None: ...
+    def create_socket(self, family: int, type: int) -> None: ...
+    def set_socket(self, sock: socket.socket, map: _maptype = ...) -> None: ...
+    def set_reuse_addr(self) -> None: ...
+    def readable(self) -> bool: ...
+    def writable(self) -> bool: ...
+    def accept(self) -> Optional[Tuple[socket.socket, Any]]: ...
+    def recv(self, buffer_size: int) -> str: ...
+    def log(self, message: Any) -> None: ...
+
+    def log_info(self, message: Any, type: str = ...) -> None: ...
+    def handle_read_event(self) -> None: ...
+    def handle_connect_event(self) -> None: ...
+    def handle_write_event(self) -> None: ...
+    def handle_expt_event(self) -> None: ...
+    def handle_error(self) -> None: ...
+    def handle_expt(self) -> None: ...
+    def handle_read(self) -> None: ...
+    def handle_write(self) -> None: ...
+    def handle_connect(self) -> None: ...
+    def handle_accept(self) -> None: ...
+    def handle_close(self) -> None: ...
+    def detach(self) -> int: ...
+    def fileno(self) -> int: ...
+
+    # return value is an address
+    def getpeername(self) -> Any: ...
+    def getsockname(self) -> Any: ...
+
+    @overload
+    def getsockopt(self, level: int, optname: int) -> int: ...
+    @overload
+    def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ...
+
+    def gettimeout(self) -> float: ...
+    def ioctl(self, control: object,
+              option: Tuple[int, int, int]) -> None: ...
+    def listen(self, backlog: int) -> None: ...
+    # TODO the return value may be BinaryIO or TextIO, depending on mode
+    def makefile(self, mode: str = ..., buffering: int = ...,
+                 encoding: str = ..., errors: str = ...,
+                 newline: str = ...) -> Any:
+        ...
+
+    # return type is an address
+    def recvfrom(self, bufsize: int, flags: int = ...) -> Any: ...
+    def recvfrom_into(self, buffer: str, nbytes: int, flags: int = ...) -> Any: ...
+    def recv_into(self, buffer: str, nbytes: int, flags: int = ...) -> Any: ...
+    def send(self, data: str, flags: int = ...) -> Optional[int]: ...
+    def sendall(self, data: str, flags: int = ...) -> None: ...
+    def sendto(self, data: str, address: Union[tuple, str], flags: int = ...) -> int: ...
+    def setblocking(self, flag: bool) -> None: ...
+    def settimeout(self, value: Union[float, None]) -> None: ...
+    def setsockopt(self, level: int, optname: int, value: Union[int, str]) -> None: ...
+    def shutdown(self, how: int) -> None: ...
+
+class dispatcher_with_send(dispatcher):
+    def __init__(self, sock: socket.socket = ..., map: _maptype = ...) -> None: ...
+    def initiate_send(self) -> None: ...
+    def handle_write(self) -> None: ...
+    # incompatible signature:
+    # def send(self, data: str) -> Optional[int]: ...
+
+def compact_traceback() -> Tuple[Tuple[str, str, str], type, type, str]: ...
+def close_all(map: _maptype = ..., ignore_all: bool = ...) -> None: ...
+
+# if os.name == 'posix':
+#    import fcntl
+class file_wrapper:
+    fd = ...  # type: int
+
+    def __init__(self, fd: int) -> None: ...
+    def recv(self, bufsize: int, flags: int = ...) -> str: ...
+    def send(self, data: str, flags: int = ...) -> int: ...
+
+    @overload
+    def getsockopt(self, level: int, optname: int) -> int: ...
+    @overload
+    def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ...
+
+    def read(self, bufsize: int, flags: int = ...) -> str: ...
+    def write(self, data: str, flags: int = ...) -> int: ...
+
+    def close(self) -> None: ...
+    def fileno(self) -> int: ...
+
+class file_dispatcher(dispatcher):
+    def __init__(self, fd: int, map: _maptype = ...) -> None: ...
+    def set_file(self, fd: int) -> None: ...
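
For reference, the classic dispatcher pattern these stubs describe (a Python 2 flavoured sketch; connect is a runtime dispatcher method even though it is not listed above):

import asyncore
import socket

class HTTPClient(asyncore.dispatcher):
    def __init__(self, host):
        asyncore.dispatcher.__init__(self)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.connect((host, 80))
        self.buffer = 'GET / HTTP/1.0\r\nHost: %s\r\n\r\n' % host

    def handle_connect(self):
        pass

    def handle_close(self):
        self.close()

    def handle_read(self):
        print(self.recv(8192))

    def writable(self):
        return len(self.buffer) > 0

    def handle_write(self):
        sent = self.send(self.buffer)
        self.buffer = self.buffer[sent:]

# HTTPClient('www.python.org'); asyncore.loop()  would drive the socket map
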
diff --git a/typeshed/stdlib/2and3/fractions.pyi b/typeshed/stdlib/2and3/fractions.pyi
new file mode 100644
index 0000000..66408fb
--- /dev/null
+++ b/typeshed/stdlib/2and3/fractions.pyi
@@ -0,0 +1,94 @@
+# Stubs for fractions
+# See https://docs.python.org/3/library/fractions.html
+#
+# Note: these stubs are incomplete. The more complex type
+# signatures are currently omitted. Also see numbers.pyi.
+
+from typing import Optional, TypeVar, Union, overload
+from numbers import Real, Integral, Rational
+from decimal import Decimal
+import sys
+
+_ComparableNum = Union[int, float, Decimal, Real]
+
+
+@overload
+def gcd(a: int, b: int) -> int: ...
+@overload
+def gcd(a: Integral, b: int) -> Integral: ...
+@overload
+def gcd(a: int, b: Integral) -> Integral: ...
+@overload
+def gcd(a: Integral, b: Integral) -> Integral: ...
+
+
+class Fraction(Rational):
+    @overload
+    def __init__(self,
+                 numerator: Union[int, Rational] = 0,
+                 denominator: Optional[Union[int, Rational]] = 0,
+                 *,
+                 _normalize: bool = True) -> None: ...
+    @overload
+    def __init__(self, value: float, *, _normalize=True) -> None: ...
+    @overload
+    def __init__(self, value: Decimal, *, _normalize=True) -> None: ...
+    @overload
+    def __init__(self, value: str, *, _normalize=True) -> None: ...
+
+    @classmethod
+    def from_float(cls, f: float) -> 'Fraction': ...
+    @classmethod
+    def from_decimal(cls, dec: Decimal) -> 'Fraction': ...
+    def limit_denominator(self, max_denominator: int = 1000000) -> 'Fraction': ...
+
+    @property
+    def numerator(self) -> int: ...
+    @property
+    def denominator(self) -> int: ...
+
+    def __add__(self, other): ...
+    def __radd__(self, other): ...
+    def __sub__(self, other): ...
+    def __rsub__(self, other): ...
+    def __mul__(self, other): ...
+    def __rmul__(self, other): ...
+    def __truediv__(self, other): ...
+    def __rtruediv__(self, other): ...
+    if sys.version_info < (3, 0):
+        def __div__(self, other): ...
+        def __rdiv__(self, other): ...
+    def __floordiv__(self, other) -> int: ...
+    def __rfloordiv__(self, other) -> int: ...
+    def __mod__(self, other): ...
+    def __rmod__(self, other): ...
+    def __pow__(self, other): ...
+    def __rpow__(self, other): ...
+
+    def __pos__(self) -> 'Fraction': ...
+    def __neg__(self) -> 'Fraction': ...
+    def __abs__(self) -> 'Fraction': ...
+    def __trunc__(self) -> int: ...
+    if sys.version_info >= (3, 0):
+        def __floor__(self) -> int: ...
+        def __ceil__(self) -> int: ...
+        def __round__(self, ndigits=None): ...
+
+    def __hash__(self) -> int: ...
+    def __eq__(self, other: object) -> bool: ...
+    def __lt__(self, other: _ComparableNum) -> bool: ...
+    def __gt__(self, other: _ComparableNum) -> bool: ...
+    def __le__(self, other: _ComparableNum) -> bool: ...
+    def __ge__(self, other: _ComparableNum) -> bool: ...
+    if sys.version_info >= (3, 0):
+        def __bool__(self) -> bool: ...
+    else:
+        def __nonzero__(self) -> bool: ...
+
+    # Not actually defined within fractions.py, but provides more useful
+    # overrides
+    @property
+    def real(self) -> 'Fraction': ...
+    @property
+    def imag(self) -> 'Fraction': ...
+    def conjugate(self) -> 'Fraction': ...
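
Typical usage covered by the new stub: the constructor overloads, limit_denominator and the numerator/denominator properties.

from fractions import Fraction

a = Fraction(3, 4)
b = Fraction('1.5')                         # string overload
c = Fraction(0.1).limit_denominator(10)     # Fraction(1, 10)

print(a + b)                                # 9/4
print(c.numerator, c.denominator)           # 1 10
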
diff --git a/typeshed/stdlib/2and3/locale.pyi b/typeshed/stdlib/2and3/locale.pyi
index 7a7dac7..9ad00e7 100644
--- a/typeshed/stdlib/2and3/locale.pyi
+++ b/typeshed/stdlib/2and3/locale.pyi
@@ -2,10 +2,12 @@
 
 from typing import Any, Iterable, List, Mapping, Optional, Sequence, Tuple, Union
 import sys
-import builtins
-
-_str = builtins.str  # TODO workaround for mypy#2010
 
+# workaround for mypy#2010
+if sys.version_info < (3,):
+    from __builtin__ import str as _str
+else:
+    from builtins import str as _str
 
 CODESET = ...  # type: int
 D_T_FMT = ...  # type: int
@@ -95,8 +97,8 @@ def format_string(format: _str, val: Sequence[Any],
                   grouping: bool = ...) -> _str: ...
 def currency(val: int, symbol: bool = ..., grouping: bool = ...,
              international: bool = ...) -> _str: ...
-def str(float: float) -> _str: ...
 if sys.version_info >= (3, 5):
     def delocalize(string: _str) -> None: ...
 def atof(string: _str) -> float: ...
 def atoi(string: _str) -> int: ...
+def str(float: float) -> _str: ...
diff --git a/typeshed/stdlib/2and3/logging/__init__.pyi b/typeshed/stdlib/2and3/logging/__init__.pyi
index e22dfcc..b10f2c6 100644
--- a/typeshed/stdlib/2and3/logging/__init__.pyi
+++ b/typeshed/stdlib/2and3/logging/__init__.pyi
@@ -55,7 +55,8 @@ class Logger:
                       stack_info: bool = ..., extra: Dict[str, Any] = ...,
                       **kwargs: Any) -> None: ...
     else:
-        def debug(msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
+        def debug(self,
+                  msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
                   extra: Dict[str, Any] = ..., **kwargs: Any) -> None: ...
         def info(self,
                  msg: Text, *args: Any, exc_info: _ExcInfoType = ...,
@@ -344,3 +345,11 @@ class NullHandler(Handler): ...
 class PlaceHolder:
     def __init__(self, alogger: Logger) -> None: ...
     def append(self, alogger: Logger) -> None: ...
+
+
+# Below aren't in module docs but still visible
+
+class RootLogger(Logger):
+    pass
+
+root = ...  # type: RootLogger
diff --git a/typeshed/stdlib/2and3/mmap.pyi b/typeshed/stdlib/2and3/mmap.pyi
index 2f4978f..de5b402 100644
--- a/typeshed/stdlib/2and3/mmap.pyi
+++ b/typeshed/stdlib/2and3/mmap.pyi
@@ -73,8 +73,8 @@ if sys.version_info >= (3,):
         @overload
         def __setitem__(self, index: slice, object: bytes) -> None: ...
 else:
-    class mmap(_mmap, Sequence[str]):
-        def rfind(self, string: str, start: int = ..., stop: int = ...) -> int: ...
-        def __getitem__(self, index: Union[int, slice]) -> str: ...
+    class mmap(_mmap, Sequence[bytes]):
+        def rfind(self, string: bytes, start: int = ..., stop: int = ...) -> int: ...
+        def __getitem__(self, index: Union[int, slice]) -> bytes: ...
         def __delitem__(self, index: Union[int, slice]) -> None: ...
-        def __setitem__(self, index: Union[int, slice], object: str) -> None: ...
+        def __setitem__(self, index: Union[int, slice], object: bytes) -> None: ...
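
Under Python 2 the mapped buffer is handled as byte strings throughout (indexing, slicing, rfind and item assignment), as a quick sketch shows:

import mmap

buf = mmap.mmap(-1, 16)        # anonymous 16-byte mapping
buf[0:5] = b'hello'
print(buf[0:5])                # 'hello'
print(buf.rfind(b'lo'))        # 3
buf.close()
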
diff --git a/typeshed/stdlib/2and3/numbers.pyi b/typeshed/stdlib/2and3/numbers.pyi
new file mode 100644
index 0000000..e4f7603
--- /dev/null
+++ b/typeshed/stdlib/2and3/numbers.pyi
@@ -0,0 +1,140 @@
+# Stubs for numbers (Python 3.5)
+# See https://docs.python.org/2.7/library/numbers.html
+# and https://docs.python.org/3/library/numbers.html
+#
+# Note: these stubs are incomplete. The more complex type
+# signatures are currently omitted.
+
+from typing import Any, Optional, TypeVar
+from abc import ABCMeta, abstractmethod
+import sys
+
+class Number(metaclass=ABCMeta):
+    @abstractmethod
+    def __hash__(self) -> int: ...
+
+class Complex(Number):
+    @abstractmethod
+    def __complex__(self) -> complex: ...
+    if sys.version_info >= (3, 0):
+        def __bool__(self) -> bool: ...
+    else:
+        def __nonzero__(self) -> bool: ...
+    @property
+    @abstractmethod
+    def real(self): ...
+    @property
+    @abstractmethod
+    def imag(self): ...
+    @abstractmethod
+    def __add__(self, other): ...
+    @abstractmethod
+    def __radd__(self, other): ...
+    @abstractmethod
+    def __neg__(self): ...
+    @abstractmethod
+    def __pos__(self): ...
+    def __sub__(self, other): ...
+    def __rsub__(self, other): ...
+    @abstractmethod
+    def __mul__(self, other): ...
+    @abstractmethod
+    def __rmul__(self, other): ...
+    if sys.version_info < (3, 0):
+        @abstractmethod
+        def __div__(self, other): ...
+        @abstractmethod
+        def __rdiv__(self, other): ...
+    @abstractmethod
+    def __truediv__(self, other): ...
+    @abstractmethod
+    def __rtruediv__(self, other): ...
+    @abstractmethod
+    def __pow__(self, exponent): ...
+    @abstractmethod
+    def __rpow__(self, base): ...
+    def __abs__(self): ...
+    def conjugate(self): ...
+    def __eq__(self, other: object) -> bool: ...
+    if sys.version_info < (3, 0):
+        def __ne__(self, other: object) -> bool: ...
+
+class Real(Complex):
+    @abstractmethod
+    def __float__(self) -> float: ...
+    @abstractmethod
+    def __trunc__(self) -> int: ...
+    if sys.version_info >= (3, 0):
+        @abstractmethod
+        def __floor__(self) -> int: ...
+        @abstractmethod
+        def __ceil__(self) -> int: ...
+        @abstractmethod
+        def __round__(self, ndigits: Optional[int] = None): ...
+    def __divmod__(self, other): ...
+    def __rdivmod__(self, other): ...
+    @abstractmethod
+    def __floordiv__(self, other): ...
+    @abstractmethod
+    def __rfloordiv__(self, other): ...
+    @abstractmethod
+    def __mod__(self, other): ...
+    @abstractmethod
+    def __rmod__(self, other): ...
+    @abstractmethod
+    def __lt__(self, other) -> bool: ...
+    @abstractmethod
+    def __le__(self, other) -> bool: ...
+    def __complex__(self) -> complex: ...
+    @property
+    def real(self): ...
+    @property
+    def imag(self): ...
+    def conjugate(self): ...
+
+class Rational(Real):
+    @property
+    @abstractmethod
+    def numerator(self) -> int: ...
+    @property
+    @abstractmethod
+    def denominator(self) -> int: ...
+    def __float__(self) -> float: ...
+
+class Integral(Rational):
+    if sys.version_info >= (3, 0):
+        @abstractmethod
+        def __int__(self) -> int: ...
+    else:
+        @abstractmethod
+        def __long__(self) -> long: ...
+    def __index__(self) -> int: ...
+    @abstractmethod
+    def __pow__(self, exponent, modulus=None): ...
+    @abstractmethod
+    def __lshift__(self, other): ...
+    @abstractmethod
+    def __rlshift__(self, other): ...
+    @abstractmethod
+    def __rshift__(self, other): ...
+    @abstractmethod
+    def __rrshift__(self, other): ...
+    @abstractmethod
+    def __and__(self, other): ...
+    @abstractmethod
+    def __rand__(self, other): ...
+    @abstractmethod
+    def __xor__(self, other): ...
+    @abstractmethod
+    def __rxor__(self, other): ...
+    @abstractmethod
+    def __or__(self, other): ...
+    @abstractmethod
+    def __ror__(self, other): ...
+    @abstractmethod
+    def __invert__(self): ...
+    def __float__(self) -> float: ...
+    @property
+    def numerator(self) -> int: ...
+    @property
+    def denominator(self) -> int: ...
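
The numbers ABCs are mainly used for isinstance checks and for registering numeric types, which is all the new stub needs to support; e.g.:

import numbers
from fractions import Fraction

print(isinstance(3, numbers.Integral))               # True
print(isinstance(Fraction(1, 3), numbers.Rational))  # True
print(isinstance(1.5, numbers.Rational))             # False; float is only Real
print(isinstance(1j, numbers.Complex))               # True
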
diff --git a/typeshed/stdlib/2and3/operator.pyi b/typeshed/stdlib/2and3/operator.pyi
index a45a79d..adce212 100644
--- a/typeshed/stdlib/2and3/operator.pyi
+++ b/typeshed/stdlib/2and3/operator.pyi
@@ -161,9 +161,9 @@ def attrgetter(attr: str) -> Callable[[Any], Any]: ...
 def attrgetter(*attrs: str) -> Callable[[Any], Tuple[Any, ...]]: ...
 
 @overload
-def itemgetter(item: int) -> Callable[[Any], Any]: ...
+def itemgetter(item: Any) -> Callable[[Any], Any]: ...
 @overload
-def itemgetter(*items: int) -> Callable[[Any], Tuple[Any, ...]]: ...
+def itemgetter(*items: Any) -> Callable[[Any], Tuple[Any, ...]]: ...
 
 def methodcaller(name: str, *args: Any, **kwargs: Any) -> Callable[..., Any]: ...
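
itemgetter accepts arbitrary keys (and several at once), not just ints, hence the relaxed parameter types:

from operator import itemgetter

rows = [{'name': 'ada', 'age': 36}, {'name': 'bob', 'age': 41}]
print(sorted(rows, key=itemgetter('name'))[0]['name'])   # 'ada'
print(itemgetter('name', 'age')(rows[1]))                # ('bob', 41)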
 
diff --git a/typeshed/stdlib/2and3/plistlib.pyi b/typeshed/stdlib/2and3/plistlib.pyi
index a367c7a..2790199 100644
--- a/typeshed/stdlib/2and3/plistlib.pyi
+++ b/typeshed/stdlib/2and3/plistlib.pyi
@@ -36,7 +36,7 @@ def readPlist(pathOrFile: Union[_Path, IO[bytes]]) -> DictT[str, Any]: ...
 def writePlist(value: Mapping[str, Any], pathOrFile: Union[_Path, IO[bytes]]) -> None: ...
 def readPlistFromBytes(data: bytes) -> DictT[str, Any]: ...
 def writePlistToBytes(value: Mapping[str, Any]) -> bytes: ...
-if sys.version_info < (3,) and sys.platform == 'darwin':
+if sys.version_info < (3,):
     def readPlistFromResource(path: _Path, restype: str = ...,
                               resid: int = ...) -> DictT[str, Any]: ...
     def writePlistToResource(rootObject: Mapping[str, Any], path: _Path,
diff --git a/typeshed/stdlib/2and3/tarfile.pyi b/typeshed/stdlib/2and3/tarfile.pyi
new file mode 100644
index 0000000..4ab1c41
--- /dev/null
+++ b/typeshed/stdlib/2and3/tarfile.pyi
@@ -0,0 +1,178 @@
+## Stubs for tarfile
+
+from typing import (
+    Callable, IO, Iterator, List, Mapping, Optional, Type,
+    Union,
+)
+import sys
+from types import TracebackType
+
+
+ENCODING = ...  # type: str
+
+USTAR_FORMAT = ...  # type: int
+GNU_FORMAT = ...  # type: int
+PAX_FORMAT = ...  # type: int
+DEFAULT_FORMAT = ...  # type: int
+
+REGTYPE = ...  # type: bytes
+AREGTYPE = ...  # type: bytes
+LNKTYPE = ...  # type: bytes
+SYMTYPE = ...  # type: bytes
+DIRTYPE = ...  # type: bytes
+FIFOTYPE = ...  # type: bytes
+CONTTYPE = ...  # type: bytes
+CHRTYPE = ...  # type: bytes
+BLKTYPE = ...  # type: bytes
+GNUTYPE_SPARSE = ...  # type: bytes
+
+if sys.version_info < (3,):
+    TAR_PLAIN = ...  # type: int
+    TAR_GZIPPED = ...  # type: int
+
+def open(name: Optional[str] = ..., mode: str = ...,
+        fileobj: Optional[IO[bytes]] = ..., bufsize: int = ...,
+        *, format: Optional[int] = ..., tarinfo: Optional[TarInfo] = ...,
+        dereference: Optional[bool] = ...,
+        ignore_zeros: Optional[bool] = ...,
+        encoding: Optional[str] = ..., errors: str = ...,
+        pax_headers: Optional[Mapping[str, str]] = ...,
+        debug: Optional[int] = ...,
+        errorlevel: Optional[int] = ...) -> TarFile: ...
+
+
+class TarFile:
+    name = ...  # type: Optional[str]
+    mode = ...  # type: str
+    fileobj = ...  # type: Optional[IO[bytes]]
+    format = ...  # type: Optional[int]
+    tarinfo = ...  # type: Optional[TarInfo]
+    dereference = ...  # type: Optional[bool]
+    ignore_zeros = ...  # type: Optional[bool]
+    encoding = ...  # type: Optional[str]
+    errors = ...  # type: str
+    pax_headers = ...  # type: Optional[Mapping[str, str]]
+    debug = ...  # type: Optional[int]
+    errorlevel = ...  # type: Optional[int]
+    if sys.version_info < (3,):
+        posix = ...  # type: bool
+    def __init__(self, name: Optional[str] = ..., mode: str = ...,
+                 fileobj: Optional[IO[bytes]] = ...,
+                 format: Optional[int] = ..., tarinfo: Optional[TarInfo] = ...,
+                 dereference: Optional[bool] = ...,
+                 ignore_zeros: Optional[bool] = ...,
+                 encoding: Optional[str] = ..., errors: str = ...,
+                 pax_headers: Optional[Mapping[str, str]] = ...,
+                 debug: Optional[int] = ...,
+                 errorlevel: Optional[int] = ...) -> None: ...
+    def __enter__(self) -> TarFile: ...
+    def __exit__(self,
+                 exc_type: Optional[Type[BaseException]],
+                 exc_val: Optional[Exception],
+                 exc_tb: Optional[TracebackType]) -> bool: ...
+    def __iter__(self) -> Iterator[TarInfo]: ...
+    @classmethod
+    def open(cls, name: Optional[str] = ..., mode: str = ...,
+             fileobj: Optional[IO[bytes]] = ..., bufsize: int = ...,
+             *, format: Optional[int] = ..., tarinfo: Optional[TarInfo] = ...,
+             dereference: Optional[bool] = ...,
+             ignore_zeros: Optional[bool] = ...,
+             encoding: Optional[str] = ..., errors: str = ...,
+             pax_headers: Optional[Mapping[str, str]] = ...,
+             debug: Optional[int] = ...,
+             errorlevel: Optional[int] = ...) -> TarFile: ...
+    def getmember(self, name: str) -> TarInfo: ...
+    def getmembers(self) -> List[TarInfo]: ...
+    def getnames(self) -> List[str]: ...
+    if sys.version_info >= (3, 5):
+        def list(self, verbose: bool = ...,
+                 *, members: Optional[List[TarInfo]] = ...) -> None: ...
+    else:
+        def list(self, verbose: bool = ...) -> None: ...
+    def next(self) -> Optional[TarInfo]: ...
+    if sys.version_info >= (3, 5):
+        def extractall(self, path: str = ...,
+                       members: Optional[List[TarInfo]] = ...,
+                       *, numeric_owner: bool = ...) -> None: ...
+    else:
+        def extractall(self, path: str = ...,
+                       members: Optional[List[TarInfo]] = ...) -> None: ...
+    if sys.version_info >= (3, 5):
+        def extract(self, member: Union[str, TarInfo], path: str = ...,
+                    set_attrs: bool = ...,
+                    *, numeric_owner: bool = ...) -> None: ...
+    elif sys.version_info >= (3,):
+        def extract(self, member: Union[str, TarInfo], path: str = ...,
+                    set_attrs: bool = ...) -> None: ...
+    else:
+        def extract(self, member: Union[str, TarInfo],
+                    path: str = ...) -> None: ...
+    def extractfile(self,
+                    member: Union[str, TarInfo]) -> Optional[IO[bytes]]: ...
+    if sys.version_info >= (3,):
+        def add(self, name: str, arcname: Optional[str] = ...,
+                recursive: bool = ...,
+                exclude: Optional[Callable[[str], bool]] = ..., *,
+                filter: Optional[Callable[[TarInfo], Optional[TarInfo]]] = ...) -> None: ...
+    else:
+        def add(self, name: str, arcname: Optional[str] = ...,
+                recursive: bool = ...,
+                exclude: Optional[Callable[[str], bool]] = ...,
+                filter: Optional[Callable[[TarInfo], Optional[TarInfo]]] = ...) -> None: ...
+    def addfile(self, tarinfo: TarInfo,
+                fileobj: Optional[IO[bytes]] = ...) -> None: ...
+    def gettarinfo(self, name: Optional[str] = ...,
+                   arcname: Optional[str] = ...,
+                   fileobj: Optional[IO[bytes]] = ...) -> TarInfo: ...
+    def close(self) -> None: ...
+
+
+def is_tarfile(name: str) -> bool: ...
+
+
+if sys.version_info < (3,):
+    class TarFileCompat:
+        def __init__(self, filename: str, mode: str = ...,
+                     compression: int = ...) -> None: ...
+
+
+class TarError(Exception): ...
+class ReadError(TarError): ...
+class CompressionError(TarError): ...
+class StreamError(TarError): ...
+class ExtractError(TarError): ...
+class HeaderError(TarError): ...
+
+
+class TarInfo:
+    name = ...  # type: str
+    size = ...  # type: int
+    mtime = ...  # type: int
+    mode = ...  # type: int
+    type = ...  # type: bytes
+    linkname = ...  # type: str
+    uid = ...  # type: int
+    gid = ...  # type: int
+    uname = ...  # type: str
+    gname = ...  # type: str
+    pax_headers = ...  # type: Mapping[str, str]
+    def __init__(self, name: str = ...) -> None: ...
+    if sys.version_info >= (3,):
+        @classmethod
+        def frombuf(cls, buf: bytes, encoding: str, errors: str) -> TarInfo: ...
+    else:
+        @classmethod
+        def frombuf(cls, buf: bytes) -> TarInfo: ...
+    @classmethod
+    def fromtarfile(cls, tarfile: TarFile) -> TarInfo: ...
+    def tobuf(self, format: Optional[int] = ...,
+              encoding: Optional[str] = ..., errors: str = ...) -> bytes: ...
+    def isfile(self) -> bool: ...
+    def isreg(self) -> bool: ...
+    def isdir(self) -> bool: ...
+    def issym(self) -> bool: ...
+    def islnk(self) -> bool: ...
+    def ischr(self) -> bool: ...
+    def isblk(self) -> bool: ...
+    def isfifo(self) -> bool: ...
+    def isdev(self) -> bool: ...
diff --git a/typeshed/stdlib/3.4/asyncio/tasks.pyi b/typeshed/stdlib/3.4/asyncio/tasks.pyi
index 69a4ac3..e5b101c 100644
--- a/typeshed/stdlib/3.4/asyncio/tasks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/tasks.pyi
@@ -1,4 +1,4 @@
-from typing import Any, TypeVar, Set, Dict, List, TextIO, Union, Tuple, Generic, Callable, Generator, Iterable, Awaitable, overload
+from typing import Any, TypeVar, Set, Dict, List, TextIO, Union, Tuple, Generic, Callable, Generator, Iterable, Awaitable, overload, Sequence, Iterator
 
 __all__ = ... # type: str
 
@@ -9,11 +9,19 @@ FIRST_EXCEPTION = 'FIRST_EXCEPTION'
 FIRST_COMPLETED = 'FIRST_COMPLETED'
 ALL_COMPLETED = 'ALL_COMPLETED'
 _T = TypeVar('_T')
+def as_completed(fs: Sequence[Future[_T]], *, loop: AbstractEventLoop = ..., timeout=None) -> Iterator[Generator[Any, None, _T]]: ...
+def ensure_future(coro_or_future: Union[Future[_T], Generator[Any, None, _T]], *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
+def gather(*coros_or_futures: Sequence[Union[Future[_T], Generator[Any, None, _T]]], loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[_T]: ...
+def run_coroutine_threadsafe(coro: Generator[Any, None, _T], loop: AbstractEventLoop) -> Future[_T]: ...
+def shield(arg: Union[Future[_T], Generator[Any, None, _T]], *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
 def sleep(delay: float, result: _T = ..., loop: AbstractEventLoop = ...) -> Future[_T]: ...
 def wait(fs: List[Task[_T]], *, loop: AbstractEventLoop = ...,
     timeout: float = ..., return_when: str = ...) -> Future[Tuple[Set[Future[_T]], Set[Future[_T]]]]: ...
 def wait_for(fut: Union[Future[_T], Generator[Any, None, _T]], timeout: float, *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
 
+class _GatheringFuture(Future[_T], Generic[_T]):
+    def __init__(self, children: Sequence[Union[Future[_T], Generator[Any, None, _T]]], *, loop: AbstractEventLoop = ...) -> None: ...
+    def cancel(self) -> bool: ...
 
 class Task(Future[_T], Generic[_T]):
     _all_tasks = ...  # type: Set[Task]
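
The newly declared ensure_future/gather helpers in use, with 3.4-style generator coroutines (a sketch assuming Python 3.4.4+, where ensure_future is available):

import asyncio

@asyncio.coroutine
def work(n):
    yield from asyncio.sleep(0.01)
    return n * n

loop = asyncio.get_event_loop()
task = asyncio.ensure_future(work(3), loop=loop)
results = loop.run_until_complete(asyncio.gather(task, work(4), loop=loop))
print(results)   # [9, 16]
loop.close()
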
diff --git a/typeshed/stdlib/3.4/enum.pyi b/typeshed/stdlib/3.4/enum.pyi
index 4032e81..ac04a4a 100644
--- a/typeshed/stdlib/3.4/enum.pyi
+++ b/typeshed/stdlib/3.4/enum.pyi
@@ -12,7 +12,8 @@ class Enum:
     name = ...  # type: str
     value = ...  # type: Any
 
-class IntEnum(int, Enum): ...
+class IntEnum(int, Enum):
+    value = ...  # type: int
 
 _T = TypeVar('_T')
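
With IntEnum.value declared as int, arithmetic and comparisons on member values type check without casts:

from enum import IntEnum

class Priority(IntEnum):
    LOW = 1
    HIGH = 2

print(Priority.HIGH.value + 1)         # 3
print(Priority.HIGH > Priority.LOW)    # True; IntEnum members compare as ints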
 
diff --git a/typeshed/stdlib/3.5/pathlib.pyi b/typeshed/stdlib/3.5/pathlib.pyi
index 7b4fdb9..34b2669 100644
--- a/typeshed/stdlib/3.5/pathlib.pyi
+++ b/typeshed/stdlib/3.5/pathlib.pyi
@@ -1,12 +1,12 @@
-# Stubs for pathlib (Python 3.4)
+# Stubs for pathlib (Python 3.5)
 
 from typing import Any, Generator, IO, Optional, Sequence, Tuple, Union
 import os
 
 class PurePath:
     parts = ...  # type: Tuple[str, ...]
-    drive = ... # type: str
-    root = ... # type: str
+    drive = ...  # type: str
+    root = ...  # type: str
     anchor = ...  # type: str
     parents = ...  # type: Sequence[PurePath]
     parent = ...  # type: PurePath
@@ -14,20 +14,20 @@ class PurePath:
     suffix = ...  # type: str
     suffixes = ...  # type: List[str]
     stem = ...  # type: str
-    def __init__(self, *pathsegments: str) -> None: ...
-    def __hash__(self) -> int: ...
+    def __new__(cls, *args: Union[str, PurePath]) -> PurePath: ...
     def __lt__(self, other: PurePath) -> bool: ...
     def __le__(self, other: PurePath) -> bool: ...
     def __gt__(self, other: PurePath) -> bool: ...
     def __ge__(self, other: PurePath) -> bool: ...
     def __truediv__(self, key: Union[str, PurePath]) -> PurePath: ...
+    def __rtruediv__(self, key: Union[str, PurePath]) -> PurePath: ...
     def __bytes__(self) -> bytes: ...
     def as_posix(self) -> str: ...
     def as_uri(self) -> str: ...
     def is_absolute(self) -> bool: ...
     def is_reserved(self) -> bool: ...
     def match(self, path_pattern: str) -> bool: ...
-    def relative_to(self, *other: str) -> PurePath: ...
+    def relative_to(self, *other: Union[str, PurePath]) -> PurePath: ...
     def with_name(self, name: str) -> PurePath: ...
     def with_suffix(self, suffix: str) -> PurePath: ...
     def joinpath(self, *other: Union[str, PurePath]) -> PurePath: ...
@@ -40,6 +40,8 @@ class Path(PurePath):
     def cwd(cls) -> Path: ...
     @classmethod
     def home(cls) -> Path: ...
+    def __new__(cls, *args: Union[str, PurePath], **kwargs: Any) -> Path: ...
+    def absolute(self) -> Path: ...
     def stat(self) -> os.stat_result: ...
     def chmod(self, mode: int) -> None: ...
     def exists(self) -> bool: ...
@@ -64,14 +66,14 @@ class Path(PurePath):
     def owner(self) -> str: ...
     def read_bytes(self) -> bytes: ...
     def read_text(self, encoding: Optional[str] = ...,
-                  errors: Optional[str] = ...) -> bytes: ...
-    def rename(self, target: Union[str, Path]) -> None: ...
-    def replace(self, target: Union[str, Path]) -> None: ...
+                  errors: Optional[str] = ...) -> str: ...
+    def rename(self, target: Union[str, PurePath]) -> None: ...
+    def replace(self, target: Union[str, PurePath]) -> None: ...
     def resolve(self) -> Path: ...
     def rglob(self, pattern: str) -> Generator[Path, None, None]: ...
     def rmdir(self) -> None: ...
-    def samefile(self, other_path: Union[str, Path]) -> bool: ...
-    def symlink_to(self, target: Union[str, Path],
+    def samefile(self, other_path: Union[str, bytes, int, Path]) -> bool: ...
+    def symlink_to(self, target: Union[str, PurePath],
                    target_is_directory: bool = ...) -> None: ...
     def touch(self, mode: int = ..., exist_ok: bool = ...) -> None: ...
     def unlink(self) -> None: ...
@@ -79,5 +81,17 @@ class Path(PurePath):
     def write_text(self, data: str, encoding: Optional[str] = ...,
                    errors: Optional[str] = ...) -> int: ...
 
+    # The following methods are re-stubbed here even though they only actually exist in the base
+    # class so that they return Path when called on a Path, rather than returning PurePath.
+    parents = ...  # type: Sequence[Path]
+    parent = ...  # type: Path
+    def __truediv__(self, key: Union[str, PurePath]) -> Path: ...
+    def __rtruediv__(self, key: Union[str, PurePath]) -> Path: ...
+    def relative_to(self, *other: Union[str, PurePath]) -> Path: ...
+    def with_name(self, name: str) -> Path: ...
+    def with_suffix(self, suffix: str) -> Path: ...
+    def joinpath(self, *args: Union[str, PurePath]) -> Path: ...
+
+
 class PosixPath(Path, PurePosixPath): ...
 class WindowsPath(Path, PureWindowsPath): ...
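
As the comment in the pathlib hunk above explains, Path re-declares members it inherits from PurePath so that results stay typed as Path. A minimal usage sketch of what those narrowed annotations describe (illustrative only, not part of the patch; the file name is made up):

    from pathlib import Path

    base = Path("/tmp")            # Path.__new__ now returns Path
    child = base / "data.txt"      # re-stubbed __truediv__ keeps the result a Path
    if child.exists():             # Path-only method, so the narrowing matters
        text = child.read_text()   # read_text is corrected to return str, not bytes
        print(len(text))
    print(child.parent)            # parent is re-declared as Path on Path
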
diff --git a/typeshed/stdlib/3/atexit.pyi b/typeshed/stdlib/3/atexit.pyi
index f4f0ada..24f9389 100644
--- a/typeshed/stdlib/3/atexit.pyi
+++ b/typeshed/stdlib/3/atexit.pyi
@@ -5,5 +5,5 @@ from typing import Any, Callable
 def _clear() -> None: ...
 def _ncallbacks() -> int: ...
 def _run_exitfuncs() -> None: ...
-def register(func: Callable[..., Any], *args, **kwargs) -> Callable[..., Any]: ...
+def register(func: Callable[..., Any], *args: Any, **kwargs: Any) -> Callable[..., Any]: ...
 def unregister(func: Callable[..., Any]) -> None: ...
diff --git a/typeshed/stdlib/3/binascii.pyi b/typeshed/stdlib/3/binascii.pyi
index 9f96136..edbd970 100644
--- a/typeshed/stdlib/3/binascii.pyi
+++ b/typeshed/stdlib/3/binascii.pyi
@@ -2,16 +2,16 @@
 
 # Based on http://docs.python.org/3.2/library/binascii.html
 
-import typing
+from typing import Union
 
-def a2b_uu(string: bytes) -> bytes: ...
+def a2b_uu(string: Union[str, bytes]) -> bytes: ...
 def b2a_uu(data: bytes) -> bytes: ...
-def a2b_base64(string: bytes) -> bytes: ...
+def a2b_base64(string: Union[str, bytes]) -> bytes: ...
 def b2a_base64(data: bytes) -> bytes: ...
-def a2b_qp(string: bytes, header: bool = ...) -> bytes: ...
+def a2b_qp(string: Union[str, bytes], header: bool = ...) -> bytes: ...
 def b2a_qp(data: bytes, quotetabs: bool = ..., istext: bool = ...,
              header: bool = ...) -> bytes: ...
-def a2b_hqx(string: bytes) -> bytes: ...
+def a2b_hqx(string: Union[str, bytes]) -> bytes: ...
 def rledecode_hqx(data: bytes) -> bytes: ...
 def rlecode_hqx(data: bytes) -> bytes: ...
 def b2a_hqx(data: bytes) -> bytes: ...
@@ -19,8 +19,8 @@ def crc_hqx(data: bytes, crc: int) -> int: ...
 def crc32(data: bytes, crc: int = ...) -> int: ...
 def b2a_hex(data: bytes) -> bytes: ...
 def hexlify(data: bytes) -> bytes: ...
-def a2b_hex(hexstr: bytes) -> bytes: ...
-def unhexlify(hexlify: bytes) -> bytes: ...
+def a2b_hex(hexstr: Union[str, bytes]) -> bytes: ...
+def unhexlify(hexlify: Union[str, bytes]) -> bytes: ...
 
 class Error(Exception): ...
 class Incomplete(Exception): ...
diff --git a/typeshed/stdlib/3/builtins.pyi b/typeshed/stdlib/3/builtins.pyi
index ae113be..6f3e1f1 100644
--- a/typeshed/stdlib/3/builtins.pyi
+++ b/typeshed/stdlib/3/builtins.pyi
@@ -4,7 +4,7 @@ from typing import (
     TypeVar, Iterator, Iterable, overload,
     Sequence, MutableSequence, Mapping, MutableMapping, Tuple, List, Any, Dict, Callable, Generic,
     Set, AbstractSet, MutableSet, Sized, Reversible, SupportsInt, SupportsFloat, SupportsBytes,
-    SupportsAbs, SupportsRound, IO, Union, ItemsView, KeysView, ValuesView, ByteString
+    SupportsAbs, SupportsRound, IO, Union, ItemsView, KeysView, ValuesView, ByteString, Optional
 )
 from abc import abstractmethod, ABCMeta
 from types import TracebackType
@@ -63,7 +63,7 @@ class type:
     def mro(self) -> List[type]: ...
 
 class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
-    def __init__(self, x: Union[SupportsInt, str, bytes] = None, base: int = None) -> None: ...
+    def __init__(self, x: Union[SupportsInt, str, bytes] = ..., base: int = ...) -> None: ...
     def bit_length(self) -> int: ...
     def to_bytes(self, length: int, byteorder: str, *, signed: bool = ...) -> bytes: ...
     @classmethod
@@ -112,7 +112,7 @@ class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     def __hash__(self) -> int: ...
 
 class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
-    def __init__(self, x: Union[SupportsFloat, str, bytes]=None) -> None: ...
+    def __init__(self, x: Union[SupportsFloat, str, bytes] = ...) -> None: ...
     def as_integer_ratio(self) -> Tuple[int, int]: ...
     def hex(self) -> str: ...
     def is_integer(self) -> bool: ...
@@ -148,6 +148,7 @@ class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     def __float__(self) -> float: ...
     def __abs__(self) -> float: ...
     def __hash__(self) -> int: ...
+    def __format__(self, format_spec: str) -> str: ...
 
 class complex(SupportsAbs[float]):
     @overload
@@ -188,7 +189,7 @@ class str(Sequence[str]):
     @overload
     def __init__(self, o: object) -> None: ...
     @overload
-    def __init__(self, o: bytes, encoding: str = None, errors: str = 'strict') -> None: ...
+    def __init__(self, o: bytes, encoding: str = ..., errors: str = 'strict') -> None: ...
     def capitalize(self) -> str: ...
     def center(self, width: int, fillchar: str = ' ') -> str: ...
     def count(self, x: str) -> int: ...
@@ -274,12 +275,14 @@ class bytes(ByteString):
     @overload
     def __init__(self, o: SupportsBytes) -> None: ...
     def capitalize(self) -> bytes: ...
-    def center(self, width: int, fillchar: bytes = None) -> bytes: ...
+    def center(self, width: int, fillchar: bytes = ...) -> bytes: ...
     def count(self, x: bytes) -> int: ...
     def decode(self, encoding: str = 'utf-8', errors: str = 'strict') -> str: ...
     def endswith(self, suffix: Union[bytes, Tuple[bytes, ...]]) -> bool: ...
     def expandtabs(self, tabsize: int = 8) -> bytes: ...
     def find(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+    if sys.version_info >= (3, 5):
+        def hex(self) -> str: ...
     def index(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
     def isalnum(self) -> bool: ...
     def isalpha(self) -> bool: ...
@@ -289,14 +292,14 @@ class bytes(ByteString):
     def istitle(self) -> bool: ...
     def isupper(self) -> bool: ...
     def join(self, iterable: Iterable[bytes]) -> bytes: ...
-    def ljust(self, width: int, fillchar: bytes = None) -> bytes: ...
+    def ljust(self, width: int, fillchar: bytes = ...) -> bytes: ...
     def lower(self) -> bytes: ...
     def lstrip(self, chars: bytes = None) -> bytes: ...
     def partition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
     def replace(self, old: bytes, new: bytes, count: int = -1) -> bytes: ...
     def rfind(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
     def rindex(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
-    def rjust(self, width: int, fillchar: bytes = None) -> bytes: ...
+    def rjust(self, width: int, fillchar: bytes = ...) -> bytes: ...
     def rpartition(self, sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
     def rsplit(self, sep: bytes = None, maxsplit: int = -1) -> List[bytes]: ...
     def rstrip(self, chars: bytes = None) -> bytes: ...
@@ -306,7 +309,7 @@ class bytes(ByteString):
     def strip(self, chars: bytes = None) -> bytes: ...
     def swapcase(self) -> bytes: ...
     def title(self) -> bytes: ...
-    def translate(self, table: bytes, delete: bytes = None) -> bytes: ...
+    def translate(self, table: Optional[bytes], delete: bytes = ...) -> bytes: ...
     def upper(self) -> bytes: ...
     def zfill(self, width: int) -> bytes: ...
     @classmethod
@@ -346,12 +349,14 @@ class bytearray(MutableSequence[int], ByteString):
     @overload
     def __init__(self) -> None: ...
     def capitalize(self) -> bytearray: ...
-    def center(self, width: int, fillchar: bytes = None) -> bytearray: ...
+    def center(self, width: int, fillchar: bytes = ...) -> bytearray: ...
     def count(self, x: bytes) -> int: ...
     def decode(self, encoding: str = 'utf-8', errors: str = 'strict') -> str: ...
     def endswith(self, suffix: bytes) -> bool: ...
     def expandtabs(self, tabsize: int = 8) -> bytearray: ...
     def find(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
+    if sys.version_info >= (3, 5):
+        def hex(self) -> str: ...
     def index(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
     def insert(self, index: int, object: int) -> None: ...
     def isalnum(self) -> bool: ...
@@ -362,14 +367,14 @@ class bytearray(MutableSequence[int], ByteString):
     def istitle(self) -> bool: ...
     def isupper(self) -> bool: ...
     def join(self, iterable: Iterable[bytes]) -> bytearray: ...
-    def ljust(self, width: int, fillchar: bytes = None) -> bytearray: ...
+    def ljust(self, width: int, fillchar: bytes = ...) -> bytearray: ...
     def lower(self) -> bytearray: ...
     def lstrip(self, chars: bytes = None) -> bytearray: ...
     def partition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
     def replace(self, old: bytes, new: bytes, count: int = -1) -> bytearray: ...
     def rfind(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
     def rindex(self, sub: bytes, start: int = 0, end: int = 0) -> int: ...
-    def rjust(self, width: int, fillchar: bytes = None) -> bytearray: ...
+    def rjust(self, width: int, fillchar: bytes = ...) -> bytearray: ...
     def rpartition(self, sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
     def rsplit(self, sep: bytes = None, maxsplit: int = -1) -> List[bytearray]: ...
     def rstrip(self, chars: bytes = None) -> bytearray: ...
@@ -379,7 +384,7 @@ class bytearray(MutableSequence[int], ByteString):
     def strip(self, chars: bytes = None) -> bytearray: ...
     def swapcase(self) -> bytearray: ...
     def title(self) -> bytearray: ...
-    def translate(self, table: bytes, delete: bytes = None) -> bytearray: ...
+    def translate(self, table: Optional[bytes], delete: bytes = ...) -> bytearray: ...
     def upper(self) -> bytearray: ...
     def zfill(self, width: int) -> bytearray: ...
     @classmethod
@@ -419,6 +424,8 @@ class bytearray(MutableSequence[int], ByteString):
 class memoryview():
     # TODO arg can be any obj supporting the buffer protocol
     def __init__(self, b: bytearray) -> None: ...
+    if sys.version_info >= (3, 5):
+        def hex(self) -> str: ...
 
 class bool(int, SupportsInt, SupportsFloat):
     def __init__(self, o: object = ...) -> None: ...
@@ -537,7 +544,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def __str__(self) -> str: ...
 
 class set(MutableSet[_T], Generic[_T]):
-    def __init__(self, iterable: Iterable[_T]=None) -> None: ...
+    def __init__(self, iterable: Iterable[_T] = ...) -> None: ...
     def add(self, element: _T) -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> set[_T]: ...
@@ -574,7 +581,7 @@ class set(MutableSet[_T], Generic[_T]):
     # TODO more set operations
 
 class frozenset(AbstractSet[_T], Generic[_T]):
-    def __init__(self, iterable: Iterable[_T]=None) -> None: ...
+    def __init__(self, iterable: Iterable[_T] = ...) -> None: ...
     def copy(self) -> frozenset[_T]: ...
     def difference(self, *s: Iterable[Any]) -> frozenset[_T]: ...
     def intersection(self, *s: Iterable[Any]) -> frozenset[_T]: ...
@@ -650,7 +657,7 @@ def compile(source: Any, filename: Union[str, bytes], mode: str, flags: int = 0,
 def copyright() -> None: ...
 def credits() -> None: ...
 def delattr(o: Any, name: str) -> None: ...
-def dir(o: object = None) -> List[str]: ...
+def dir(o: object = ...) -> List[str]: ...
 _N = TypeVar('_N', int, float)
 def divmod(a: _N, b: _N) -> Tuple[_N, _N]: ...
 def eval(source: str, globals: Dict[str, Any] = None,
@@ -660,7 +667,7 @@ def exec(object: str, globals: Dict[str, Any] = None,
 def exit(code: int = None) -> None: ...
 def filter(function: Callable[[_T], Any], iterable: Iterable[_T]) -> Iterator[_T]: ...
 def format(o: object, format_spec: str = '') -> str: ...
-def getattr(o: Any, name: str, default: Any = None) -> Any: ...
+def getattr(o: Any, name: str, default: Any = ...) -> Any: ...
 def globals() -> Dict[str, Any]: ...
 def hasattr(o: Any, name: str) -> bool: ...
 def hash(o: object) -> int: ...
@@ -683,14 +690,14 @@ def map(func: Callable[[_T1], _S], iter1: Iterable[_T1]) -> Iterator[_S]: ...
 def map(func: Callable[[_T1, _T2], _S], iter1: Iterable[_T1],
         iter2: Iterable[_T2]) -> Iterator[_S]: ...  # TODO more than two iterables
 @overload
-def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+def max(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
-def max(iterable: Iterable[_T], key: Callable[[_T], Any] = None, default:_T = None) -> _T: ...
+def max(iterable: Iterable[_T], key: Callable[[_T], Any] = ..., default:_T = ...) -> _T: ...
 # TODO memoryview
 @overload
-def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = None) -> _T: ...
+def min(arg1: _T, arg2: _T, *args: _T, key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
-def min(iterable: Iterable[_T], key: Callable[[_T], Any] = None, default:_T = None) -> _T: ...
+def min(iterable: Iterable[_T], key: Callable[[_T], Any] = ..., default:_T = ...) -> _T: ...
 @overload
 def next(i: Iterator[_T]) -> _T: ...
 @overload
@@ -726,8 +733,8 @@ def round(number: SupportsRound[_T], ndigits: int) -> _T: ...
 def setattr(object: Any, name: str, value: Any) -> None: ...
 def sorted(iterable: Iterable[_T], *, key: Callable[[_T], Any] = None,
            reverse: bool = False) -> List[_T]: ...
-def sum(iterable: Iterable[_T], start: _T = None) -> _T: ...
-def vars(object: Any = None) -> Dict[str, Any]: ...
+def sum(iterable: Iterable[_T], start: _T = ...) -> _T: ...
+def vars(object: Any = ...) -> Dict[str, Any]: ...
 @overload
 def zip(iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
 @overload
diff --git a/typeshed/stdlib/3/calendar.pyi b/typeshed/stdlib/3/calendar.pyi
index 632ef2b..3901170 100644
--- a/typeshed/stdlib/3/calendar.pyi
+++ b/typeshed/stdlib/3/calendar.pyi
@@ -73,3 +73,14 @@ def setfirstweekday(firstweekday: int) -> None: ...
 def format(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ...
 def formatstring(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ...
 def timegm(tuple: Tuple[int, ...]) -> int: ...
+
+# Below constants are not in docs or __all__, but enough people have used them
+# they are now effectively public.
+
+MONDAY = ...  # type: int
+TUESDAY = ...  # type: int
+WEDNESDAY = ...  # type: int
+THURSDAY = ...  # type: int
+FRIDAY = ...  # type: int
+SATURDAY = ...  # type: int
+SUNDAY = ...  # type: int
diff --git a/typeshed/stdlib/3/collections/__init__.pyi b/typeshed/stdlib/3/collections/__init__.pyi
index 0dc340e..f93990b 100644
--- a/typeshed/stdlib/3/collections/__init__.pyi
+++ b/typeshed/stdlib/3/collections/__init__.pyi
@@ -7,7 +7,7 @@
 # These are not exported.
 from typing import (
     TypeVar, Iterable, Generic, Iterator, Dict, overload,
-    Mapping, List, Tuple, Callable, Sized,
+    Mapping, List, Tuple, Callable, Sized, Any, Type,
     Optional, Union
 )
 # These are exported.
@@ -25,7 +25,8 @@ _VT = TypeVar('_VT')
 
 
 # namedtuple is special-cased in the type checker; the initializer is ignored.
-namedtuple = object()
+def namedtuple(typename: str, field_names: Union[str, Iterable[Any]], *,
+               verbose: bool = ..., rename: bool = ..., module: str = None) -> Type[tuple]: ...
 
 class UserDict(MutableMapping): ...
 class UserList(MutableSequence): ...
@@ -107,6 +108,17 @@ class Counter(Dict[_T, int], Generic[_T]):
     @overload
     def update(self, m: Union[Iterable[_T], Iterable[Tuple[_T, int]]]) -> None: ...
 
+    def __add__(self, other: Counter[_T]) -> Counter[_T]: ...
+    def __sub__(self, other: Counter[_T]) -> Counter[_T]: ...
+    def __and__(self, other: Counter[_T]) -> Counter[_T]: ...
+    def __or__(self, other: Counter[_T]) -> Counter[_T]: ...
+    def __pos__(self) -> Counter[_T]: ...
+    def __neg__(self) -> Counter[_T]: ...
+    def __iadd__(self, other: Counter[_T]) -> Counter[_T]: ...
+    def __isub__(self, other: Counter[_T]) -> Counter[_T]: ...
+    def __iand__(self, other: Counter[_T]) -> Counter[_T]: ...
+    def __ior__(self, other: Counter[_T]) -> Counter[_T]: ...
+
 class OrderedDict(Dict[_KT, _VT], Generic[_KT, _VT]):
     def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ...
     def move_to_end(self, key: _KT, last: bool = ...) -> None: ...
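
The Counter hunk above adds annotations for the arithmetic and set-like operators Counter already provides at runtime. A short usage sketch of the operations those stubs cover (illustrative, not part of the patch):

    from collections import Counter

    a = Counter("abracadabra")
    b = Counter("alakazam")
    print(a + b)    # __add__: summed counts
    print(a - b)    # __sub__: keeps only positive counts
    print(a & b)    # __and__: element-wise minimum
    print(a | b)    # __or__: element-wise maximum
    print(+a)       # __pos__: drops non-positive counts
    a &= b          # the in-place variants (__iand__ etc.) are stubbed as well
    print(a)
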
diff --git a/typeshed/stdlib/3/concurrent/futures/__init__.pyi b/typeshed/stdlib/3/concurrent/futures/__init__.pyi
index 91cf274..5b6ab81 100644
--- a/typeshed/stdlib/3/concurrent/futures/__init__.pyi
+++ b/typeshed/stdlib/3/concurrent/futures/__init__.pyi
@@ -1,7 +1,3 @@
-# Stubs for concurrent.futures (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
 from ._base import *
 from .thread import *
 from .process import *
diff --git a/typeshed/stdlib/3/concurrent/futures/_base.pyi b/typeshed/stdlib/3/concurrent/futures/_base.pyi
index 98ffaf3..819e79c 100644
--- a/typeshed/stdlib/3/concurrent/futures/_base.pyi
+++ b/typeshed/stdlib/3/concurrent/futures/_base.pyi
@@ -1,8 +1,4 @@
-# Stubs for concurrent.futures._base (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Tuple
+from typing import TypeVar, Generic, Any, Iterable, Iterator, Callable, Tuple, Optional
 from collections import namedtuple
 
 FIRST_COMPLETED = ... # type: Any
@@ -19,41 +15,6 @@ class Error(Exception): ...
 class CancelledError(Error): ...
 class TimeoutError(Error): ...
 
-class _Waiter:
-    event = ... # type: Any
-    finished_futures = ... # type: Any
-    def __init__(self): ...
-    def add_result(self, future): ...
-    def add_exception(self, future): ...
-    def add_cancelled(self, future): ...
-
-class _AsCompletedWaiter(_Waiter):
-    lock = ... # type: Any
-    def __init__(self): ...
-    def add_result(self, future): ...
-    def add_exception(self, future): ...
-    def add_cancelled(self, future): ...
-
-class _FirstCompletedWaiter(_Waiter):
-    def add_result(self, future): ...
-    def add_exception(self, future): ...
-    def add_cancelled(self, future): ...
-
-class _AllCompletedWaiter(_Waiter):
-    num_pending_calls = ... # type: Any
-    stop_on_exception = ... # type: Any
-    lock = ... # type: Any
-    def __init__(self, num_pending_calls, stop_on_exception): ...
-    def add_result(self, future): ...
-    def add_exception(self, future): ...
-    def add_cancelled(self, future): ...
-
-class _AcquireFutures:
-    futures = ... # type: Any
-    def __init__(self, futures): ...
-    def __enter__(self): ...
-    def __exit__(self, *args): ...
-
 DoneAndNotDoneFutures = namedtuple('DoneAndNotDoneFutures', 'done not_done')
 
 _T = TypeVar('_T')
@@ -65,19 +26,19 @@ class Future(Generic[_T]):
     def running(self) -> bool: ...
     def done(self) -> bool: ...
     def add_done_callback(self, fn: Callable[[Future], Any]) -> None: ...
-    def result(self, timeout: float = ...) -> _T: ...
-    def exception(self, timeout: float = ...) -> Exception: ...
+    def result(self, timeout: Optional[float] = ...) -> _T: ...
+    def exception(self, timeout: Optional[float] = ...) -> Exception: ...
     def set_running_or_notify_cancel(self) -> None: ...
     def set_result(self, result: _T) -> None: ...
     def set_exception(self, exception: Exception) -> None: ...
 
 class Executor:
     def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ...
-    def map(self, func: Callable[..., _T], *iterables: Any, timeout: float = ...) -> Iterable[_T]: ...
+    def map(self, func: Callable[..., _T], *iterables: Any, timeout: Optional[float] = ..., chunksize: int = ...) -> Iterable[_T]: ...
     def shutdown(self, wait: bool = ...) -> None: ...
     def __enter__(self) -> Executor: ...
     def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool: ...
 
-def as_completed(fs: Iterable[Future], timeout: float = ...) -> Iterator[Future]: ...
+def as_completed(fs: Iterable[Future], timeout: Optional[float] = ...) -> Iterator[Future]: ...
 
-def wait(fs: Iterable[Future], timeout: float = ..., return_when: str = ...) -> Tuple[Iterable[Future], Iterable[Future]]: ...
+def wait(fs: Iterable[Future], timeout: Optional[float] = ..., return_when: str = ...) -> Tuple[Iterable[Future], Iterable[Future]]: ...
diff --git a/typeshed/stdlib/3/concurrent/futures/process.pyi b/typeshed/stdlib/3/concurrent/futures/process.pyi
index 9bc56fa..d3da982 100644
--- a/typeshed/stdlib/3/concurrent/futures/process.pyi
+++ b/typeshed/stdlib/3/concurrent/futures/process.pyi
@@ -1,46 +1,15 @@
-# Stubs for concurrent.futures.process (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-from . import _base
+from typing import Any, Callable, TypeVar, Iterable, Optional
+from ._base import Future, Executor
 
 EXTRA_QUEUED_CALLS = ... # type: Any
 
-class _RemoteTraceback(Exception):
-    tb = ... # type: Any
-    def __init__(self, tb): ...
-
-class _ExceptionWithTraceback:
-    exc = ... # type: Any
-    tb = ... # type: Any
-    def __init__(self, exc, tb): ...
-    def __reduce__(self): ...
-
-class _WorkItem:
-    future = ... # type: Any
-    fn = ... # type: Any
-    args = ... # type: Any
-    kwargs = ... # type: Any
-    def __init__(self, future, fn, args, kwargs): ...
-
-class _ResultItem:
-    work_id = ... # type: Any
-    exception = ... # type: Any
-    result = ... # type: Any
-    def __init__(self, work_id, exception=None, result=None): ...
+class BrokenProcessPool(RuntimeError): ...
 
-class _CallItem:
-    work_id = ... # type: Any
-    fn = ... # type: Any
-    args = ... # type: Any
-    kwargs = ... # type: Any
-    def __init__(self, work_id, fn, args, kwargs): ...
+_T = TypeVar('_T')
 
-class BrokenProcessPool(RuntimeError): ...
+class ProcessPoolExecutor(Executor):
+    def __init__(self, max_workers: Optional[int] = ...) -> None: ...
+    def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ...
+    def map(self, func: Callable[..., _T], *iterables: Any, timeout: Optional[float] = ..., chunksize: int = ...) -> Iterable[_T]: ...
+    def shutdown(self, wait: bool = ...) -> None: ...
 
-class ProcessPoolExecutor(_base.Executor):
-    def __init__(self, max_workers=None): ...
-    def submit(self, fn, *args, **kwargs): ...
-    def map(self, fn, *iterables, timeout=None, chunksize=1): ...
-    def shutdown(self, wait=True): ...
diff --git a/typeshed/stdlib/3/concurrent/futures/thread.pyi b/typeshed/stdlib/3/concurrent/futures/thread.pyi
index f8242ff..65885fd 100644
--- a/typeshed/stdlib/3/concurrent/futures/thread.pyi
+++ b/typeshed/stdlib/3/concurrent/futures/thread.pyi
@@ -1,19 +1,11 @@
-# Stubs for concurrent.futures.thread (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
+from typing import Any, TypeVar, Callable, Iterable, Optional
+from ._base import Executor, Future
 
-from typing import Any
-from . import _base
+_T = TypeVar('_T')
 
-class _WorkItem:
-    future = ... # type: Any
-    fn = ... # type: Any
-    args = ... # type: Any
-    kwargs = ... # type: Any
-    def __init__(self, future, fn, args, kwargs): ...
-    def run(self): ...
+class ThreadPoolExecutor(Executor):
+    def __init__(self, max_workers: Optional[int] = ...) -> None: ...
+    def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ...
+    def map(self, func: Callable[..., _T], *iterables: Any, timeout: Optional[float] = ..., chunksize: int = ...) -> Iterable[_T]: ...
+    def shutdown(self, wait: bool = ...) -> None: ...
 
-class ThreadPoolExecutor(_base.Executor):
-    def __init__(self, max_workers=None): ...
-    def submit(self, fn, *args, **kwargs): ...
-    def shutdown(self, wait=True): ...
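
The three concurrent.futures hunks above replace stubgen's dynamically typed output (including the private _Waiter/_WorkItem helpers) with typed signatures for the public API, adding Optional timeouts and the chunksize parameter on map. A minimal sketch of the kind of code those signatures are meant to check (illustrative, not part of the patch):

    from concurrent.futures import ThreadPoolExecutor

    def square(x: int) -> int:
        return x * x

    with ThreadPoolExecutor(max_workers=2) as pool:
        fut = pool.submit(square, 7)          # Future[int] under the new stubs
        print(fut.result(timeout=None))       # timeout is Optional[float]
        # chunksize exists since Python 3.5; threads accept and ignore it
        print(list(pool.map(square, range(4), timeout=None, chunksize=1)))
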
diff --git a/typeshed/stdlib/3/configparser.pyi b/typeshed/stdlib/3/configparser.pyi
index b495d59..a4f2301 100644
--- a/typeshed/stdlib/3/configparser.pyi
+++ b/typeshed/stdlib/3/configparser.pyi
@@ -3,8 +3,8 @@
 # Based on http://docs.python.org/3.5/library/configparser.html and on
 # reading configparser.py.
 
-from typing import (MutableMapping, Mapping, Dict, Sequence, List,
-                    Iterable, Iterator, Callable, Any, IO)
+from typing import (MutableMapping, Mapping, Dict, Sequence, List, Union,
+                    Iterable, Iterator, Callable, Any, IO, overload, Optional)
 # Types only used in type comments only
 from typing import Optional, Tuple  # noqa
 
@@ -48,19 +48,19 @@ class ExtendedInterpolation(Interpolation):
     pass
 
 
-class ConfigParser(_parser):
+class RawConfigParser(_parser):
     def __init__(self,
                  defaults: _section = None,
                  dict_type: Mapping[str, str] = ...,
                  allow_no_value: bool = ...,
+                 *,
                  delimiters: Sequence[str] = ...,
                  comment_prefixes: Sequence[str] = ...,
                  inline_comment_prefixes: Sequence[str] = None,
                  strict: bool = ...,
                  empty_lines_in_values: bool = ...,
                  default_section: str = ...,
-                 interpolation: Interpolation = None,
-                 converters: _converters = {}) -> None: ...
+                 interpolation: Interpolation = None) -> None: ...
 
     def __len__(self) -> int: ...
 
@@ -84,7 +84,7 @@ class ConfigParser(_parser):
 
     def has_option(self, section: str, option: str) -> bool: ...
 
-    def read(self, filenames: Sequence[str],
+    def read(self, filenames: Union[str, Sequence[str]],
              encoding: str = None) -> List[str]: ...
 
     def read_file(self, f: Iterable[str], source: str = None) -> None: ...
@@ -94,11 +94,15 @@ class ConfigParser(_parser):
     def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]],
                   source: str = ...) -> None: ...
 
-    def getint(self, section: str, option: str) -> int: ...
+    def getint(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: int = ...) -> int: ...
+
+    def getfloat(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: float = ...) -> float: ...
 
-    def getfloat(self, section: str, option: str) -> float: ...
+    def getboolean(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: bool = ...) -> bool: ...
 
-    def getboolean(self, section: str, option: str) -> bool: ...
+    # This is incompatible with MutableMapping so we ignore the type
+    def get(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: str = ...) -> str:  # type: ignore
+        ...
 
     def set(self, section: str, option: str, value: str) -> None: ...
 
@@ -113,6 +117,21 @@ class ConfigParser(_parser):
     def optionxform(self, option: str) -> str: ...
 
 
+class ConfigParser(RawConfigParser):
+    def __init__(self,
+                 defaults: _section = None,
+                 dict_type: Mapping[str, str] = ...,
+                 allow_no_value: bool = ...,
+                 delimiters: Sequence[str] = ...,
+                 comment_prefixes: Sequence[str] = ...,
+                 inline_comment_prefixes: Sequence[str] = None,
+                 strict: bool = ...,
+                 empty_lines_in_values: bool = ...,
+                 default_section: str = ...,
+                 interpolation: Interpolation = None,
+                 converters: _converters = {}) -> None: ...
+
+
 class Error(Exception):
     pass
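
The configparser hunk splits the stub into RawConfigParser plus a ConfigParser subclass (matching the runtime hierarchy) and gives getint/getfloat/getboolean/get their keyword-only raw/vars/fallback parameters. A small sketch of the calls those signatures allow (illustrative, not part of the patch; the section and option names are made up, and read_string comes from the runtime module rather than this hunk):

    from configparser import ConfigParser

    cp = ConfigParser()
    cp.read_string("[server]\nport = 8080\ndebug = yes\n")
    print(cp.getint("server", "port", fallback=80))         # keyword-only fallback
    print(cp.getboolean("server", "debug", fallback=False))
    print(cp.get("server", "host", fallback="localhost"))   # get() returns str
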
 
diff --git a/typeshed/stdlib/3/datetime.pyi b/typeshed/stdlib/3/datetime.pyi
index 382a587..3d2a58f 100644
--- a/typeshed/stdlib/3/datetime.pyi
+++ b/typeshed/stdlib/3/datetime.pyi
@@ -14,9 +14,9 @@ class tzinfo:
     def fromutc(self, dt: datetime) -> datetime: ...
 
 class timezone(tzinfo):
-    utc = ...  # type: tzinfo
-    min = ...  # type: tzinfo
-    max = ...  # type: tzinfo
+    utc = ...  # type: timezone
+    min = ...  # type: timezone
+    max = ...  # type: timezone
 
     def __init__(self, offset: timedelta, name: str = ...) -> None: ...
     def __hash__(self) -> int: ...
@@ -93,7 +93,7 @@ class time:
     def isoformat(self) -> str: ...
     def strftime(self, fmt: str) -> str: ...
     def __format__(self, fmt: str) -> str: ...
-    def utcoffset(self) -> Optional[int]: ...
+    def utcoffset(self) -> Optional[timedelta]: ...
     def tzname(self) -> Optional[str]: ...
     def dst(self) -> Optional[int]: ...
     def replace(self, hour: int = ..., minute: int = ..., second: int = ...,
@@ -203,7 +203,7 @@ class datetime:
     def isoformat(self, sep: str = ...) -> str: ...
     @classmethod
     def strptime(cls, date_string: str, format: str) -> datetime: ...
-    def utcoffset(self) -> Optional[int]: ...
+    def utcoffset(self) -> Optional[timedelta]: ...
     def tzname(self) -> Optional[str]: ...
     def dst(self) -> Optional[int]: ...
     def __le__(self, other: datetime) -> bool: ...
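
The datetime hunk corrects utcoffset to return Optional[timedelta] rather than Optional[int] and types the timezone class attributes as timezone. A small sketch of what the corrected annotations permit (illustrative, not part of the patch):

    from datetime import datetime, timedelta, timezone

    tz = timezone(timedelta(hours=2))
    now = datetime.now(tz)
    offset = now.utcoffset()            # Optional[timedelta] after this change
    if offset is not None:
        print(offset.total_seconds())   # timedelta API is available once narrowed
    print(timezone.utc.utcoffset(None)) # timezone.utc is now typed as timezone
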
diff --git a/typeshed/stdlib/3/decimal.pyi b/typeshed/stdlib/3/decimal.pyi
index 6145b8c..a7d88af 100644
--- a/typeshed/stdlib/3/decimal.pyi
+++ b/typeshed/stdlib/3/decimal.pyi
@@ -6,6 +6,7 @@ from typing import (
 )
 
 _Decimal = Union[Decimal, int]
+_ComparableNum = Union[Decimal, int, float]
 
 BasicContext = ...  # type: Context
 DefaultContext = ...  # type: Context
@@ -201,12 +202,12 @@ class Decimal(SupportsInt, SupportsFloat, SupportsAbs[Decimal], SupportsRound[in
     def __floor__(self) -> int: ...
     def __floordiv__(self, other: _Decimal) -> Decimal: ...
     def __format__(self, specifier, context=..., _localeconv=...) -> str: ...
-    def __ge__(self, other: _Decimal) -> bool: ...
-    def __gt__(self, other: _Decimal) -> bool: ...
+    def __ge__(self, other: _ComparableNum) -> bool: ...
+    def __gt__(self, other: _ComparableNum) -> bool: ...
     def __hash__(self) -> int: ...
     def __int__(self) -> int: ...
-    def __le__(self, other: _Decimal) -> bool: ...
-    def __lt__(self, other: _Decimal) -> bool: ...
+    def __le__(self, other: _ComparableNum) -> bool: ...
+    def __lt__(self, other: _ComparableNum) -> bool: ...
     def __mod__(self, other: _Decimal) -> Decimal: ...
     def __mul__(self, other: _Decimal) -> Decimal: ...
     def __ne__(self, other: object) -> bool: ...
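
The decimal hunk introduces _ComparableNum so that the ordering operators accept int and float operands, while arithmetic stays restricted to _Decimal. A tiny sketch (illustrative, not part of the patch):

    from decimal import Decimal

    d = Decimal("2.5")
    print(d > 2)     # int comparisons were already covered by _Decimal
    print(d < 3.0)   # float comparisons are what _ComparableNum adds
    print(d + 1)     # arithmetic still only accepts Decimal or int
    # d + 1.0 stays rejected: float is not part of _Decimal (and raises at runtime)
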
diff --git a/typeshed/stdlib/3/difflib.pyi b/typeshed/stdlib/3/difflib.pyi
index a05db8d..eaf068e 100644
--- a/typeshed/stdlib/3/difflib.pyi
+++ b/typeshed/stdlib/3/difflib.pyi
@@ -4,13 +4,13 @@
 
 from typing import (
     TypeVar, Callable, Iterable, Iterator, List, NamedTuple, Sequence, Tuple,
-    Generic
+    Generic, Optional
 )
 
 _T = TypeVar('_T')
 
 class SequenceMatcher(Generic[_T]):
-    def __init__(self, isjunk: Callable[[_T], bool] = ...,
+    def __init__(self, isjunk: Optional[Callable[[_T], bool]] = ...,
                  a: Sequence[_T] = ..., b: Sequence[_T] = ...,
                  autojunk: bool = ...) -> None: ...
     def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ...
diff --git a/typeshed/stdlib/3/http/__init__.pyi b/typeshed/stdlib/3/http/__init__.pyi
index e69de29..a9a77da 100644
--- a/typeshed/stdlib/3/http/__init__.pyi
+++ b/typeshed/stdlib/3/http/__init__.pyi
@@ -0,0 +1,68 @@
+import sys
+
+from enum import IntEnum
+
+if sys.version_info >= (3, 5):
+    class HTTPStatus(IntEnum):
+
+        def __init__(self, *a) -> None:
+            self.phrase = ...  # type: str
+            self.description = ...  # type: str
+
+        CONTINUE = ...  # type: object
+        SWITCHING_PROTOCOLS = ...  # type: object
+        PROCESSING = ...  # type: object
+        OK = ...  # type: object
+        CREATED = ...  # type: object
+        ACCEPTED = ...  # type: object
+        NON_AUTHORITATIVE_INFORMATION = ...  # type: object
+        NO_CONTENT = ...  # type: object
+        RESET_CONTENT = ...  # type: object
+        PARTIAL_CONTENT = ...  # type: object
+        MULTI_STATUS = ...  # type: object
+        ALREADY_REPORTED = ...  # type: object
+        IM_USED = ...  # type: object
+        MULTIPLE_CHOICES = ...  # type: object
+        MOVED_PERMANENTLY = ...  # type: object
+        FOUND = ...  # type: object
+        SEE_OTHER = ...  # type: object
+        NOT_MODIFIED = ...  # type: object
+        USE_PROXY = ...  # type: object
+        TEMPORARY_REDIRECT = ...  # type: object
+        PERMANENT_REDIRECT = ...  # type: object
+        BAD_REQUEST = ...  # type: object
+        UNAUTHORIZED = ...  # type: object
+        PAYMENT_REQUIRED = ...  # type: object
+        FORBIDDEN = ...  # type: object
+        NOT_FOUND = ...  # type: object
+        METHOD_NOT_ALLOWED = ...  # type: object
+        NOT_ACCEPTABLE = ...  # type: object
+        PROXY_AUTHENTICATION_REQUIRED = ...  # type: object
+        REQUEST_TIMEOUT = ...  # type: object
+        CONFLICT = ...  # type: object
+        GONE = ...  # type: object
+        LENGTH_REQUIRED = ...  # type: object
+        PRECONDITION_FAILED = ...  # type: object
+        REQUEST_ENTITY_TOO_LARGE = ...  # type: object
+        REQUEST_URI_TOO_LONG = ...  # type: object
+        UNSUPPORTED_MEDIA_TYPE = ...  # type: object
+        REQUESTED_RANGE_NOT_SATISFIABLE = ...  # type: object
+        EXPECTATION_FAILED = ...  # type: object
+        UNPROCESSABLE_ENTITY = ...  # type: object
+        LOCKED = ...  # type: object
+        FAILED_DEPENDENCY = ...  # type: object
+        UPGRADE_REQUIRED = ...  # type: object
+        PRECONDITION_REQUIRED = ...  # type: object
+        TOO_MANY_REQUESTS = ...  # type: object
+        REQUEST_HEADER_FIELDS_TOO_LARGE = ...  # type: object
+        INTERNAL_SERVER_ERROR = ...  # type: object
+        NOT_IMPLEMENTED = ...  # type: object
+        BAD_GATEWAY = ...  # type: object
+        SERVICE_UNAVAILABLE = ...  # type: object
+        GATEWAY_TIMEOUT = ...  # type: object
+        HTTP_VERSION_NOT_SUPPORTED = ...  # type: object
+        VARIANT_ALSO_NEGOTIATES = ...  # type: object
+        INSUFFICIENT_STORAGE = ...  # type: object
+        LOOP_DETECTED = ...  # type: object
+        NOT_EXTENDED = ...  # type: object
+        NETWORK_AUTHENTICATION_REQUIRED = ...  # type: object
diff --git a/typeshed/stdlib/3/inspect.pyi b/typeshed/stdlib/3/inspect.pyi
index 0f7af37..5fc5b19 100644
--- a/typeshed/stdlib/3/inspect.pyi
+++ b/typeshed/stdlib/3/inspect.pyi
@@ -15,7 +15,7 @@ ModuleInfo = NamedTuple('ModuleInfo', [('name', str),
                                        ])
 def getmembers(object: object,
                predicate: Callable[[Any], bool] = ...,
-               ) -> List[Tuple[str, object]]: ...
+               ) -> List[Tuple[str, Any]]: ...
 def getmoduleinfo(path: str) -> Optional[ModuleInfo]: ...
 def getmodulename(path: str) -> Optional[str]: ...
 
diff --git a/typeshed/stdlib/3/numbers.pyi b/typeshed/stdlib/3/numbers.pyi
deleted file mode 100644
index 8bea0b0..0000000
--- a/typeshed/stdlib/3/numbers.pyi
+++ /dev/null
@@ -1,80 +0,0 @@
-# Stubs for numbers (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-class Number:
-    __hash__ = ... # type: Any
-
-class Complex(Number):
-    def __complex__(self): ...
-    def __bool__(self): ...
-    @property
-    def real(self): ...
-    @property
-    def imag(self): ...
-    def __add__(self, other): ...
-    def __radd__(self, other): ...
-    def __neg__(self): ...
-    def __pos__(self): ...
-    def __sub__(self, other): ...
-    def __rsub__(self, other): ...
-    def __mul__(self, other): ...
-    def __rmul__(self, other): ...
-    def __truediv__(self, other): ...
-    def __rtruediv__(self, other): ...
-    def __pow__(self, exponent): ...
-    def __rpow__(self, base): ...
-    def __abs__(self): ...
-    def conjugate(self): ...
-    def __eq__(self, other): ...
-
-class Real(Complex):
-    def __float__(self): ...
-    def __trunc__(self): ...
-    def __floor__(self): ...
-    def __ceil__(self): ...
-    def __round__(self, ndigits=None): ...
-    def __divmod__(self, other): ...
-    def __rdivmod__(self, other): ...
-    def __floordiv__(self, other): ...
-    def __rfloordiv__(self, other): ...
-    def __mod__(self, other): ...
-    def __rmod__(self, other): ...
-    def __lt__(self, other): ...
-    def __le__(self, other): ...
-    def __complex__(self): ...
-    @property
-    def real(self): ...
-    @property
-    def imag(self): ...
-    def conjugate(self): ...
-
-class Rational(Real):
-    @property
-    def numerator(self): ...
-    @property
-    def denominator(self): ...
-    def __float__(self): ...
-
-class Integral(Rational):
-    def __int__(self): ...
-    def __index__(self): ...
-    def __pow__(self, exponent, modulus=None): ...
-    def __lshift__(self, other): ...
-    def __rlshift__(self, other): ...
-    def __rshift__(self, other): ...
-    def __rrshift__(self, other): ...
-    def __and__(self, other): ...
-    def __rand__(self, other): ...
-    def __xor__(self, other): ...
-    def __rxor__(self, other): ...
-    def __or__(self, other): ...
-    def __ror__(self, other): ...
-    def __invert__(self): ...
-    def __float__(self): ...
-    @property
-    def numerator(self): ...
-    @property
-    def denominator(self): ...
diff --git a/typeshed/stdlib/3/signal.pyi b/typeshed/stdlib/3/signal.pyi
index f956e05..9e599f9 100644
--- a/typeshed/stdlib/3/signal.pyi
+++ b/typeshed/stdlib/3/signal.pyi
@@ -1,5 +1,7 @@
 """Stub file for the 'signal' module."""
 
+import sys
+from enum import IntEnum
 from typing import Any, Callable, List, Tuple, Dict, Generic, Union, Optional, Iterable, Set
 from types import FrameType
 
@@ -10,54 +12,115 @@ ITIMER_REAL = ...  # type: int
 ITIMER_VIRTUAL = ...  # type: int
 
 NSIG = ...  # type: int
-SIGABRT = ...  # type: int
-SIGALRM = ...  # type: int
-SIGBUS = ...  # type: int
-SIGCHLD = ...  # type: int
-SIGCLD = ...  # type: int
-SIGCONT = ...  # type: int
-SIGFPE = ...  # type: int
-SIGHUP = ...  # type: int
-SIGILL = ...  # type: int
-SIGINT = ...  # type: int
-SIGIO = ...  # type: int
-SIGIOT = ...  # type: int
-SIGKILL = ...  # type: int
-SIGPIPE = ...  # type: int
-SIGPOLL = ...  # type: int
-SIGPROF = ...  # type: int
-SIGPWR = ...  # type: int
-SIGQUIT = ...  # type: int
-SIGRTMAX = ...  # type: int
-SIGRTMIN = ...  # type: int
-SIGSEGV = ...  # type: int
-SIGSTOP = ...  # type: int
-SIGSYS = ...  # type: int
-SIGTERM = ...  # type: int
-SIGTRAP = ...  # type: int
-SIGTSTP = ...  # type: int
-SIGTTIN = ...  # type: int
-SIGTTOU = ...  # type: int
-SIGURG = ...  # type: int
-SIGUSR1 = ...  # type: int
-SIGUSR2 = ...  # type: int
-SIGVTALRM = ...  # type: int
-SIGWINCH = ...  # type: int
-SIGXCPU = ...  # type: int
-SIGXFSZ = ...  # type: int
-
-SIG_DFL = ...  # type: int
-SIG_IGN = ...  # type: int
+
+if sys.version_info >= (3, 5):
+    class Signals(IntEnum):
+        SIGABRT = ...
+        SIGALRM = ...
+        SIGBUS = ...
+        SIGCHLD = ...
+        SIGCLD = ...
+        SIGCONT = ...
+        SIGFPE = ...
+        SIGHUP = ...
+        SIGILL = ...
+        SIGINT = ...
+        SIGIO = ...
+        SIGIOT = ...
+        SIGKILL = ...
+        SIGPIPE = ...
+        SIGPOLL = ...
+        SIGPROF = ...
+        SIGPWR = ...
+        SIGQUIT = ...
+        SIGRTMAX = ...
+        SIGRTMIN = ...
+        SIGSEGV = ...
+        SIGSTOP = ...
+        SIGSYS = ...
+        SIGTERM = ...
+        SIGTRAP = ...
+        SIGTSTP = ...
+        SIGTTIN = ...
+        SIGTTOU = ...
+        SIGURG = ...
+        SIGUSR1 = ...
+        SIGUSR2 = ...
+        SIGVTALRM = ...
+        SIGWINCH = ...
+        SIGXCPU = ...
+        SIGXFSZ = ...
+
+    class Handlers(IntEnum):
+        SIG_DFL = ...
+        SIG_IGN = ...
+
+    SIG_DFL = Handlers.SIG_DFL
+    SIG_IGN = Handlers.SIG_IGN
+
+    class Sigmasks(IntEnum):
+        SIG_BLOCK = ...
+        SIG_UNBLOCK = ...
+        SIG_SETMASK = ...
+
+    SIG_BLOCK = Sigmasks.SIG_BLOCK
+    SIG_UNBLOCK = Sigmasks.SIG_UNBLOCK
+    SIG_SETMASK = Sigmasks.SIG_SETMASK
+
+    _SIG = Signals
+    _SIGNUM = Union[int, Signals]
+    _HANDLER = Union[Callable[[Signals, FrameType], None], int, Handlers, None]
+else:
+    SIG_DFL = ...  # type: int
+    SIG_IGN = ...  # type: int
+
+    SIG_BLOCK = ...  # type: int
+    SIG_UNBLOCK = ...  # type: int
+    SIG_SETMASK = ...  # type: int
+
+    _SIG = int
+    _SIGNUM = int
+    _HANDLER = Union[Callable[[int, FrameType], None], int, None]
+
+SIGABRT = ...  # type: _SIG
+SIGALRM = ...  # type: _SIG
+SIGBUS = ...  # type: _SIG
+SIGCHLD = ...  # type: _SIG
+SIGCLD = ...  # type: _SIG
+SIGCONT = ...  # type: _SIG
+SIGFPE = ...  # type: _SIG
+SIGHUP = ...  # type: _SIG
+SIGILL = ...  # type: _SIG
+SIGINT = ...  # type: _SIG
+SIGIO = ...  # type: _SIG
+SIGIOT = ...  # type: _SIG
+SIGKILL = ...  # type: _SIG
+SIGPIPE = ...  # type: _SIG
+SIGPOLL = ...  # type: _SIG
+SIGPROF = ...  # type: _SIG
+SIGPWR = ...  # type: _SIG
+SIGQUIT = ...  # type: _SIG
+SIGRTMAX = ...  # type: _SIG
+SIGRTMIN = ...  # type: _SIG
+SIGSEGV = ...  # type: _SIG
+SIGSTOP = ...  # type: _SIG
+SIGSYS = ...  # type: _SIG
+SIGTERM = ...  # type: _SIG
+SIGTRAP = ...  # type: _SIG
+SIGTSTP = ...  # type: _SIG
+SIGTTIN = ...  # type: _SIG
+SIGTTOU = ...  # type: _SIG
+SIGURG = ...  # type: _SIG
+SIGUSR1 = ...  # type: _SIG
+SIGUSR2 = ...  # type: _SIG
+SIGVTALRM = ...  # type: _SIG
+SIGWINCH = ...  # type: _SIG
+SIGXCPU = ...  # type: _SIG
+SIGXFSZ = ...  # type: _SIG
 
 CTRL_C_EVENT = 0 # Windows
 CTRL_BREAK_EVENT = 0 # Windows
 
-SIG_BLOCK = ...  # type: int
-SIG_UNBLOCK = ...  # type: int
-SIG_SETMASK = ...  # type: int
-
-_HANDLER = Union[Callable[[int, FrameType], None], int, None]
-
 class struct_siginfo(Tuple[int, int, int, int, int, int, int]):
     def __init__(self, sequence: Iterable[int]) -> None: ...
     @property
@@ -82,7 +145,7 @@ def default_int_handler(signum: int, frame: FrameType) -> None:
 
 def getitimer(which: int) -> Tuple[float, float]: ...
 
-def getsignal(signalnum: int) -> _HANDLER:
+def getsignal(signalnum: _SIGNUM) -> _HANDLER:
     raise ValueError()
 
 def pause() -> None: ...
@@ -90,7 +153,7 @@ def pause() -> None: ...
 def pthread_kill(thread_id: int, signum: int) -> None:
     raise OSError()
 
-def pthread_sigmask(how: int, mask: Iterable[int]) -> Set[int]:
+def pthread_sigmask(how: int, mask: Iterable[int]) -> Set[_SIGNUM]:
     raise OSError()
 
 def set_wakeup_fd(fd: int) -> int: ...
@@ -100,7 +163,7 @@ def setitimer(which: int, seconds: float, interval: float = ...) -> Tuple[float,
 def siginterrupt(signalnum: int, flag: bool) -> None:
     raise OSError()
 
-def signal(signalnum: int, handler: _HANDLER) -> _HANDLER:
+def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER:
     raise OSError()
 
 def sigpending() -> Any:
@@ -110,7 +173,7 @@ def sigtimedwait(sigset: Iterable[int], timeout: float) -> Optional[struct_sigin
     raise OSError()
     raise ValueError()
 
-def sigwait(sigset: Iterable[int]) -> int:
+def sigwait(sigset: Iterable[int]) -> _SIGNUM:
     raise OSError()
 
 def sigwaitinfo(sigset: Iterable[int]) -> struct_siginfo:
diff --git a/typeshed/stdlib/3/socket.pyi b/typeshed/stdlib/3/socket.pyi
index cd2f3d5..8451cfb 100644
--- a/typeshed/stdlib/3/socket.pyi
+++ b/typeshed/stdlib/3/socket.pyi
@@ -5,7 +5,7 @@
 # see: http://hg.python.org/cpython/file/3d0686d90f55/Lib/socket.py
 # see: http://nullege.com/codes/search/socket
 
-from typing import Any, Tuple, Union, List, overload
+from typing import Any, Tuple, List, Optional, Union, overload
 
 # ----- variables and constants -----
 
@@ -277,7 +277,7 @@ class socket:
     proto = 0
 
     def __init__(self, family: int = ..., type: int = ...,
-                 proto: int = ..., fileno: int = ...) -> None: ...
+                 proto: int = ..., fileno: Optional[int] = ...) -> None: ...
 
     # --- methods ---
     # second tuple item is an address
diff --git a/typeshed/stdlib/3/stat.pyi b/typeshed/stdlib/3/stat.pyi
index 374373f..60600ed 100644
--- a/typeshed/stdlib/3/stat.pyi
+++ b/typeshed/stdlib/3/stat.pyi
@@ -14,7 +14,7 @@ def S_ISLNK(mode: int) -> bool: ...
 def S_ISSOCK(mode: int) -> bool: ...
 
 def S_IMODE(mode: int) -> int: ...
-def S_IFMT(mode) -> int: ...
+def S_IFMT(mode: int) -> int: ...
 
 ST_MODE = 0
 ST_INO = 0
diff --git a/typeshed/stdlib/3/string.pyi b/typeshed/stdlib/3/string.pyi
index e2c7291..365449e 100644
--- a/typeshed/stdlib/3/string.pyi
+++ b/typeshed/stdlib/3/string.pyi
@@ -20,8 +20,8 @@ class Template:
     template = ...  # type: str
 
     def __init__(self, template: str) -> None: ...
-    def substitute(self, mapping: Mapping[str, str], **kwds: str) -> str: ...
-    def safe_substitute(self, mapping: Mapping[str, str],
+    def substitute(self, mapping: Mapping[str, str] = ..., **kwds: str) -> str: ...
+    def safe_substitute(self, mapping: Mapping[str, str] = ...,
                         **kwds: str) -> str: ...
 
 # TODO(MichalPokorny): This is probably badly and/or loosely typed.
diff --git a/typeshed/stdlib/3/subprocess.pyi b/typeshed/stdlib/3/subprocess.pyi
index fd9e95b..8af789f 100644
--- a/typeshed/stdlib/3/subprocess.pyi
+++ b/typeshed/stdlib/3/subprocess.pyi
@@ -2,7 +2,8 @@
 
 # Based on http://docs.python.org/3.5/library/subprocess.html
 import sys
-from typing import Sequence, Any, Mapping, Callable, Tuple, IO, Optional, Union, List
+from typing import Sequence, Any, Mapping, Callable, Tuple, IO, Optional, Union, List, Type
+from types import TracebackType
 
 
 if sys.version_info >= (3, 5):
@@ -15,7 +16,7 @@ if sys.version_info >= (3, 5):
                      returncode: int, 
                      stdout: Union[str, bytes], 
                      stderr: Union[str, bytes]) -> None: ...
-        def check_returncode(self): ...
+        def check_returncode(self) -> None: ...
     
     # Nearly same args as Popen.__init__ except for timeout, input, and check
     def run(args: Union[str, Sequence[str]],
@@ -235,7 +236,7 @@ class Popen:
     def terminate(self) -> None: ...
     def kill(self) -> None: ...
     def __enter__(self) -> 'Popen': ...
-    def __exit__(self, type, value, traceback) -> bool: ...
+    def __exit__(self, type: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType]) -> bool: ...
 
 def getstatusoutput(cmd: str) -> Tuple[int, str]: ...
 def getoutput(cmd: str) -> str: ...
diff --git a/typeshed/stdlib/3/sys.pyi b/typeshed/stdlib/3/sys.pyi
index c3ce9c6..28b23a5 100644
--- a/typeshed/stdlib/3/sys.pyi
+++ b/typeshed/stdlib/3/sys.pyi
@@ -115,7 +115,7 @@ def call_tracing(fn: Callable[..., _T], args: Any) -> _T: ...
 def _clear_type_cache() -> None: ...
 def _current_frames() -> Dict[int, Any]: ...
 def displayhook(value: Optional[int]) -> None: ...
-def excepthook(type_: type, value: BaseException,
+def excepthook(type_: Type[BaseException], value: BaseException,
                traceback: TracebackType) -> None: ...
 # TODO should be a union of tuple, see mypy#1178
 def exc_info() -> Tuple[Optional[Type[BaseException]],
diff --git a/typeshed/stdlib/3/tarfile.pyi b/typeshed/stdlib/3/tarfile.pyi
deleted file mode 100644
index 98f68a6..0000000
--- a/typeshed/stdlib/3/tarfile.pyi
+++ /dev/null
@@ -1,33 +0,0 @@
-# TODO these are incomplete
-
-from typing import Any, List, overload, Callable
-
-class TarError(Exception): ...
-
-class TarInfo:
-    name = ...  # type: str
-    size = 0
-    uid = 0
-    gid = 0
-
-class TarFile:
-    def getmember(self, name: str) -> TarInfo: ...
-    def getmembers(self) -> List[TarInfo]: ...
-    def getnames(self) -> List[str]: ...
-    def extractall(self, path: str = ...,
-                   members: List[TarInfo] = ...) -> None: ...
-
-    @overload
-    def extract(self, member: str, path: str = ...,
-                set_attrs: bool = ...) -> None: ...
-    @overload
-    def extract(self, member: TarInfo, path: str = ...,
-                set_attrs: bool = ...) -> None: ...
-
-    def add(self, name: str, arcname: str = ..., recursive: bool = ...,
-            exclude: Callable[[str], bool] = ..., *,
-            filter: 'Callable[[TarFile], TarFile]' = ...) -> None: ...
-    def close(self) -> None: ...
-
-def open(name: str = ..., mode: str = ..., fileobj: Any = ..., bufsize: int = ...,
-         **kwargs) -> TarFile: ...
diff --git a/typeshed/stdlib/3/tempfile.pyi b/typeshed/stdlib/3/tempfile.pyi
index ce73d8a..4398b41 100644
--- a/typeshed/stdlib/3/tempfile.pyi
+++ b/typeshed/stdlib/3/tempfile.pyi
@@ -3,7 +3,8 @@
 
 # based on http://docs.python.org/3.3/library/tempfile.html
 
-from typing import Tuple, BinaryIO
+from types import TracebackType
+from typing import BinaryIO, Optional, Tuple, Type
 
 # global variables
 tempdir = ...  # type: str
@@ -20,7 +21,7 @@ def TemporaryFile(
 def NamedTemporaryFile(
             mode: str = ..., buffering: int = ..., encoding: str = ...,
             newline: str = ..., suffix: str = ..., prefix: str = ...,
-            dir: str = ..., delete=...) -> BinaryIO:
+            dir: str = ..., delete: bool =...) -> BinaryIO:
     ...
 def SpooledTemporaryFile(
             max_size: int = ..., mode: str = ..., buffering: int = ...,
@@ -34,7 +35,9 @@ class TemporaryDirectory:
                  dir: str = ...) -> None: ...
     def cleanup(self) -> None: ...
     def __enter__(self) -> str: ...
-    def __exit__(self, type, value, traceback) -> bool: ...
+    def __exit__(self, exc_type: Optional[Type[BaseException]],
+                 exc_val: Optional[Exception],
+                 exc_tb: Optional[TracebackType]) -> bool: ...
 
 def mkstemp(suffix: str = ..., prefix: str = ..., dir: str = ...,
             text: bool = ...) -> Tuple[int, str]: ...
diff --git a/typeshed/stdlib/3/time.pyi b/typeshed/stdlib/3/time.pyi
index a1b8626..f77f9ba 100644
--- a/typeshed/stdlib/3/time.pyi
+++ b/typeshed/stdlib/3/time.pyi
@@ -1,10 +1,12 @@
 # Stubs for time
 # Ron Murawski <ron at horizonchess.com>
 
-# based on: http://docs.python.org/3.2/library/time.html#module-time
+# based on: http://docs.python.org/3.3/library/time.html#module-time
 # see: http://nullege.com/codes/search?cq=time
 
+import sys
 from typing import Tuple, Union
+from types import SimpleNamespace
 
 # ----- variables and constants -----
 accept2dyear = False
@@ -13,6 +15,14 @@ daylight = 0
 timezone = 0
 tzname = ... # type: Tuple[str, str]
 
+if sys.version_info >= (3, 3) and sys.platform != 'win32':
+    CLOCK_HIGHRES = 0  # Solaris only
+    CLOCK_MONOTONIC = 0  # Unix only
+    CLOCK_MONOTONIC_RAW = 0  # Linux 2.6.28 or later
+    CLOCK_PROCESS_CPUTIME_ID = 0  # Unix only
+    CLOCK_REALTIME = 0  # Unix only
+    CLOCK_THREAD_CPUTIME_ID = 0  # Unix only
+
 
 # ----- classes/methods -----
 class struct_time:
@@ -32,33 +42,38 @@ class struct_time:
     tm_wday = 0
     tm_yday = 0
     tm_isdst = 0
-
+    if sys.version_info >= (3, 3):
+        tm_gmtoff = 0
+        tm_zone = 'GMT'
 
 # ----- functions -----
 def asctime(t: Union[Tuple[int, int, int, int, int, int, int, int, int],
                      struct_time,
                      None] = ...) -> str: ...  # return current time
-
 def clock() -> float: ...
-
 def ctime(secs: Union[float, None] = ...) -> str: ...  # return current time
-
 def gmtime(secs: Union[float, None] = ...) -> struct_time: ...  # return current time
-
 def localtime(secs: Union[float, None] = ...) -> struct_time: ...  # return current time
-
 def mktime(t: Union[Tuple[int, int, int, int, int,
                           int, int, int, int],
                     struct_time]) -> float: ...
-
 def sleep(secs: Union[int, float]) -> None: ...
-
 def strftime(format: str, t: Union[Tuple[int, int, int, int, int,
                                          int, int, int, int],
                                    struct_time,
                                    None] = ...) -> str: ...  # return current time
-
 def strptime(string: str,
              format: str = ...) -> struct_time: ...
 def time() -> float: ...
-def tzset() -> None: ...  # Unix only
+if sys.platform != 'win32':
+    def tzset() -> None: ...  # Unix only
+
+if sys.version_info >= (3, 3):
+    def get_clock_info(str) -> SimpleNamespace: ...
+    def monotonic() -> float: ...
+    def perf_counter() -> float: ...
+    def process_time() -> float: ...
+    if sys.platform != 'win32':
+        def clock_getres(int) -> float: ...  # Unix only
+        def clock_gettime(int) -> float: ...  # Unix only
+        def clock_settime(int, struct_time) -> float: ...  # Unix only
diff --git a/typeshed/stdlib/3/typing.pyi b/typeshed/stdlib/3/typing.pyi
index e40ff42..9629020 100644
--- a/typeshed/stdlib/3/typing.pyi
+++ b/typeshed/stdlib/3/typing.pyi
@@ -6,7 +6,6 @@ from abc import abstractmethod, ABCMeta
 # Definitions of special type checking related constructs.  Their definition
 # are not used, so their value does not matter.
 
-cast = object()
 overload = object()
 Any = object()
 TypeVar = object()
@@ -16,9 +15,7 @@ Callable = object()
 Type = object()
 builtinclass = object()
 _promote = object()
-NamedTuple = object()
 no_type_check = object()
-NewType = object()
 
 # Type aliases and type constructors
 
@@ -226,16 +223,18 @@ class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]):
 
 # TODO: ContextManager (only if contextlib.AbstractContextManager exists)
 
-class Mapping(Iterable[_KT], Container[_KT], Sized, Generic[_KT, _VT]):
-    # TODO: Value type should be covariant, but currently we can't give a good signature for
-    #   get if this is the case.
+class Mapping(Iterable[_KT], Container[_KT], Sized, Generic[_KT, _VT_co]):
+    # TODO: We wish the key type could also be covariant, but that doesn't work,
+    # see discussion in https://github.com/python/typing/pull/273.
     @abstractmethod
-    def __getitem__(self, k: _KT) -> _VT: ...
+    def __getitem__(self, k: _KT) -> _VT_co:
+        ...
     # Mixin methods
-    def get(self, k: _KT, default: _VT = ...) -> _VT: ...
-    def items(self) -> AbstractSet[Tuple[_KT, _VT]]: ...
+    def get(self, k: _KT, default: _VT_co = ...) -> _VT_co:  # type: ignore
+        ...
+    def items(self) -> AbstractSet[Tuple[_KT, _VT_co]]: ...
     def keys(self) -> AbstractSet[_KT]: ...
-    def values(self) -> ValuesView[_VT]: ...
+    def values(self) -> ValuesView[_VT_co]: ...
     def __contains__(self, o: object) -> bool: ...
 
 class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
@@ -267,7 +266,7 @@ Text = str
 
 TYPE_CHECKING = True
 
-class IO(Iterable[AnyStr], Generic[AnyStr]):
+class IO(Iterator[AnyStr], Generic[AnyStr]):
     # TODO detach
     # TODO use abstract properties
     @property
@@ -311,11 +310,15 @@ class IO(Iterable[AnyStr], Generic[AnyStr]):
     def writelines(self, lines: Iterable[AnyStr]) -> None: ...
 
     @abstractmethod
+    def __next__(self) -> AnyStr: ...
+    @abstractmethod
     def __iter__(self) -> Iterator[AnyStr]: ...
     @abstractmethod
     def __enter__(self) -> 'IO[AnyStr]': ...
     @abstractmethod
-    def __exit__(self, t: type = None, value: BaseException = None, traceback: Any = None) -> bool: ...
+    def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException],
+                 # TODO: traceback should be TracebackType but that's defined in types
+                 traceback: Optional[Any]) -> bool: ...
 
 class BinaryIO(IO[bytes]):
     # TODO readinto
@@ -414,3 +417,14 @@ class Pattern(Generic[AnyStr]):
 # Functions
 
 def get_type_hints(obj: Callable) -> dict[str, Any]: ...
+
+def cast(tp: Type[_T], obj: Any) -> _T: ...
+
+# Type constructors
+
+# NamedTuple is special-cased in the type checker; the initializer is ignored.
+def NamedTuple(typename: str, fields: Iterable[Tuple[str, Any]], *,
+               verbose: bool = ..., rename: bool = ..., module: str = None) -> Type[tuple]: ...
+
+def NewType(name: str, tp: Type[_T]) -> Type[_T]: ...
+
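
The typing hunk replaces the placeholder assignments for cast, NamedTuple, and NewType with callable signatures, so calls to them type-check instead of being opaque objects. A small sketch of the two most common calls (illustrative, not part of the patch; the names are made up):

    from typing import Any, NewType, cast

    UserId = NewType("UserId", int)   # now stubbed as a function returning Type[int]
    uid = UserId(7)                   # identity function at runtime
    print(uid + 1)

    raw: Any = {"port": 8080}
    port = cast(int, raw["port"])     # cast(tp, obj) -> _T; no runtime conversion
    print(port * 2)
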
diff --git a/typeshed/stdlib/3/unittest.pyi b/typeshed/stdlib/3/unittest.pyi
index a727709..933e8f1 100644
--- a/typeshed/stdlib/3/unittest.pyi
+++ b/typeshed/stdlib/3/unittest.pyi
@@ -8,6 +8,7 @@ from typing import (
 import logging
 import sys
 from types import ModuleType, TracebackType
+from contextlib import ContextManager
 
 
 _T = TypeVar('_T')
@@ -37,7 +38,7 @@ class TestCase:
     def run(self, result: Optional[TestResult] = ...) -> TestCase: ...
     def skipTest(self, reason: Any) -> None: ...
     if sys.version_info >= (3, 4):
-        def subTest(self, msg: Any = ..., **params: Any) -> None: ...
+        def subTest(self, msg: Any = ..., **params: Any) -> ContextManager[None]: ...
     def debug(self) -> None: ...
     def assertEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ...
     def assertNotEqual(self, first: Any, second: Any,
diff --git a/typeshed/tests/mypy_test.py b/typeshed/tests/mypy_test.py
index dc02246..90dd737 100755
--- a/typeshed/tests/mypy_test.py
+++ b/typeshed/tests/mypy_test.py
@@ -32,7 +32,10 @@ def log(args, *varargs):
         print(*varargs)
 
 
-def match(args, fn):
+def match(fn, args, blacklist):
+    if blacklist.match(fn):
+        log(args, fn, 'excluded by blacklist')
+        return False
     if not args.filter and not args.exclude:
         log(args, fn, 'accept by default')
         return True
@@ -70,6 +73,10 @@ def libpath(major, minor):
 def main():
     args = parser.parse_args()
 
+    with open(os.path.join(os.path.dirname(__file__), "mypy_blacklist.txt")) as f:
+        blacklist = re.compile("(%s)$" % "|".join(
+            re.findall(r"^\s*([^\s#]+)\s*(?:#.*)?$", f.read(), flags=re.M)))
+
     try:
         from mypy.main import main as mypy_main
     except ImportError:
@@ -95,10 +102,10 @@ def main():
             for name in names:
                 full = os.path.join(root, name)
                 mod, ext = os.path.splitext(name)
-                if mod in seen:
+                if mod in seen or mod.startswith('.'):
                     continue
                 if ext in ['.pyi', '.py']:
-                    if match(args, full):
+                    if match(full, args, blacklist):
                         seen.add(mod)
                         files.append(full)
                 elif (os.path.isfile(os.path.join(full, '__init__.pyi')) or
@@ -110,14 +117,15 @@ def main():
                             m, x = os.path.splitext(f)
                             if x in ['.pyi', '.py']:
                                 fn = os.path.join(r, f)
-                                if match(args, fn):
+                                if match(fn, args, blacklist):
                                     seen.add(mod)
                                     files.append(fn)
         if files:
             runs += 1
             flags = ['--python-version', '%d.%d' % (major, minor)]
             flags.append('--strict-optional')
-            ##flags.append('--warn-unused-ignores')
+            ##flags.append('--fast-parser')  # Travis CI doesn't have typed_ast yet.
+            ##flags.append('--warn-unused-ignores')  # Fast parser and regular parser disagree.
             sys.argv = ['mypy'] + flags + files
             if args.verbose:
                 print("running", ' '.join(sys.argv))
diff --git a/typeshed/third_party/2.7/itsdangerous.pyi b/typeshed/third_party/2.7/itsdangerous.pyi
new file mode 100644
index 0000000..186d531
--- /dev/null
+++ b/typeshed/third_party/2.7/itsdangerous.pyi
@@ -0,0 +1,153 @@
+# Stubs for itsdangerous (Python 2.7)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from datetime import datetime
+from itertools import izip
+from typing import Any, Callable, IO, MutableMapping, Optional, Text, Tuple, Union
+
+PY2 = ...  # type: bool
+text_type = unicode
+int_to_byte = chr
+number_types = (int, long, float)
+
+bytes_like = Union[bytearray, str]
+
+class _CompactJSON:
+    def loads(self, payload: Text) -> Any: ...
+    def dumps(self, obj: Any) -> Text: ...
+
+compact_json = _CompactJSON
+EPOCH = ...  # type: int
+
+def want_bytes(s: str, encoding='', errors='') -> str: ...
+def is_text_serializer(serializer: Any) -> bool: ...
+def constant_time_compare(val1: bytes_like, val2: bytes_like) -> bool: ...
+
+class BadData(Exception):
+    message = ...  # type: str
+    def __init__(self, message: str) -> None: ...
+
+class BadPayload(BadData):
+    original_error = ...  # type: Optional[Exception]
+    def __init__(self, message: str, original_error: Optional[Exception]=None) -> None: ...
+
+class BadSignature(BadData):
+    payload = ...  # type: Optional[Any]
+    def __init__(self, message: str, payload: Optional[Any]=None) -> None: ...
+
+class BadTimeSignature(BadSignature):
+    date_signed = ...  # type: Optional[int]
+    def __init__(self, message, payload: Optional[Any]=None, date_signed: Optional[int]=None) -> None: ...
+
+class BadHeader(BadSignature):
+    header = ...  # type: Any
+    original_error = ...  # type: Any
+    def __init__(self, message, payload=None, header=None, original_error=None) -> None: ...
+
+class SignatureExpired(BadTimeSignature): ...
+
+def base64_encode(string: bytes_like) -> str: ...
+def base64_decode(string: bytes_like) -> str: ...
+def int_to_bytes(num: int) -> str: ...
+def bytes_to_int(bytestr: bytes_like) -> int: ...
+
+class SigningAlgorithm:
+    def get_signature(self, key: bytes_like, value: bytes_like) -> str: ...
+    def verify_signature(self, key: bytes_like, value: bytes_like, sig: bytes_like) -> bool: ...
+
+class NoneAlgorithm(SigningAlgorithm):
+    def get_signature(self, key: bytes_like, value: bytes_like) -> str: ...
+
+class HMACAlgorithm(SigningAlgorithm):
+    default_digest_method = ...  # type: Callable
+    digest_method = ...  # type: Callable
+    def __init__(self, digest_method: Optional[Callable]=None) -> None: ...
+    def get_signature(self, key: bytes_like, value: bytes_like) -> str: ...
+
+class Signer:
+    default_digest_method = ...  # type: Callable
+    default_key_derivation = ...  # type: str
+    secret_key = ...  # type: bytes_like
+    sep = ...  # type: str
+    salt = ...  # type: bytes_like
+    key_derivation = ...  # type: str
+    digest_method = ...  # type: Callable
+    algorithm = ...  # type: SigningAlgorithm
+    def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like]=None, sep: Optional[str]='',
+                 key_derivation: Optional[str]=None,
+                 digest_method: Optional[Callable]=None,
+                 algorithm: Optional[SigningAlgorithm]=None) -> None: ...
+    def derive_key(self) -> str: ...
+    def get_signature(self, value: bytes_like) -> str: ...
+    def sign(self, value: bytes_like) -> str: ...
+    def verify_signature(self, value: bytes_like, sig: bytes_like) -> bool: ...
+    def unsign(self, signed_value: str) -> str: ...
+    def validate(self, signed_value: str) -> bool: ...
+
+class TimestampSigner(Signer):
+    def get_timestamp(self) -> int: ...
+    def timestamp_to_datetime(self, ts: int) -> datetime: ...
+    def sign(self, value: bytes_like) -> str: ...
+    def unsign(self, value: str, max_age: Optional[int]=None, return_timestamp=False) -> Any: ...
+    def validate(self, signed_value: str, max_age: Optional[int]=None) -> bool: ...
+
+class Serializer:
+    default_serializer = ...  # type: Any
+    default_signer = ...  # type: Callable[..., Signer]
+    secret_key = ...  # type: Any
+    salt = ...  # type: bytes_like
+    serializer = ...  # type: Any
+    is_text_serializer = ...  # type: bool
+    signer = ...  # type: Signer
+    signer_kwargs = ...  # type: MutableMapping
+    def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like]=b'', serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None) -> None: ...
+    def load_payload(self, payload: Any, serializer=None) -> Any: ...
+    def dump_payload(self, *args, **kwargs) -> str: ...
+    def make_signer(self, salt: Optional[bytes_like]=None) -> Signer: ...
+    def dumps(self, obj: Any, salt: Optional[bytes_like]=None) -> str: ...
+    def dump(self, obj: Any, f: IO[str], salt: Optional[bytes_like]=None) -> None: ...
+    def loads(self, s: str, salt: Optional[bytes_like]=None) -> Any: ...
+    def load(self, f: IO[str], salt: Optional[bytes_like]=None): ...
+    def loads_unsafe(self, s, salt: Optional[bytes_like]=None) -> Tuple[bool, Any]: ...
+    def load_unsafe(self, f: IO[str], *args, **kwargs) -> Tuple[bool, Any]: ...
+
+class TimedSerializer(Serializer):
+    default_signer = ...  # type: Callable[..., TimestampSigner]
+    def loads(self, s: str, salt: Optional[bytes_like]=None, max_age: Optional[int]=None, return_timestamp=False) -> Any: ...
+    def loads_unsafe(self, s: str, salt: Optional[bytes_like]=None, max_age: Optional[int]=None) -> Tuple[bool, Any]: ...
+
+class JSONWebSignatureSerializer(Serializer):
+    jws_algorithms = ...  # type: MutableMapping[str, SigningAlgorithm]
+    default_algorithm = ...  # type: str
+    default_serializer = ...  # type: Any
+    algorithm_name = ...  # type: str
+    algorithm = ...  # type: Any
+    def __init__(self, secret_key: bytes_like, salt: Optional[bytes_like]=None, serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None, algorithm_name: Optional[str]=None) -> None: ...
+    def load_payload(self, payload: Any, return_header=False) -> Any: ...
+    def dump_payload(self, *args, **kwargs) -> str: ...
+    def make_algorithm(self, algorithm_name: str) -> SigningAlgorithm: ...
+    def make_signer(self, salt: Optional[bytes_like]=None, algorithm_name: Optional[str]=None) -> Signer: ...
+    def make_header(self, header_fields: Optional[MutableMapping]=None) -> MutableMapping: ...
+    def dumps(self, obj: Any, salt: Optional[bytes_like]=None, header_fields: Optional[MutableMapping]=None) -> str: ...
+    def loads(self, s: str, salt: Optional[bytes_like]=None, return_header=False) -> Any: ...
+    def loads_unsafe(self, s, salt: Optional[bytes_like]=None, return_header=False) -> Tuple[bool, Any]: ...
+
+class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer):
+    DEFAULT_EXPIRES_IN = ...  # type: int
+    expires_in = ...  # type: int
+    def __init__(self, secret_key: bytes_like, expires_in: Optional[int]=None, **kwargs) -> None: ...
+    def make_header(self, header_fields: Optional[MutableMapping]=None) -> MutableMapping: ...
+    def loads(self, s: str, salt: Optional[bytes_like]=None, return_header=False) -> Any: ...
+    def get_issue_date(self, header: MutableMapping) -> Optional[datetime]: ...
+    def now(self) -> int: ...
+
+class URLSafeSerializerMixin:
+    def load_payload(self, payload: Any, **kwargs) -> Any: ...
+    def dump_payload(self, *args, **kwargs) -> str: ...
+
+class URLSafeSerializer(URLSafeSerializerMixin, Serializer):
+    default_serializer = ...  # type: Any
+
+class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer):
+    default_serializer = ...  # type: Any
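
A hedged usage sketch of the signing API this stub describes; the key and salt values are placeholders.

    from itsdangerous import BadSignature, Signer

    signer = Signer('secret-key', salt='activate-account')
    token = signer.sign('user-42')        # value plus separator and signature

    try:
        value = signer.unsign(token)      # 'user-42' if the signature verifies
    except BadSignature:
        value = None
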
diff --git a/typeshed/third_party/2.7/requests/sessions.pyi b/typeshed/third_party/2.7/requests/sessions.pyi
index 1791283..35ea3f7 100644
--- a/typeshed/third_party/2.7/requests/sessions.pyi
+++ b/typeshed/third_party/2.7/requests/sessions.pyi
@@ -1,6 +1,6 @@
-# Stubs for requests.sessions (Python 3)
+# Stubs for requests.sessions (Python 2.7)
 
-from typing import Any, Union, MutableMapping
+from typing import Any, Union, MutableMapping, Text, Optional, IO, Tuple, Callable
 from . import auth
 from . import compat
 from . import cookies
@@ -55,27 +55,39 @@ class SessionRedirectMixin:
 
 class Session(SessionRedirectMixin):
     __attrs__ = ... # type: Any
-    headers = ... # type: MutableMapping[str, str]
-    auth = ... # type: Any
-    proxies = ... # type: Any
-    hooks = ... # type: Any
-    params = ... # type: Any
-    stream = ... # type: Any
-    verify = ... # type: Any
-    cert = ... # type: Any
-    max_redirects = ... # type: Any
-    trust_env = ... # type: Any
-    cookies = ... # type: Any
-    adapters = ... # type: Any
-    redirect_cache = ... # type: Any
+    headers = ... # type: Optional[MutableMapping[Text, Text]]
+    auth = ... # type: Union[None, Tuple[Text, Text], Callable[[Request], Request]]
+    proxies = ... # type: Optional[MutableMapping[Text, Text]]
+    hooks = ... # type: Optional[MutableMapping[Text, Callable[[Request], Any]]]
+    params = ... # type: Union[None, bytes, MutableMapping[Text, Text]]
+    stream = ... # type: bool
+    verify = ... # type: bool
+    cert = ... # type: Union[None, Text, Tuple[Text, Text]]
+    max_redirects = ... # type: int
+    trust_env = ... # type: bool
+    cookies = ... # type: Union[None, RequestsCookieJar, MutableMapping[Text, Text]]
+    adapters = ... # type: MutableMapping
+    redirect_cache = ... # type: RecentlyUsedContainer
     def __init__(self) -> None: ...
     def __enter__(self) -> 'Session': ...
     def __exit__(self, *args) -> None: ...
     def prepare_request(self, request): ...
-    def request(self, method: str, url: str, params=..., data=..., headers=...,
-                cookies=..., files=..., auth=..., timeout=..., allow_redirects=...,
-                proxies=..., hooks=..., stream=..., verify=..., cert=...,
-                json=...) -> Response: ...
+    def request(self, method: str, url: str,
+                params,  # type: Union[None, bytes, MutableMapping[Text, Text]]
+                data,  # type: Union[None, bytes, MutableMapping[Text, Text], IO]
+                headers,  # type: Optional[MutableMapping[Text, Text]]
+                cookies,  # type: Union[None, RequestsCookieJar, MutableMapping[Text, Text]]
+                files,  # type: Optional[MutableMapping[Text, IO]]
+                auth,  # type: Union[None, Tuple[Text, Text], Callable[[Request], Request]]
+                timeout,  # type: Union[None, float, Tuple[float, float]]
+                allow_redirects,  # type: Optional[bool]
+                proxies,  # type: Optional[MutableMapping[Text, Text]]
+                hooks,  # type: Optional[MutableMapping[Text, Callable[[Request], Any]]]
+                stream,  # type: Optional[bool]
+                verify,  # type: Optional[bool]
+                cert,  # type: Union[Text, Tuple[Text, Text], None]
+                json  # type: Optional[MutableMapping]
+                ) -> Response: ...
     def get(self, url: str, **kwargs) -> Response: ...
     def options(self, url: str, **kwargs) -> Response: ...
     def head(self, url: str, **kwargs) -> Response: ...
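
A short usage sketch of a Session with the attributes typed above; the URL and header values are placeholders.

    import requests

    session = requests.Session()
    session.headers['User-Agent'] = 'typeshed-example'  # headers: MutableMapping[Text, Text]
    session.max_redirects = 5                           # max_redirects: int
    session.verify = True                               # verify: bool

    response = session.get('https://example.org/', timeout=(3.0, 10.0))
    print(response.status_code)
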
diff --git a/typeshed/third_party/2.7/six/__init__.pyi b/typeshed/third_party/2.7/six/__init__.pyi
index 0f6acfc..5f5383b 100644
--- a/typeshed/third_party/2.7/six/__init__.pyi
+++ b/typeshed/third_party/2.7/six/__init__.pyi
@@ -77,7 +77,7 @@ def assertRaisesRegex(self: unittest.TestCase, msg: str = ...) -> Any: ...
 def assertRaisesRegex(self: unittest.TestCase, callable_obj: Callable[..., Any], *args: Any, **kwargs: Any) -> Any: ...
 def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[AnyStr, Pattern[AnyStr]], msg: str = ...) -> None: ...
 
-def reraise(tp: type, value: Optional[BaseException], tb: types.TracebackType = ...) -> None: ...
+def reraise(tp: type, value: Optional[BaseException], tb: Optional[types.TracebackType] = ...) -> None: ...
 def exec_(_code_: Union[unicode, types.CodeType], _globs_: Dict[str, Any] = ..., _locs_: Dict[str, Any] = ...): ...
 def raise_from(value: BaseException, from_value: BaseException) -> None: ...
 
diff --git a/typeshed/third_party/2and3/ujson.pyi b/typeshed/third_party/2and3/ujson.pyi
index 147f67a..37697af 100644
--- a/typeshed/third_party/2and3/ujson.pyi
+++ b/typeshed/third_party/2and3/ujson.pyi
@@ -1,6 +1,6 @@
 # Stubs for ujson
 # See: https://pypi.python.org/pypi/ujson
-from typing import Any, IO, Optional
+from typing import Any, AnyStr, IO, Optional
 
 __version__ = ...  # type: str
 
@@ -32,14 +32,14 @@ def dump(obj: Any,
     indent: int = ...,
     ) -> None: ...
 
-def decode(s: str,
+def decode(s: AnyStr,
     precise_float: bool = ...,
     ) -> Any: ...
 
-def loads(s: str,
+def loads(s: AnyStr,
     precise_float: bool = ...,
     ) -> Any: ...
 
-def load(fp: IO[str],
+def load(fp: IO[AnyStr],
     precise_float: bool = ...,
     ) -> Any: ...
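
A tiny example, assuming ujson is installed, of what the AnyStr change allows:

    import ujson

    print(ujson.loads('{"a": 1}'))    # str input
    print(ujson.loads(b'{"a": 1}'))   # bytes input, now covered by AnyStr
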
diff --git a/typeshed/third_party/3/enum.pyi b/typeshed/third_party/3/enum.pyi
index 4032e81..ac04a4a 100644
--- a/typeshed/third_party/3/enum.pyi
+++ b/typeshed/third_party/3/enum.pyi
@@ -12,7 +12,8 @@ class Enum:
     name = ...  # type: str
     value = ...  # type: Any
 
-class IntEnum(int, Enum): ...
+class IntEnum(int, Enum):
+    value = ...  # type: int
 
 _T = TypeVar('_T')
 
diff --git a/typeshed/third_party/3/itsdangerous.pyi b/typeshed/third_party/3/itsdangerous.pyi
new file mode 100644
index 0000000..067fbdd
--- /dev/null
+++ b/typeshed/third_party/3/itsdangerous.pyi
@@ -0,0 +1,156 @@
+# Stubs for itsdangerous (Python 3)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from datetime import datetime
+from typing import Any, Callable, IO, MutableMapping, Optional, Text, Tuple, TypeVar, Union
+
+PY2 = ...  # type: bool
+text_type = str
+int_to_byte = Callable[[int], bytes]
+number_types = (int, float)
+izip = zip
+
+bytes_like = Union[bytearray, bytes]
+str_like = Union[str, bytes]
+can_become_bytes = Union[str, bytes, bytearray]
+comparable_bytes = TypeVar('comparable_bytes', str, Union[bytes, bytearray])
+
+class _CompactJSON:
+    def loads(self, payload: Text) -> Any: ...
+    def dumps(self, obj: Any) -> Text: ...
+
+compact_json = _CompactJSON
+EPOCH = ...  # type: int
+
+def want_bytes(s: can_become_bytes, encoding='', errors='') -> bytes: ...
+def is_text_serializer(serializer: Any) -> bool: ...
+def constant_time_compare(val1: comparable_bytes, val2: comparable_bytes) -> bool: ...
+
+class BadData(Exception):
+    message = ...  # type: str
+    def __init__(self, message: str) -> None: ...
+
+class BadPayload(BadData):
+    original_error = ...  # type: Optional[Exception]
+    def __init__(self, message: str, original_error: Optional[Exception]=None) -> None: ...
+
+class BadSignature(BadData):
+    payload = ...  # type: Optional[Any]
+    def __init__(self, message: str, payload: Optional[Any]=None) -> None: ...
+
+class BadTimeSignature(BadSignature):
+    date_signed = ...  # type: Optional[int]
+    def __init__(self, message, payload: Optional[Any]=None, date_signed: Optional[int]=None) -> None: ...
+
+class BadHeader(BadSignature):
+    header = ...  # type: Any
+    original_error = ...  # type: Any
+    def __init__(self, message, payload=None, header=None, original_error=None) -> None: ...
+
+class SignatureExpired(BadTimeSignature): ...
+
+def base64_encode(string: can_become_bytes) -> bytes: ...
+def base64_decode(string: can_become_bytes) -> bytes: ...
+def int_to_bytes(num: int) -> bytes: ...
+def bytes_to_int(bytestr: can_become_bytes) -> int: ...
+
+class SigningAlgorithm:
+    def get_signature(self, key: bytes_like, value: bytes_like) -> bytes: ...
+    def verify_signature(self, key: bytes_like, value: bytes_like, sig: can_become_bytes) -> bool: ...
+
+class NoneAlgorithm(SigningAlgorithm):
+    def get_signature(self, key: bytes_like, value: bytes_like) -> bytes: ...
+
+class HMACAlgorithm(SigningAlgorithm):
+    default_digest_method = ...  # type: Callable
+    digest_method = ...  # type: Callable
+    def __init__(self, digest_method: Optional[Callable]=None) -> None: ...
+    def get_signature(self, key: bytes_like, value: bytes_like) -> bytes: ...
+
+class Signer:
+    default_digest_method = ...  # type: Callable
+    default_key_derivation = ...  # type: str
+    secret_key = ...  # type: can_become_bytes
+    sep = ...  # type: can_become_bytes
+    salt = ...  # type: can_become_bytes
+    key_derivation = ...  # type: str
+    digest_method = ...  # type: Callable
+    algorithm = ...  # type: SigningAlgorithm
+    def __init__(self, secret_key: can_become_bytes, salt: Optional[can_become_bytes]=None, sep: Optional[can_become_bytes]='',
+                 key_derivation: Optional[str]=None,
+                 digest_method: Optional[Callable]=None,
+                 algorithm: Optional[SigningAlgorithm]=None) -> None: ...
+    def derive_key(self) -> bytes: ...
+    def get_signature(self, value: bytes_like) -> bytes: ...
+    def sign(self, value: bytes_like) -> bytes: ...
+    def verify_signature(self, value: bytes_like, sig: can_become_bytes) -> bool: ...
+    def unsign(self, signed_value: can_become_bytes) -> str: ...
+    def validate(self, signed_value: can_become_bytes) -> bool: ...
+
+class TimestampSigner(Signer):
+    def get_timestamp(self) -> int: ...
+    def timestamp_to_datetime(self, ts: int) -> datetime: ...
+    def sign(self, value: bytes_like) -> bytes: ...
+    def unsign(self, value: can_become_bytes, max_age: Optional[int]=None, return_timestamp=False) -> Any: ...
+    def validate(self, signed_value: can_become_bytes, max_age: Optional[int]=None) -> bool: ...
+
+class Serializer:
+    default_serializer = ...  # type: Any
+    default_signer = ...  # type: Callable[..., Signer]
+    secret_key = ...  # type: Any
+    salt = ...  # type: can_become_bytes
+    serializer = ...  # type: Any
+    is_text_serializer = ...  # type: bool
+    signer = ...  # type: Signer
+    signer_kwargs = ...  # type: MutableMapping
+    def __init__(self, secret_key: can_become_bytes, salt: Optional[can_become_bytes]=b'', serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None) -> None: ...
+    def load_payload(self, payload: Any, serializer=None) -> Any: ...
+    def dump_payload(self, *args, **kwargs) -> bytes: ...
+    def make_signer(self, salt: Optional[can_become_bytes]=None) -> Signer: ...
+    def dumps(self, obj: Any, salt: Optional[can_become_bytes]=None) -> str_like: ...
+    def dump(self, obj: Any, f: IO, salt: Optional[can_become_bytes]=None) -> None: ...
+    def loads(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None) -> Any: ...
+    def load(self, f: IO, salt: Optional[can_become_bytes]=None): ...
+    def loads_unsafe(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None) -> Tuple[bool, Any]: ...
+    def load_unsafe(self, f: IO, *args, **kwargs) -> Tuple[bool, Any]: ...
+
+class TimedSerializer(Serializer):
+    default_signer = ...  # type: Callable[..., TimestampSigner]
+    def loads(self, s: can_become_bytes,  salt: Optional[can_become_bytes]=None, max_age: Optional[int]=None, return_timestamp=False) -> Any: ...
+    def loads_unsafe(self, s: can_become_bytes,  salt: Optional[can_become_bytes]=None, max_age: Optional[int]=None) -> Tuple[bool, Any]: ...
+
+class JSONWebSignatureSerializer(Serializer):
+    jws_algorithms = ...  # type: MutableMapping[str, SigningAlgorithm]
+    default_algorithm = ...  # type: str
+    default_serializer = ...  # type: Any
+    algorithm_name = ...  # type: str
+    algorithm = ...  # type: Any
+    def __init__(self, secret_key: can_become_bytes, salt: Optional[can_become_bytes]=None, serializer=None, signer: Optional[Callable[..., Signer]]=None, signer_kwargs: Optional[MutableMapping]=None, algorithm_name: Optional[str]=None) -> None: ...
+    def load_payload(self, payload: Any, return_header=False) -> Any: ...
+    def dump_payload(self, *args, **kwargs) -> bytes: ...
+    def make_algorithm(self, algorithm_name: str) -> SigningAlgorithm: ...
+    def make_signer(self, salt: Optional[can_become_bytes]=None, algorithm_name: Optional[str]=None) -> Signer: ...
+    def make_header(self, header_fields: Optional[MutableMapping]=None) -> MutableMapping: ...
+    def dumps(self, obj: Any, salt: Optional[can_become_bytes]=None, header_fields: Optional[MutableMapping]=None) -> str: ...
+    def loads(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None, return_header=False) -> Any: ...
+    def loads_unsafe(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None, return_header=False) -> Tuple[bool, Any]: ...
+
+class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer):
+    DEFAULT_EXPIRES_IN = ...  # type: int
+    expires_in = ...  # type: int
+    def __init__(self, secret_key: can_become_bytes, expires_in: Optional[int]=None, **kwargs) -> None: ...
+    def make_header(self, header_fields: Optional[MutableMapping]=None) -> MutableMapping: ...
+    def loads(self, s: can_become_bytes, salt: Optional[can_become_bytes]=None, return_header=False) -> Any: ...
+    def get_issue_date(self, header: MutableMapping) -> Optional[datetime]: ...
+    def now(self) -> int: ...
+
+class URLSafeSerializerMixin:
+    def load_payload(self, payload: Any, **kwargs) -> Any: ...
+    def dump_payload(self, *args, **kwargs) -> bytes: ...
+
+class URLSafeSerializer(URLSafeSerializerMixin, Serializer):
+    default_serializer = ...  # type: Any
+
+class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer):
+    default_serializer = ...  # type: Any
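
A hedged sketch of the timed-serializer API covered above; the secret and salt values are placeholders.

    from itsdangerous import SignatureExpired, URLSafeTimedSerializer

    serializer = URLSafeTimedSerializer('secret-key', salt='email-confirm')
    token = serializer.dumps({'user_id': 42})

    try:
        data = serializer.loads(token, max_age=3600)  # reject tokens older than 1 hour
    except SignatureExpired:
        data = None
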
diff --git a/typeshed/third_party/3/requests/sessions.pyi b/typeshed/third_party/3/requests/sessions.pyi
index 9c17ed5..9bc3a0e 100644
--- a/typeshed/third_party/3/requests/sessions.pyi
+++ b/typeshed/third_party/3/requests/sessions.pyi
@@ -1,6 +1,6 @@
 # Stubs for requests.sessions (Python 3)
 
-from typing import Any, Union, MutableMapping
+from typing import Any, Union, MutableMapping, Text, Optional, IO, Tuple, Callable
 from . import auth
 from . import compat
 from . import cookies
@@ -55,27 +55,39 @@ class SessionRedirectMixin:
 
 class Session(SessionRedirectMixin):
     __attrs__ = ... # type: Any
-    headers = ... # type: MutableMapping[str, str]
-    auth = ... # type: Any
-    proxies = ... # type: Any
-    hooks = ... # type: Any
-    params = ... # type: Any
-    stream = ... # type: Any
-    verify = ... # type: Any
-    cert = ... # type: Any
-    max_redirects = ... # type: Any
-    trust_env = ... # type: Any
-    cookies = ... # type: Any
-    adapters = ... # type: Any
-    redirect_cache = ... # type: Any
+    headers = ... # type: Optional[MutableMapping[Text, Text]]
+    auth = ... # type: Union[None, Tuple[Text, Text], Callable[[Request], Request]]
+    proxies = ... # type: Optional[MutableMapping[Text, Text]]
+    hooks = ... # type: Optional[MutableMapping[Text, Callable[[Request], Any]]]
+    params = ... # type: Union[None, bytes, MutableMapping[Text, Text]]
+    stream = ... # type: bool
+    verify = ... # type: bool
+    cert = ... # type: Union[None, Text, Tuple[Text, Text]]
+    max_redirects = ... # type: int
+    trust_env = ... # type: bool
+    cookies = ... # type: Union[None, RequestsCookieJar, MutableMapping[Text, Text]]
+    adapters = ... # type: MutableMapping
+    redirect_cache = ... # type: RecentlyUsedContainer
     def __init__(self) -> None: ...
     def __enter__(self) -> 'Session': ...
     def __exit__(self, *args) -> None: ...
     def prepare_request(self, request): ...
-    def request(self, method: str, url: Union[str, bytes], params=..., data=..., headers=...,
-                cookies=..., files=..., auth=..., timeout=..., allow_redirects=...,
-                proxies=..., hooks=..., stream=..., verify=..., cert=...,
-                json=...) -> Response: ...
+    def request(self, method: str, url: str,
+                params,  # type: Union[None, bytes, MutableMapping[Text, Text]]
+                data,  # type: Union[None, bytes, MutableMapping[Text, Text], IO]
+                headers,  # type: Optional[MutableMapping[Text, Text]]
+                cookies,  # type: Union[None, RequestsCookieJar, MutableMapping[Text, Text]]
+                files,  # type: Optional[MutableMapping[Text, IO]]
+                auth,  # type: Union[None, Tuple[Text, Text], Callable[[Request], Request]]
+                timeout,  # type: Union[None, float, Tuple[float, float]]
+                allow_redirects,  # type: Optional[bool]
+                proxies,  # type: Optional[MutableMapping[Text, Text]]
+                hooks,  # type: Optional[MutableMapping[Text, Callable[[Request], Any]]]
+                stream,  # type: Optional[bool]
+                verify,  # type: Optional[bool]
+                cert,  # type: Union[Text, Tuple[Text, Text], None]
+                json  # type: Optional[MutableMapping]
+                ) -> Response: ...
     def get(self, url: Union[str, bytes], **kwargs) -> Response: ...
     def options(self, url: Union[str, bytes], **kwargs) -> Response: ...
     def head(self, url: Union[str, bytes], **kwargs) -> Response: ...
diff --git a/typeshed/third_party/3/six/__init__.pyi b/typeshed/third_party/3/six/__init__.pyi
index 0607cc1..0e7eb93 100644
--- a/typeshed/third_party/3/six/__init__.pyi
+++ b/typeshed/third_party/3/six/__init__.pyi
@@ -91,7 +91,7 @@ def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[Any
 
 exec_ = exec
 
-def reraise(tp: type, value: Optional[BaseException], tb: types.TracebackType = None) -> None: ...
+def reraise(tp: type, value: Optional[BaseException], tb: Optional[types.TracebackType] = None) -> None: ...
 def raise_from(value: BaseException, from_value: BaseException) -> None: ...
 
 print_ = print
diff --git a/typeshed/third_party/3/typed_ast/ast27.pyi b/typeshed/third_party/3/typed_ast/ast27.pyi
index 40b44da..8e88972 100644
--- a/typeshed/third_party/3/typed_ast/ast27.pyi
+++ b/typeshed/third_party/3/typed_ast/ast27.pyi
@@ -347,6 +347,7 @@ class arguments(AST):
     vararg = ...  # type: Optional[identifier]
     kwarg = ...  # type: Optional[identifier]
     defaults = ...  # type: typing.List[expr]
+    type_comments = ...  # type: typing.List[str]
 
 class keyword(AST):
     arg = ...  # type: identifier
diff --git a/typeshed/third_party/3/typed_ast/ast35.pyi b/typeshed/third_party/3/typed_ast/ast35.pyi
index 738ac7d..04e8a29 100644
--- a/typeshed/third_party/3/typed_ast/ast35.pyi
+++ b/typeshed/third_party/3/typed_ast/ast35.pyi
@@ -88,8 +88,9 @@ class Delete(stmt):
 
 class Assign(stmt):
     targets = ...  # type: typing.List[expr]
-    value = ...  # type: expr
+    value = ...  # type: Optional[expr]
     type_comment = ...  # type: Optional[str]
+    annotation = ...  # type: Optional[expr]
 
 class AugAssign(stmt):
     target = ...  # type: expr
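
A brief, assumed sketch of the type-comment machinery these stubs describe: parsing a source line that carries a type comment populates the Assign node's type_comment field.

    from typed_ast import ast35

    tree = ast35.parse("x = 1  # type: int\n")
    assign = tree.body[0]
    print(assign.type_comment)   # 'int'
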

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/mypy.git


