[med-svn] [mypy] 01/03: New upstream version 0.530

Michael Crusoe misterc-guest at moszumanska.debian.org
Sat Oct 7 17:19:26 UTC 2017


This is an automated email from the git hooks/post-receive script.

misterc-guest pushed a commit to branch master
in repository mypy.

commit 854532612fa614ab63d50c6c5ecd85d92ab8188a
Author: Michael R. Crusoe <michael.crusoe at gmail.com>
Date:   Sat Oct 7 10:15:38 2017 -0700

    New upstream version 0.530
---
 .gitignore                                         |   30 -
 .gitmodules                                        |    3 -
 .travis.yml                                        |   23 -
 CONTRIBUTING.md                                    |  163 --
 CREDITS                                            |  101 -
 LICENSE                                            |  227 --
 MANIFEST.in                                        |    6 +-
 PKG-INFO                                           |    5 +-
 README.md                                          |   58 +-
 ROADMAP.md                                         |   96 -
 appveyor.yml                                       |   41 -
 build-requirements.txt                             |    2 -
 conftest.py                                        |   13 -
 docs/source/basics.rst                             |    2 +-
 docs/source/class_basics.rst                       |  179 +-
 docs/source/command_line.rst                       |    6 +
 docs/source/common_issues.rst                      |   42 +
 docs/source/config_file.rst                        |   15 +-
 docs/source/faq.rst                                |   37 +-
 docs/source/function_overloading.rst               |   25 +-
 docs/source/generics.rst                           |  103 +-
 docs/source/revision_history.rst                   |   14 +
 docs/source/type_inference_and_annotations.rst     |   16 +
 extensions/setup.cfg                               |    2 +
 extensions/setup.py                                |    4 +-
 misc/actions_stubs.py                              |  111 -
 misc/analyze_cache.py                              |  189 --
 misc/async_matrix.py                               |  120 --
 misc/fix_annotate.py                               |  219 --
 misc/incremental_checker.py                        |  356 ----
 misc/macs.el                                       |   22 -
 misc/perf_checker.py                               |   93 -
 misc/remove-eol-whitespace.sh                      |    8 -
 misc/test_case_to_actual.py                        |   71 -
 misc/touch_checker.py                              |  151 --
 misc/upload-pypi.py                                |  160 --
 misc/variadics.py                                  |   54 -
 mypy.egg-info/PKG-INFO                             |    5 +-
 mypy.egg-info/SOURCES.txt                          |   61 +-
 mypy.egg-info/requires.txt                         |    5 +-
 mypy/applytype.py                                  |   24 +-
 mypy/binder.py                                     |   63 +-
 mypy/build.py                                      |  141 +-
 mypy/checker.py                                    |  453 ++--
 mypy/checkexpr.py                                  |  477 +++--
 mypy/checkmember.py                                |   47 +-
 mypy/checkstrformat.py                             |   14 +-
 mypy/constraints.py                                |   93 +-
 mypy/defaults.py                                   |    1 +
 mypy/erasetype.py                                  |   12 +-
 mypy/errors.py                                     |   41 +-
 mypy/exprtotype.py                                 |    4 +-
 mypy/fastparse.py                                  |  180 +-
 mypy/fastparse2.py                                 |  107 +-
 mypy/indirection.py                                |    3 +
 mypy/infer.py                                      |    4 +-
 mypy/join.py                                       |   35 +-
 mypy/literals.py                                   |  233 +++
 mypy/main.py                                       |   45 +-
 mypy/maptype.py                                    |    8 +-
 mypy/meet.py                                       |   27 +-
 mypy/messages.py                                   |  656 ++++--
 mypy/myunit/__init__.py                            |   26 +-
 mypy/nodes.py                                      |  297 +--
 mypy/options.py                                    |   21 +-
 mypy/parse.py                                      |    2 +-
 mypy/plugin.py                                     |    4 +-
 mypy/report.py                                     |  130 +-
 mypy/sametypes.py                                  |    4 +-
 mypy/semanal.py                                    |  830 ++++++--
 mypy/server/astdiff.py                             |    9 +-
 mypy/server/astmerge.py                            |   16 +-
 mypy/server/aststrip.py                            |   14 +-
 mypy/server/deps.py                                |   19 +-
 mypy/server/update.py                              |   14 +-
 mypy/sharedparse.py                                |    6 +-
 mypy/solve.py                                      |    8 +-
 mypy/stats.py                                      |  132 +-
 mypy/strconv.py                                    |   10 +-
 mypy/stubgen.py                                    |  389 +++-
 mypy/stubgenc.py                                   |    6 +-
 mypy/subtypes.py                                   |  344 ++-
 mypy/test/data.py                                  |   13 +-
 mypy/test/helpers.py                               |    1 +
 mypy/test/testcheck.py                             |    7 +-
 mypy/test/testcmdline.py                           |   11 +-
 mypy/test/testdeps.py                              |    2 -
 mypy/test/testdiff.py                              |    2 -
 mypy/test/testfinegrained.py                       |    3 -
 mypy/test/testinfer.py                             |   18 +-
 mypy/test/testmerge.py                             |   11 +-
 mypy/test/testparse.py                             |   20 +-
 mypy/test/testpythoneval.py                        |   25 +-
 mypy/test/testsemanal.py                           |   37 +-
 mypy/test/teststubgen.py                           |   12 +-
 mypy/test/testtransform.py                         |   14 +-
 mypy/test/testtypegen.py                           |   14 +-
 mypy/test/testtypes.py                             |   51 +-
 mypy/treetransform.py                              |   10 +-
 mypy/tvar_scope.py                                 |    6 +-
 mypy/typeanal.py                                   |  263 ++-
 mypy/typefixture.py                                |   18 +-
 mypy/types.py                                      |  283 ++-
 mypy/version.py                                    |    2 +-
 mypy/waiter.py                                     |   31 +-
 mypy_self_check.ini                                |   11 +-
 pinfer/.gitignore                                  |    3 -
 pinfer/LICENSE                                     |   27 -
 pinfer/README                                      |   47 -
 pinfer/inspect3.py                                 |  122 --
 pinfer/p.py                                        |   83 -
 pinfer/pinfer.py                                   |  686 ------
 pinfer/test_pinfer.py                              |  302 ---
 pinfer/test_pinfer3.py                             |   31 -
 pinfer/unparse.py                                  |  610 ------
 pinfer/unparse3.py                                 |  610 ------
 pytest.ini                                         |   21 -
 runtests.py                                        |  112 +-
 scripts/__pycache__/dumpmodule.cpython-36.pyc      |  Bin 3853 -> 0 bytes
 scripts/myunit                                     |    9 +
 setup.cfg                                          |    5 -
 setup.py                                           |   23 +-
 test-data/unit/README.md                           |   32 +-
 test-data/unit/check-abstract.test                 |   21 +-
 test-data/unit/check-async-await.test              |   46 +-
 test-data/unit/check-bound.test                    |   10 +-
 test-data/unit/check-class-namedtuple.test         |    8 +-
 test-data/unit/check-classes.test                  |  570 ++++-
 test-data/unit/check-classvar.test                 |    2 +-
 test-data/unit/check-custom-plugin.test            |    2 +-
 test-data/unit/check-default-plugin.test           |   33 +
 test-data/unit/check-dynamic-typing.test           |   64 +-
 test-data/unit/check-enum.test                     |   20 +-
 test-data/unit/check-expressions.test              |  119 +-
 test-data/unit/check-flags.test                    |  151 +-
 test-data/unit/check-functions.test                |  215 +-
 test-data/unit/check-generic-subtyping.test        |   66 +-
 test-data/unit/check-generics.test                 |  126 +-
 test-data/unit/check-ignore.test                   |    2 +-
 test-data/unit/check-incremental.test              |  324 +++
 test-data/unit/check-inference-context.test        |   94 +-
 test-data/unit/check-inference.test                |  134 +-
 test-data/unit/check-isinstance.test               |   48 +-
 test-data/unit/check-kwargs.test                   |   26 +-
 test-data/unit/check-lists.test                    |    2 +-
 test-data/unit/check-modules.test                  |  358 +++-
 test-data/unit/check-namedtuple.test               |  189 +-
 test-data/unit/check-newsyntax.test                |   14 +-
 test-data/unit/check-newtype.test                  |   24 +-
 test-data/unit/check-optional.test                 |   61 +-
 test-data/unit/check-overloading.test              |  135 +-
 test-data/unit/check-protocols.test                | 2181 ++++++++++++++++++++
 test-data/unit/check-python2.test                  |   28 +-
 test-data/unit/check-serialize.test                |    2 +-
 test-data/unit/check-statements.test               |   81 +-
 test-data/unit/check-super.test                    |  190 +-
 test-data/unit/check-tuples.test                   |   37 +-
 test-data/unit/check-type-aliases.test             |  126 +-
 test-data/unit/check-type-checks.test              |    2 +-
 test-data/unit/check-typeddict.test                |  131 +-
 test-data/unit/check-typevar-values.test           |   81 +-
 test-data/unit/check-unions.test                   |   19 +
 test-data/unit/check-unreachable-code.test         |    2 +-
 test-data/unit/check-varargs.test                  |  115 +-
 test-data/unit/check-warnings.test                 |   17 +-
 test-data/unit/cmdline.test                        |   49 +-
 test-data/unit/fixtures/async_await.pyi            |    9 +-
 test-data/unit/fixtures/bool.pyi                   |    4 +-
 test-data/unit/fixtures/dict.pyi                   |    9 +-
 test-data/unit/fixtures/exception.pyi              |    4 +-
 test-data/unit/fixtures/fine_grained.pyi           |    7 +-
 test-data/unit/fixtures/float.pyi                  |    5 +-
 test-data/unit/fixtures/floatdict.pyi              |    2 +-
 test-data/unit/fixtures/for.pyi                    |    2 +-
 test-data/unit/fixtures/isinstancelist.pyi         |   12 +-
 test-data/unit/fixtures/list.pyi                   |    2 +-
 test-data/unit/fixtures/module.pyi                 |    2 +-
 test-data/unit/fixtures/module_all.pyi             |    2 +-
 test-data/unit/fixtures/module_all_python2.pyi     |    2 +-
 test-data/unit/fixtures/primitives.pyi             |    4 +-
 test-data/unit/fixtures/set.pyi                    |    2 +-
 test-data/unit/fixtures/slice.pyi                  |    4 +-
 test-data/unit/fixtures/type.pyi                   |    2 +-
 test-data/unit/fixtures/typing-full.pyi            |   64 +-
 test-data/unit/fixtures/union.pyi                  |    7 +-
 test-data/unit/lib-stub/builtins.pyi               |    1 -
 test-data/unit/lib-stub/contextlib.pyi             |   10 +
 test-data/unit/lib-stub/six.pyi                    |    3 +-
 test-data/unit/lib-stub/typing.pyi                 |   43 +-
 test-data/unit/lib-stub/typing_extensions.pyi      |    6 +
 test-data/unit/parse.test                          |   12 +-
 test-data/unit/plugins/type_anal_hook.py           |    6 +-
 test-data/unit/pythoneval-asyncio.test             |   10 +-
 test-data/unit/pythoneval.test                     |   44 +-
 test-data/unit/reports.test                        |  338 +++
 test-data/unit/semanal-abstractclasses.test        |    8 +-
 test-data/unit/semanal-classes.test                |    4 +-
 test-data/unit/semanal-errors.test                 |   19 +-
 test-data/unit/semanal-typeddict.test              |   21 +
 test-data/unit/stubgen.test                        |  271 ++-
 test-requirements.txt                              |    9 -
 tmp-test-dirs/.gitignore                           |    4 -
 typeshed/stdlib/2/ConfigParser.pyi                 |    4 +-
 typeshed/stdlib/2/__builtin__.pyi                  |   42 +-
 typeshed/stdlib/2/_io.pyi                          |    2 +-
 typeshed/stdlib/2/ast.pyi                          |   15 +-
 typeshed/stdlib/2/builtins.pyi                     |   42 +-
 typeshed/stdlib/2/collections.pyi                  |    2 +-
 typeshed/stdlib/2/email/mime/application.pyi       |   11 +
 typeshed/stdlib/2/encodings/utf_8.pyi              |   17 +-
 typeshed/stdlib/2/exceptions.pyi                   |  128 +-
 typeshed/stdlib/2/fcntl.pyi                        |    4 +-
 typeshed/stdlib/2/functools.pyi                    |    2 +-
 typeshed/stdlib/2/heapq.pyi                        |    5 +-
 typeshed/stdlib/2/io.pyi                           |    4 +-
 typeshed/stdlib/2/itertools.pyi                    |    4 +-
 typeshed/stdlib/2/mutex.pyi                        |   15 +
 typeshed/stdlib/2/os/__init__.pyi                  |    2 +
 typeshed/stdlib/2/sre_constants.pyi                |   94 +
 typeshed/stdlib/2/sre_parse.pyi                    |   63 +
 typeshed/stdlib/2/stringold.pyi                    |   46 +
 typeshed/stdlib/2/symbol.pyi                       |    2 +-
 typeshed/stdlib/2/thread.pyi                       |    2 +-
 typeshed/stdlib/2/types.pyi                        |    3 +-
 typeshed/stdlib/2/typing.pyi                       |    5 +
 typeshed/stdlib/2and3/argparse.pyi                 |    2 +
 typeshed/stdlib/2and3/chunk.pyi                    |   23 +
 typeshed/stdlib/2and3/codeop.pyi                   |   17 +
 .../stdlib/2and3/distutils/command/bdist_msi.pyi   |    7 +-
 .../stdlib/2and3/distutils/command/build_py.pyi    |   10 +-
 typeshed/stdlib/2and3/formatter.pyi                |  105 +
 typeshed/stdlib/2and3/ftplib.pyi                   |   10 +-
 typeshed/stdlib/2and3/logging/__init__.pyi         |    3 +
 typeshed/stdlib/2and3/logging/handlers.pyi         |    2 +-
 typeshed/stdlib/2and3/mmap.pyi                     |   35 +-
 typeshed/stdlib/2and3/socket.pyi                   |    2 +-
 typeshed/stdlib/2and3/traceback.pyi                |   51 +-
 typeshed/stdlib/3.4/asyncio/locks.pyi              |    4 +-
 typeshed/stdlib/3.4/asyncio/streams.pyi            |   15 +-
 typeshed/stdlib/3.4/asyncio/tasks.pyi              |   31 +-
 typeshed/stdlib/3.4/enum.pyi                       |    7 +-
 typeshed/stdlib/3/_threading_local.pyi             |   18 +
 typeshed/stdlib/3/ast.pyi                          |   15 +-
 typeshed/stdlib/3/builtins.pyi                     |   67 +-
 typeshed/stdlib/3/collections/__init__.pyi         |  127 +-
 typeshed/stdlib/3/configparser.pyi                 |   39 +-
 typeshed/stdlib/3/email/mime/__init__.py           |    0
 .../stdlib/3/email/mime/__init__.pyi               |    0
 typeshed/stdlib/3/email/policy.pyi                 |    3 +-
 typeshed/stdlib/3/email/utils.pyi                  |    4 +-
 typeshed/stdlib/3/encodings/utf_8.pyi              |   17 +-
 typeshed/stdlib/3/fcntl.pyi                        |    4 +-
 typeshed/stdlib/3/functools.pyi                    |    2 +-
 typeshed/stdlib/3/gzip.pyi                         |   22 +-
 typeshed/stdlib/3/heapq.pyi                        |    4 +-
 typeshed/stdlib/3/http/cookiejar.pyi               |    3 +-
 typeshed/stdlib/3/io.pyi                           |   31 +-
 typeshed/stdlib/3/json/__init__.pyi                |    6 +-
 typeshed/stdlib/3/json/decoder.pyi                 |    8 +-
 typeshed/stdlib/3/json/encoder.pyi                 |    2 +-
 typeshed/stdlib/3/multiprocessing/__init__.pyi     |   94 +-
 typeshed/stdlib/3/multiprocessing/context.pyi      |   21 +-
 typeshed/stdlib/3/multiprocessing/managers.pyi     |    7 +-
 typeshed/stdlib/3/multiprocessing/pool.pyi         |   29 +-
 typeshed/stdlib/3/os/__init__.pyi                  |   29 +-
 typeshed/stdlib/3/os/path.pyi                      |    5 +-
 typeshed/stdlib/3/queue.pyi                        |    8 +-
 typeshed/stdlib/3/re.pyi                           |  122 +-
 typeshed/stdlib/3/resource.pyi                     |   12 +-
 typeshed/stdlib/3/shlex.pyi                        |   15 +-
 typeshed/stdlib/3/smtplib.pyi                      |   69 +-
 typeshed/stdlib/3/sre_constants.pyi                |   40 +
 typeshed/stdlib/3/sre_parse.pyi                    |   81 +
 typeshed/stdlib/3/ssl.pyi                          |   22 +
 typeshed/stdlib/3/subprocess.pyi                   |    7 +-
 typeshed/stdlib/3/sys.pyi                          |   50 +-
 typeshed/stdlib/3/time.pyi                         |    2 +-
 typeshed/stdlib/3/tkinter/__init__.pyi             |   16 +-
 typeshed/stdlib/3/types.pyi                        |    1 +
 typeshed/stdlib/3/typing.pyi                       |   18 +-
 typeshed/stdlib/3/unittest/__init__.pyi            |    9 +-
 typeshed/stdlib/3/unittest/mock.pyi                |   13 +-
 typeshed/stdlib/3/urllib/parse.pyi                 |   18 +-
 typeshed/tests/mypy_selftest.py                    |    2 +-
 typeshed/third_party/2/simplejson/__init__.pyi     |    6 +-
 typeshed/third_party/2/six/__init__.pyi            |    2 +-
 typeshed/third_party/2and3/click/__init__.pyi      |   94 +-
 typeshed/third_party/2and3/click/globals.pyi       |    3 +-
 typeshed/third_party/2and3/emoji.pyi               |   18 +
 typeshed/third_party/2and3/jinja2/__init__.pyi     |    2 +-
 typeshed/third_party/2and3/jinja2/environment.pyi  |    2 +-
 typeshed/third_party/2and3/jinja2/utils.pyi        |    5 +-
 typeshed/third_party/2and3/pynamodb/attributes.pyi |    4 +-
 typeshed/third_party/2and3/pytz/lazy.pyi           |   19 +-
 typeshed/third_party/2and3/requests/adapters.pyi   |    2 +-
 typeshed/third_party/2and3/requests/auth.pyi       |    4 +-
 typeshed/third_party/2and3/requests/models.pyi     |    2 +-
 typeshed/third_party/2and3/requests/structures.pyi |    7 +-
 typeshed/third_party/2and3/typing_extensions.pyi   |   33 +
 typeshed/third_party/3/enum.pyi                    |    7 +-
 typeshed/third_party/3/lxml/etree.pyi              |   10 +-
 typeshed/third_party/3/six/__init__.pyi            |    2 +-
 xml/mypy-html.xslt                                 |    2 +-
 xml/mypy.xsd                                       |    1 +
 304 files changed, 12403 insertions(+), 7976 deletions(-)

diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 2d7c8d4..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,30 +0,0 @@
-build/
-__pycache__
-*.py[cod]
-*~
-@*
-/build
-/env
-docs/build/
-*.iml
-/out/
-.venv/
-.mypy_cache/
-.incremental_checker_cache.json
-.cache
-.runtest_log.json
-
-# Packages
-*.egg
-*.egg-info
-
-# IDEs
-.idea
-*.swp
-
-# Operating Systems
-.DS_store
-
-# Coverage Files
-htmlcov
-.coverage*
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 6b366ad..0000000
--- a/.gitmodules
+++ /dev/null
@@ -1,3 +0,0 @@
-[submodule "typeshed"]
-	path = typeshed
-	url = http://github.com/python/typeshed
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index ed74d8a..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-sudo: false
-language: python
-# cache package wheels (1 cache per python version)
-cache: pip
-python:
-  - "3.3"
-  - "3.4"
-  # Specifically request 3.5.1 because we need to be compatible with that.
-  - "3.5.1"
-  - "3.6"
-  - "3.7-dev"
-  # Pypy build is disabled because it doubles the travis build time, and it rarely fails
-  # unless one one of the other builds fails.
-  # - "pypy3"
-
-install:
-  - pip install -r test-requirements.txt
-  - python2 -m pip install --user typing
-  - python setup.py install
-
-script:
-  - python runtests.py -j16 -x lint
-  - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8; fi
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
deleted file mode 100644
index f7d5849..0000000
--- a/CONTRIBUTING.md
+++ /dev/null
@@ -1,163 +0,0 @@
-Contributing to Mypy
-====================
-
-Welcome!  Mypy is a community project that aims to work for a wide
-range of Python users and Python codebases.  If you're trying Mypy on
-your Python code, your experience and what you can contribute are
-important to the project's success.
-
-
-Getting started, building, and testing
---------------------------------------
-
-If you haven't already, take a look at the project's
-[README.md file](README.md)
-and the [Mypy documentation](http://mypy.readthedocs.io/en/latest/),
-and try adding type annotations to your file and type-checking it with Mypy.
-
-
-Discussion
-----------
-
-If you've run into behavior in Mypy you don't understand, or you're
-having trouble working out a good way to apply it to your code, or
-you've found a bug or would like a feature it doesn't have, we want to
-hear from you!
-
-Our main forum for discussion is the project's [GitHub issue
-tracker](https://github.com/python/mypy/issues).  This is the right
-place to start a discussion of any of the above or most any other
-topic concerning the project.
-
-For less formal discussion we have a chat room on
-[gitter.im](https://gitter.im/python/typing).  Some Mypy core developers
-are almost always present; feel free to find us there and we're happy
-to chat.  Substantive technical discussion will be directed to the
-issue tracker.
-
-(We also have an IRC channel, `#python-mypy` on irc.freenode.net.
-This is lightly used, we have mostly switched to the gitter room
-mentioned above.)
-
-#### Code of Conduct
-
-Everyone participating in the Mypy community, and in particular in our
-issue tracker, pull requests, and IRC channel, is expected to treat
-other people with respect and more generally to follow the guidelines
-articulated in the [Python Community Code of
-Conduct](https://www.python.org/psf/codeofconduct/).
-
-
-Submitting Changes
-------------------
-
-Even more excellent than a good bug report is a fix for a bug, or the
-implementation of a much-needed new feature. (*)  We'd love to have
-your contributions.
-
-(*) If your new feature will be a lot of work, we recommend talking to
-    us early -- see below.
-
-We use the usual GitHub pull-request flow, which may be familiar to
-you if you've contributed to other projects on GitHub.  For the mechanics,
-see [our git and GitHub workflow help page](https://github.com/python/mypy/wiki/Using-Git-And-GitHub),
-or [GitHub's own documentation](https://help.github.com/articles/using-pull-requests/).
-
-Anyone interested in Mypy may review your code.  One of the Mypy core
-developers will merge your pull request when they think it's ready.
-For every pull request, we aim to promptly either merge it or say why
-it's not yet ready; if you go a few days without a reply, please feel
-free to ping the thread by adding a new comment.
-
-At present the core developers are (alphabetically):
-* David Fisher (@ddfisher)
-* Jukka Lehtosalo (@JukkaL)
-* Greg Price (@gnprice)
-* Guido van Rossum (@gvanrossum)
-
-
-Preparing Changes
------------------
-
-Before you begin: if your change will be a significant amount of work
-to write, we highly recommend starting by opening an issue laying out
-what you want to do.  That lets a conversation happen early in case
-other contributors disagree with what you'd like to do or have ideas
-that will help you do it.
-
-The best pull requests are focused, clearly describe what they're for
-and why they're correct, and contain tests for whatever changes they
-make to the code's behavior.  As a bonus these are easiest for someone
-to review, which helps your pull request get merged quickly!  Standard
-advice about good pull requests for open-source projects applies; we
-have [our own writeup](https://github.com/python/mypy/wiki/Good-Pull-Request)
-of this advice.
-
-See also our [coding conventions](https://github.com/python/mypy/wiki/Code-Conventions) --
-which consist mainly of a reference to
-[PEP 8](https://www.python.org/dev/peps/pep-0008/) -- for the code you
-put in the pull request.
-
-You may also find other pages in the
-[Mypy developer guide](https://github.com/python/mypy/wiki/Developer-Guides)
-helpful in developing your change.
-
-
-Core developer guidelines
--------------------------
-
-Core developers should follow these rules when processing pull requests:
-
-* Always wait for tests to pass before merging PRs.
-* Use "[Squash and merge](https://github.com/blog/2141-squash-your-commits)"
-  to merge PRs.
-* Delete branches for merged PRs (by core devs pushing to the main repo).
-
-
-Issue-tracker conventions
--------------------------
-
-We aim to reply to all new issues promptly.  We'll assign a milestone
-to help us track which issues we intend to get to when, and may apply
-labels to carry some other information.  Here's what our milestones
-and labels mean.
-
-### Task priority and sizing
-
-We use GitHub "labels" ([see our
-list](https://github.com/python/mypy/labels)) to roughly order what we
-want to do soon and less soon.  There's two dimensions taken into
-account: **priority** (does it matter to our users) and **size** (how
-long will it take to complete).
-
-Bugs that aren't a huge deal but do matter to users and don't seem
-like a lot of work to fix generally will be dealt with sooner; things
-that will take longer may go further out.
-
-We are trying to keep the backlog at a manageable size, an issue that is
-unlikely to be acted upon in foreseeable future is going to be
-respectfully closed.  This doesn't mean the issue is not important, but
-rather reflects the limits of the team.
-
-The **question** label is for issue threads where a user is asking a
-question but it isn't yet clear that it represents something to actually
-change.  We use the issue tracker as the preferred venue for such
-questions, even when they aren't literally issues, to keep down the
-number of distinct discussion venues anyone needs to track.  These might
-evolve into a bug or feature request.
-
-Issues **without a priority or size** haven't been triaged.  We aim to
-triage all new issues promptly, but there are some issues from previous
-years that we haven't yet re-reviewed since adopting these conventions.
-
-### Other labels
-
-* **needs discussion**: This issue needs agreement on some kind of
-  design before it makes sense to implement it, and it either doesn't
-  yet have a design or doesn't yet have agreement on one.
-* **feature**, **bug**, **crash**, **refactoring**, **documentation**:
-  These classify the user-facing impact of the change.  Specifically
-  "refactoring" means there should be no user-facing effect.
-* **topic-** labels group issues touching a similar aspect of the
-  project, for example PEP 484 compatibility, a specific command-line
-  option or dependency.
diff --git a/CREDITS b/CREDITS
deleted file mode 100644
index d4fe9ee..0000000
--- a/CREDITS
+++ /dev/null
@@ -1,101 +0,0 @@
-Credits
--------
-
-Lead developer:
-
-  Jukka Lehtosalo <jukka.lehtosalo at iki.fi>
-
-Core team:
-
-  Guido <guido at dropbox.com>
-  David Fisher <ddfisher at dropbox.com>
-  Greg Price <gregprice at dropbox.com>
-
-Contributors (in alphabetical order, including typeshed):
-
-  Tim Abbott
-  Steven Allen (@Stebalien)
-  Della Anjeh
-  Reid Barton (@rwbarton)
-  Matthias Bussonnier
-  Anup Chenthamarakshan
-  Kyle Consalus
-  Ian Cordasco
-  ctcutler
-  Ben Darnell
-  Miguel Garcia (@rockneurotiko)
-  Mihnea Giurgea
-  Ryan Gonzalez (@kirbyfan64)
-  James Guthrie
-  Jared Hance
-  Ashley Hewson (@ashleyh)
-  icoxfog417
-  Bob Ippolito (@etrepum)
-  ismail-s
-  Sander Kersten (@spkersten)
-  Matthias Kramm
-  Ian Kronquist (@iankronquist)
-  Yuval Langer
-  Howard Lee
-  Tad Leonard
-  Li Haoyi
-  Darjus Loktevic
-  Ben Longbons
-  Florian Ludwig (@FlorianLudwig)
-  Robert T. McGibbon
-  Ron Murawski <ron at horizonchess.com>
-  Motoki Naruse
-  Jared Pochtar (@jaredp)
-  Michal Pokorný
-  Eric Price (@ecprice)
-  Brodie Rao
-  Sebastian Reuße
-  Sebastian Riikonen
-  Seo Sanghyeon
-  Marek Sapota
-  Gigi Sayfan
-  Vlad Shcherbina
-  Anders Schuller
-  Daniel Shaulov
-  David Shea
-  Vita Smid
-  Schuyler Smith
-  Marcell Vazquez-Chanlatte (@mvcisback)
-  Prayag Verma
-  Igor Vuk (@ivuk)
-  Jeff Walden (@jswalden)
-  Michael Walter
-  Jing Wang
-  Wen Zhang
-  Roy Williams
-  wizzardx
-  Matthew Wright
-  Yuanchao Zhu (@yczhu)
-  Gennadiy Zlobin (@gennad)
-
-Additional thanks to:
-
-  Alex Allain
-  Max Bolingbroke
-  Peter Calvert
-  Kannan Goundan
-  Kathy Gray
-  David J Greaves
-  Riitta Ikonen
-  Terho Ikonen
-  Stephen Kell
-  Łukasz Langa
-  Laura Lehtosalo
-  Peter Ludemann
-  Seppo Mattila
-  Robin Message
-  Alan Mycroft
-  Dominic Orchard
-  Pekka Rapinoja
-  Matt Robben
-  Satnam Singh
-  Juha Sorva
-  Clay Sweetser
-  Jorma Tarhio
-  Jussi Tuovila
-  Andrey Vlasovskikh
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index afddd48..0000000
--- a/LICENSE
+++ /dev/null
@@ -1,227 +0,0 @@
-Mypy is licensed under the terms of the MIT license, reproduced below.
-
-= = = = =
-
-The MIT License
-
-Copyright (c) 2015-2016 Jukka Lehtosalo and contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
-= = = = =
-
-Portions of mypy are licensed under different licenses.  The files
-under stdlib-samples are licensed under the PSF 2 License, reproduced below.
-
-= = = = =
-
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
-grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
-analyze, test, perform and/or display publicly, prepare derivative works,
-distribute, and otherwise use Python alone or in any derivative version,
-provided, however, that PSF's License Agreement and PSF's notice of copyright,
-i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python
-alone or in any derivative version prepared by Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python.
-
-4. PSF is making Python available to Licensee on an "AS IS"
-basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between PSF and
-Licensee.  This License Agreement does not grant permission to use PSF
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using Python, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
--------------------------------------------
-
-BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
-
-1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
-office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
-Individual or Organization ("Licensee") accessing and otherwise using
-this software in source or binary form and its associated
-documentation ("the Software").
-
-2. Subject to the terms and conditions of this BeOpen Python License
-Agreement, BeOpen hereby grants Licensee a non-exclusive,
-royalty-free, world-wide license to reproduce, analyze, test, perform
-and/or display publicly, prepare derivative works, distribute, and
-otherwise use the Software alone or in any derivative version,
-provided, however, that the BeOpen Python License is retained in the
-Software, alone or in any derivative version prepared by Licensee.
-
-3. BeOpen is making the Software available to Licensee on an "AS IS"
-basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
-SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
-AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
-DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-5. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-6. This License Agreement shall be governed by and interpreted in all
-respects by the law of the State of California, excluding conflict of
-law provisions.  Nothing in this License Agreement shall be deemed to
-create any relationship of agency, partnership, or joint venture
-between BeOpen and Licensee.  This License Agreement does not grant
-permission to use BeOpen trademarks or trade names in a trademark
-sense to endorse or promote products or services of Licensee, or any
-third party.  As an exception, the "BeOpen Python" logos available at
-http://www.pythonlabs.com/logos.html may be used according to the
-permissions granted on that web page.
-
-7. By copying, installing or otherwise using the software, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
----------------------------------------
-
-1. This LICENSE AGREEMENT is between the Corporation for National
-Research Initiatives, having an office at 1895 Preston White Drive,
-Reston, VA 20191 ("CNRI"), and the Individual or Organization
-("Licensee") accessing and otherwise using Python 1.6.1 software in
-source or binary form and its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, CNRI
-hereby grants Licensee a nonexclusive, royalty-free, world-wide
-license to reproduce, analyze, test, perform and/or display publicly,
-prepare derivative works, distribute, and otherwise use Python 1.6.1
-alone or in any derivative version, provided, however, that CNRI's
-License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
-1995-2001 Corporation for National Research Initiatives; All Rights
-Reserved" are retained in Python 1.6.1 alone or in any derivative
-version prepared by Licensee.  Alternately, in lieu of CNRI's License
-Agreement, Licensee may substitute the following text (omitting the
-quotes): "Python 1.6.1 is made available subject to the terms and
-conditions in CNRI's License Agreement.  This Agreement together with
-Python 1.6.1 may be located on the Internet using the following
-unique, persistent identifier (known as a handle): 1895.22/1013.  This
-Agreement may also be obtained from a proxy server on the Internet
-using the following URL: http://hdl.handle.net/1895.22/1013".
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python 1.6.1 or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python 1.6.1.
-
-4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
-basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. This License Agreement shall be governed by the federal
-intellectual property law of the United States, including without
-limitation the federal copyright law, and, to the extent such
-U.S. federal law does not apply, by the law of the Commonwealth of
-Virginia, excluding Virginia's conflict of law provisions.
-Notwithstanding the foregoing, with regard to derivative works based
-on Python 1.6.1 that incorporate non-separable material that was
-previously distributed under the GNU General Public License (GPL), the
-law of the Commonwealth of Virginia shall govern this License
-Agreement only as to issues arising under or with respect to
-Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
-License Agreement shall be deemed to create any relationship of
-agency, partnership, or joint venture between CNRI and Licensee.  This
-License Agreement does not grant permission to use CNRI trademarks or
-trade name in a trademark sense to endorse or promote products or
-services of Licensee, or any third party.
-
-8. By clicking on the "ACCEPT" button where indicated, or by copying,
-installing or otherwise using Python 1.6.1, Licensee agrees to be
-bound by the terms and conditions of this License Agreement.
-
-        ACCEPT
-
-
-CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
---------------------------------------------------
-
-Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
-The Netherlands.  All rights reserved.
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name of Stichting Mathematisch
-Centrum or CWI not be used in advertising or publicity pertaining to
-distribution of the software without specific, written prior
-permission.
-
-STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
-THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
-FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-= = = = =
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
index e8e949f..f184bf5 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,2 +1,6 @@
 recursive-include scripts *
-recursive-exclude scripts myunit
+recursive-include test-data *
+recursive-include extensions *
+recursive-include docs *
+include runtests.py
+include mypy_self_check.ini
diff --git a/PKG-INFO b/PKG-INFO
index 641f4ed..c457957 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,11 +1,12 @@
 Metadata-Version: 1.1
 Name: mypy
-Version: 0.521
+Version: 0.530
 Summary: Optional static typing for Python
 Home-page: http://www.mypy-lang.org/
 Author: Jukka Lehtosalo
 Author-email: jukka.lehtosalo at iki.fi
 License: MIT License
+Description-Content-Type: UNKNOWN
 Description: Mypy -- Optional Static Typing for Python
         =========================================
         
@@ -17,7 +18,7 @@ Description: Mypy -- Optional Static Typing for Python
         types.
         
 Platform: POSIX
-Classifier: Development Status :: 2 - Pre-Alpha
+Classifier: Development Status :: 3 - Alpha
 Classifier: Environment :: Console
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
diff --git a/README.md b/README.md
index e2e36f9..18c726c 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 Mypy: Optional Static Typing for Python
 =======================================
 
-[![Build Status](https://travis-ci.org/python/mypy.svg)](https://travis-ci.org/python/mypy)
+[![Build Status](https://api.travis-ci.org/python/mypy.svg?branch=master)](https://travis-ci.org/python/mypy)
 [![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 
 
@@ -25,22 +25,15 @@ What is mypy?
 -------------
 
 Mypy is an optional static type checker for Python.  You can add type
-hints to your Python programs using the standard for type
-annotations introduced in Python 3.5 ([PEP 484](https://www.python.org/dev/peps/pep-0484/)), and use mypy to
-type check them statically. Find bugs in your programs without even
-running them!
-
-The type annotation standard has also been backported to earlier
-Python 3.x versions.  Mypy supports Python 3.3 and later.
-
-For Python 2.7, you can add annotations as comments (this is also
-specified in [PEP 484](https://www.python.org/dev/peps/pep-0484/)).
+hints ([PEP 484](https://www.python.org/dev/peps/pep-0484/)) to your
+Python programs, and use mypy to type check them statically.
+Find bugs in your programs without even running them!
 
 You can mix dynamic and static typing in your programs. You can always
 fall back to dynamic typing when static typing is not convenient, such
 as for legacy code.
 
-Here is a small example to whet your appetite:
+Here is a small example to whet your appetite (Python 3):
 
 ```python
 from typing import Iterator
@@ -51,11 +44,20 @@ def fib(n: int) -> Iterator[int]:
         yield a
         a, b = b, a + b
 ```
+See [the documentation](http://mypy.readthedocs.io/en/stable/introduction.html) for more examples.
+
+For Python 2.7, the standard annotations are written as comments:
+```python
+def is_palindrome(s):
+    # type: (str) -> bool
+    return s == s[::-1]
+```
+
+See [the documentation for Python 2 support](http://mypy.readthedocs.io/en/latest/python2.html).
 
 Mypy is in development; some features are missing and there are bugs.
 See 'Development status' below.
 
-
 Requirements
 ------------
 
@@ -104,7 +106,7 @@ Mypy can be integrated into popular IDEs:
 
 * Vim: [vim-mypy](https://github.com/Integralist/vim-mypy)
 * Emacs: using [Flycheck](https://github.com/flycheck/) and [Flycheck-mypy](https://github.com/lbolla/emacs-flycheck-mypy/issues)
-* Sublime Text: [SublimeLinter-contrib-mypy]
+* Sublime Text: [SublimeLinter-contrib-mypy](https://github.com/fredcallaway/SublimeLinter-contrib-mypy)
 * Atom: [linter-mypy](https://atom.io/packages/linter-mypy)
 * PyCharm: PyCharm integrates [its own implementation of PEP 484](https://www.jetbrains.com/help/pycharm/2017.1/type-hinting-in-pycharm.html).
 
@@ -129,7 +131,7 @@ Or you can jump straight to the documentation:
 Troubleshooting
 ---------------
 
-Depending on your configuration, you may have to run `pip3` like
+Depending on your configuration, you may have to run `pip` like
 this:
 
     $ python3 -m pip install -U mypy
@@ -140,8 +142,8 @@ can install it manually:
 
     $ python3 -m pip install -U typed-ast
 
-If the `mypy` command isn't found after installation: After either
-`pip3 install` or `setup.py install`, the `mypy` script and
+If the `mypy` command isn't found after installation: After
+`python3 -m pip install`, the `mypy` script and
 dependencies, including the `typing` module, will be installed to
 system-dependent locations.  Sometimes the script directory will not
 be in `PATH`, and you have to add the target directory to `PATH`
@@ -221,20 +223,11 @@ See [Test README.md](test-data/unit/README.md)
 Development status
 ------------------
 
-Mypy is work in progress and is not yet production quality, though
-mypy development has been done using mypy for a while!
-
-Here are some of the more significant Python features not supported
-right now (but all of these will improve):
-
- - properties with setters not supported
- - limited metaclass support
- - only a subset of Python standard library modules are supported, and some
-   only partially
- - 3rd party module support is limited
+Mypy is alpha software, but it has already been used in production
+for well over a year at Dropbox, and it has an extensive test suite.
 
-The current development focus is to have a good coverage of Python
-features and the standard library (both 3.x and 2.7).
+See [the roadmap](ROADMAP.md) if you are interested in plans for the
+future.
 
 
 Issue tracker
@@ -254,9 +247,8 @@ Help wanted
 Any help in testing, development, documentation and other tasks is
 highly appreciated and useful to the project. There are tasks for
 contributors of all experience levels. If you're just getting started,
-check out the
-[difficulty/easy](https://github.com/python/mypy/labels/difficulty%2Feasy)
-label.
+ask on the [gitter chat](https://gitter.im/python/typing) for ideas of good
+beginner issues.
 
 For more details, see the file [CONTRIBUTING.md](CONTRIBUTING.md).
 
diff --git a/ROADMAP.md b/ROADMAP.md
deleted file mode 100644
index 132d53c..0000000
--- a/ROADMAP.md
+++ /dev/null
@@ -1,96 +0,0 @@
-# Mypy Roadmap
-
-The goal of the roadmap is to document areas the mypy core team is
-planning to work on in the future or is currently working on. PRs
-targeting these areas are very welcome, but please check first with a
-core team member that nobody else is working on the same thing.
-
-**Note:** This doesn’t include everything that the core team will work
-on, and everything is subject to change. Near-term plans are likely
-more accurate.
-
-## April-June 2017
-
-- Add more comprehensive testing for `--incremental` and `--quick`
-  modes to improve reliability. At least write more unit tests with
-  focus on areas that have previously had bugs.
-  ([issue](https://github.com/python/mypy/issues/3455))
-
-- Speed up `--quick` mode to better support million+ line codebases
-  through some of these:
-
-  - Make it possible to use remote caching for incremental cache
-    files. This would speed up a cold run with no local cache data.
-    We need to update incremental cache to use hashes to determine
-    whether files have changes to allow
-    [sharing cache data](https://github.com/python/mypy/issues/3403).
-
-  - See if we can speed up deserialization of incremental cache
-    files. Initial experiments aren’t very promising though so there
-    might not be any easy wins left.
-    ([issue](https://github.com/python/mypy/issues/3456))
-
-- Improve support for complex signatures such as `open(fn, 'rb')` and
-  specific complex decorators such as `contextlib.contextmanager`
-  through type checker plugins/hooks.
-  ([issue](https://github.com/python/mypy/issues/1240))
-
-- Document basic properties of all type operations used within mypy,
-  including compatibility, proper subtyping, joins and meets.
-  ([issue](https://github.com/python/mypy/issues/3454))
-
-- Make TypedDict an officially supported mypy feature. This makes it
-  possible to give precise types for dictionaries that represent JSON
-  objects, such as `{"path": "/dir/fnam.ext", "size": 1234}`.
-  ([issue](https://github.com/python/mypy/issues/3453))
-
-- Make error messages more useful and informative.
-  ([issue](https://github.com/python/mypy/labels/topic-usability))
-
-- Resolve [#2008](https://github.com/python/mypy/issues/2008) (we are
-  converging on approach 4).
-
-## July-December 2017
-
-- Invest some effort into systematically filling in missing
-  annotations and stubs in typeshed, with focus on features heavily
-  used at Dropbox. Better support for ORMs will be a separate
-  project.
-
-- Improve opt-in warnings about `Any` types to make it easier to keep
-  code free from unwanted `Any` types. For example, warn about using
-  `list` (instead of `List[x]`) and calling `open` if we can’t infer a
-  precise return type, or using types imported from ignored modules
-  (they are implicitly `Any`).
-
-- Add support for protocols and structural subtyping (PEP 544).
-
-- Switch completely to pytest and remove the custom testing framework.
-  ([issue](https://github.com/python/mypy/issues/1673))
-
-- Make it possible to run mypy as a daemon to avoid reprocessing the
-  entire program on each run. This will improve performance
-  significantly. Even when using the incremental mode, processing a
-  large number of files is not cheap.
-
-- Refactor and simplify specific tricky parts of mypy internals, such
-  as the [conditional type binder](https://github.com/python/mypy/issues/3457),
-  [symbol tables](https://github.com/python/mypy/issues/3458) or
-  the various [semantic analysis passes](https://github.com/python/mypy/issues/3459).
-
-- Implement a general type system plugin architecture. It should be
-  able to support some typical ORM features at least, such as
-  metaclasses that add methods with automatically inferred signatures
-  and complex descriptors such as those used by Django models.
-  ([issue](https://github.com/python/mypy/issues/1240))
-
-- Add support for statically typed
-  [protobufs](https://developers.google.com/protocol-buffers/).
-
-- Provide much faster, reliable interactive feedback through
-  fine-grained incremental type checking, built on top the daemon
-  mode.
-
-- Start work on editor plugins and support for selected IDE features.
-
-- Turn on `--strict-optional` by default.
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index 5612c20..0000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-environment:
-  matrix:
-
-    - PYTHON: "C:\\Python36-x64"
-      PYTHON_VERSION: "3.6.x"
-      PYTHON_ARCH: "64"
-
-install:
-    - "git config core.symlinks true"
-    - "git reset --hard"
-    - "%PYTHON%\\python.exe -m pip install -r test-requirements.txt"
-    - "git submodule update --init typeshed"
-    - "cd typeshed && git config core.symlinks true && git reset --hard && cd .."
-    - "%PYTHON%\\python.exe setup.py -q install"
-
-build: off
-
-test_script:
-    # Ignore lint (it's run in Travis)
-    - "%PYTHON%\\python.exe runtests.py -x lint"
-
-after_test:
-  - "%PYTHON%\\python.exe -m pip install wheel"
-  - "%PYTHON%\\python.exe setup.py bdist_wheel -p win32"
-  - "%PYTHON%\\python.exe setup.py bdist_wheel -p win_amd64"
-
-artifacts:
-  - path: dist\*
-
-skip_commits:
-  files:
-    - docs/**/*
-    - '**/*.rst'
-    - '**/*.md'
-    - .gitignore
-    - .runtest_log.json
-    - .travis.yml
-    - CREDITS
-    - LICENSE
-
-skip_branch_with_pr: true
diff --git a/build-requirements.txt b/build-requirements.txt
deleted file mode 100644
index 0a8547b..0000000
--- a/build-requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-setuptools
-wheel
diff --git a/conftest.py b/conftest.py
deleted file mode 100644
index 4c3b890..0000000
--- a/conftest.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import os.path
-
-import pytest
-
-pytest_plugins = [
-    'mypy.test.data',
-]
-
-
-def pytest_configure(config):
-    mypy_source_root = os.path.dirname(os.path.abspath(__file__))
-    if os.getcwd() != mypy_source_root:
-        os.chdir(mypy_source_root)
diff --git a/docs/source/basics.rst b/docs/source/basics.rst
index 572364d..87999c0 100644
--- a/docs/source/basics.rst
+++ b/docs/source/basics.rst
@@ -17,7 +17,7 @@ A function without a type annotation is considered dynamically typed:
 
 You can declare the signature of a function using the Python 3
 annotation syntax (Python 2 is discussed later in :ref:`python2`).
-This makes the the function statically typed, and that causes type
+This makes the function statically typed, and that causes type
 checker report type errors within the function.
 
 Here's a version of the above function that is statically typed and
diff --git a/docs/source/class_basics.rst b/docs/source/class_basics.rst
index dc778d3..3dc116c 100644
--- a/docs/source/class_basics.rst
+++ b/docs/source/class_basics.rst
@@ -151,7 +151,182 @@ concrete. As with normal overrides, a dynamically typed method can
 implement a statically typed abstract method defined in an abstract
 base class.
 
+.. _protocol-types:
+
+Protocols and structural subtyping
+**********************************
+
+.. note::
+
+   Structural subtyping is experimental. Some things may not
+   work as expected. Mypy may pass unsafe code or it can reject
+   valid code.
+
+Mypy supports two ways of deciding whether two classes are compatible
+as types: nominal subtyping and structural subtyping. *Nominal*
+subtyping is strictly based on the class hierarchy. If class ``D``
+inherits class ``C``, it's also a subtype of ``C``. This form of
+subtyping is used by default in mypy, since it's easy to understand
+and produces clear and concise error messages, and since it matches
+how the native ``isinstance()`` check works -- based on class
+hierarchy. *Structural* subtyping can also be useful. Class ``D`` is
+a structural subtype of class ``C`` if the former has all attributes
+and methods of the latter, and with compatible types.
+
+Structural subtyping can be seen as a static equivalent of duck
+typing, which is well known to Python programmers. Mypy provides
+opt-in support for structural subtyping via protocol classes described
+below.  See `PEP 544 <https://www.python.org/dev/peps/pep-0544/>`_ for
+the detailed specification of protocols and structural subtyping in
+Python.
+
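
To make the nominal/structural contrast above concrete, here is a minimal, hedged sketch (the class names are invented purely for illustration, and it assumes `typing_extensions` is installed, as the new docs note):

```python
# Illustrative sketch only; not part of the commit above.
from typing_extensions import Protocol

class HasArea(Protocol):      # structural: anything with a compatible .area() matches
    def area(self) -> float: ...

class Shape:                  # nominal: only subclasses of Shape are accepted
    def area(self) -> float:
        return 0.0

class Circle:                 # note: no Shape base class
    def __init__(self, r: float) -> None:
        self.r = r
    def area(self) -> float:
        return 3.14159 * self.r * self.r

a: HasArea = Circle(1.0)      # OK under structural subtyping (protocol)
# b: Shape = Circle(1.0)      # rejected under nominal subtyping: Circle is not a Shape
```
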
+Simple user-defined protocols
+*****************************
+
+You can define a protocol class by inheriting the special
+``typing_extensions.Protocol`` class:
+
+.. code-block:: python
+
+   from typing import Iterable
+   from typing_extensions import Protocol
+
+   class SupportsClose(Protocol):
+       def close(self) -> None:
+          ...  # Explicit '...'
+
+   class Resource:  # No SupportsClose base class!
+       # ... some methods ...
+
+       def close(self) -> None:
+          self.resource.release()
+
+   def close_all(items: Iterable[SupportsClose]) -> None:
+       for item in items:
+           item.close()
+
+   close_all([Resource(), open('some/file')])  # Okay!
+
+``Resource`` is a subtype of the ``SupportsClose`` protocol since it defines
+a compatible ``close`` method. Regular file objects returned by ``open()`` are
+similarly compatible with the protocol, as they support ``close()``.
+
+.. note::
+
+   The ``Protocol`` base class is currently provided in the ``typing_extensions``
+   package. Once structural subtyping is mature and
+   `PEP 544 <https://www.python.org/dev/peps/pep-0544/>`_ has been accepted,
+   ``Protocol`` will be included in the ``typing`` module. Several library
+   types such as ``typing.Sized`` and ``typing.Iterable`` will also be changed
+   into protocols. They are currently treated as regular ABCs by mypy.
+
+Defining subprotocols
+*********************
+
+You can also define subprotocols. Existing protocols can be extended
+and merged using multiple inheritance. Example:
+
+.. code-block:: python
+
+   # ... continuing from the previous example
+
+   class SupportsRead(Protocol):
+       def read(self, amount: int) -> bytes: ...
+
+   class TaggedReadableResource(SupportsClose, SupportsRead, Protocol):
+       label: str
+
+   class AdvancedResource(Resource):
+       def __init__(self, label: str) -> None:
+           self.label = label
+
+       def read(self, amount: int) -> bytes:
+           # some implementation
+           ...
+
+   resource: TaggedReadableResource
+   resource = AdvancedResource('handle with care')  # OK
+
+Note that inheriting from an existing protocol does not automatically
+turn the subclass into a protocol -- it just creates a regular
+(non-protocol) ABC that implements the given protocol (or
+protocols). The ``typing_extensions.Protocol`` base class must always
+be explicitly present if you are defining a protocol:
+
+.. code-block:: python
+
+   class NewProtocol(SupportsClose):  # This is NOT a protocol
+       new_attr: int
+
+   class Concrete:
+       new_attr: int = 0
+
+       def close(self) -> None:
+           ...
+
+   # Error: nominal subtyping used by default
+   x: NewProtocol = Concrete()  # Error!
+
 .. note::
 
-   There are also plans to support more Python-style "duck typing" in
-   the type system. The details are still open.
+   You can use Python 3.6 variable annotations (`PEP 526
+   <https://www.python.org/dev/peps/pep-0526/>`_)
+   to declare protocol attributes.  On Python 2.7 and earlier Python 3
+   versions you can use type comments and properties.
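+
+   For example, here is a rough sketch of a protocol attribute declared
+   via a property with a type comment, in Python 2 compatible syntax
+   (the ``HasLabel`` name is purely illustrative):
+
+   .. code-block:: python
+
+      from typing_extensions import Protocol
+
+      class HasLabel(Protocol):
+          @property
+          def label(self):
+              # type: () -> str
+              pass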
+
+Recursive protocols
+*******************
+
+Protocols can be recursive (self-referential) and mutually
+recursive. This is useful for declaring abstract recursive collections
+such as trees and linked lists:
+
+.. code-block:: python
+
+   from typing import Optional
+   from typing_extensions import Protocol
+
+   class TreeLike(Protocol):
+       value: int
+
+       @property
+       def left(self) -> Optional['TreeLike']: ...
+
+       @property
+       def right(self) -> Optional['TreeLike']: ...
+
+   class SimpleTree:
+       def __init__(self, value: int) -> None:
+           self.value = value
+           self.left: Optional['SimpleTree'] = None
+           self.right: Optional['SimpleTree'] = None
+
+   root = SimpleTree(0)  # type: TreeLike  # OK
+
+Using ``isinstance()`` with protocols
+*************************************
+
+You can use a protocol class with ``isinstance()`` if you decorate it
+with the ``typing_extensions.runtime`` class decorator. The decorator
+adds support for basic runtime structural checks:
+
+.. code-block:: python
+
+   from typing_extensions import Protocol, runtime
+
+   @runtime
+   class Portable(Protocol):
+       handles: int
+
+   class Mug:
+       def __init__(self) -> None:
+           self.handles = 1
+
+   mug = Mug()
+   if isinstance(mug, Portable):
+      use(mug.handles)  # Works statically and at runtime
+
+.. note::
+   ``isinstance()`` with protocols is not completely safe at runtime.
+   For example, signatures of methods are not checked. The runtime
+   implementation only checks that all protocol members are defined.
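+
+   For example (an illustrative sketch; the names are hypothetical), the
+   following check succeeds at runtime even though the method signature
+   does not match the protocol:
+
+   .. code-block:: python
+
+      from typing_extensions import Protocol, runtime
+
+      @runtime
+      class Renderer(Protocol):
+          def render(self, dpi: int) -> bytes: ...
+
+      class BadRenderer:
+          def render(self) -> None:  # Signature does not match the protocol
+              ...
+
+      isinstance(BadRenderer(), Renderer)  # True at runtime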
diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst
index 5df9df2..a8ef653 100644
--- a/docs/source/command_line.rst
+++ b/docs/source/command_line.rst
@@ -322,9 +322,15 @@ Here are some more useful flags:
   annotations are not type checked.)  It will assume all arguments
   have type ``Any`` and always infer ``Any`` as the return type.
 
+- ``--disallow-incomplete-defs`` reports an error whenever it
+  encounters a partly annotated function definition.
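+
+  For example, the following (hypothetical) function would be flagged,
+  since ``factor`` and the return type are not annotated:
+
+  .. code-block:: python
+
+     def scale(x: int, factor):
+         return x * factor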
+
 - ``--disallow-untyped-calls`` reports an error whenever a function
   with type annotations calls a function defined without annotations.
 
+- ``--disallow-untyped-decorators`` reports an error whenever a function
+  with type annotations is decorated with a decorator without annotations.
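+
+  For example (a sketch with hypothetical names), ``greet`` below would be
+  flagged, because it is annotated but its decorator is not:
+
+  .. code-block:: python
+
+     def log_calls(func):  # The decorator itself has no annotations
+         return func
+
+     @log_calls
+     def greet(name: str) -> str:
+         return 'hello ' + name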
+
 .. _disallow-subclassing-any:
 
 - ``--disallow-subclassing-any`` reports an error whenever a class
diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst
index 0c8b500..e31d9a7 100644
--- a/docs/source/common_issues.rst
+++ b/docs/source/common_issues.rst
@@ -226,6 +226,48 @@ Possible strategies in such situations are:
          return x[0]
      f_good(new_lst) # OK
 
+Covariant subtyping of mutable protocol members is rejected
+-----------------------------------------------------------
+
+Mypy rejects covariant subtyping of mutable protocol members because it is
+potentially unsafe. Consider this example:
+
+.. code-block:: python
+
+   from typing_extensions import Protocol
+
+   class P(Protocol):
+       x: float
+
+   def fun(arg: P) -> None:
+       arg.x = 3.14
+
+   class C:
+       x = 42
+   c = C()
+   fun(c)     # This is not safe...
+   c.x << 5   # ...since fun() sets c.x to a float, and this fails at runtime
+
+To work around this problem, consider whether mutation is actually part
+of the protocol. If it isn't, you can use a ``@property`` in
+the protocol definition:
+
+.. code-block:: python
+
+   from typing_extensions import Protocol
+
+   class P(Protocol):
+       @property
+       def x(self) -> float:
+          pass
+
+   def fun(arg: P) -> None:
+       ...
+
+   class C:
+       x = 42
+   fun(C())  # OK
+
 Declaring a supertype as variable type
 --------------------------------------
 
diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst
index 6a32414..9f5c832 100644
--- a/docs/source/config_file.rst
+++ b/docs/source/config_file.rst
@@ -34,6 +34,11 @@ characters.
   separated by commas.  These sections specify additional flags that
   only apply to *modules* whose name matches at least one of the patterns.
 
+.. note::
+
+   The ``warn_unused_configs`` flag may be useful to debug misspelled
+   section names.
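+
+   For example, with a configuration like the following sketch (the section
+   name is intentionally misspelled and the module name is hypothetical),
+   ``warn_unused_configs`` would flag the ``[mypy-requsts.*]`` section as
+   never matching any processed files:
+
+   .. code-block:: ini
+
+      [mypy]
+      warn_unused_configs = True
+
+      # Typo: this was meant to be [mypy-requests.*]
+      [mypy-requsts.*]
+      ignore_missing_imports = True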
+
 Global flags
 ************
 
@@ -72,6 +77,10 @@ The following global flags may only be set in the global section
 - ``warn_unused_ignores`` (Boolean, default False) warns about
   unneeded ``# type: ignore`` comments.
 
+- ``warn_unused_configs`` (Boolean, default False) warns about
+  per-module sections in the config file that didn't match any
+  files processed in the current run.
+
 - ``strict_optional`` (Boolean, default False) enables experimental
   strict Optional checks.
 
@@ -124,7 +133,7 @@ overridden by the pattern sections matching the module name.
 .. note::
 
    If multiple pattern sections match a module they are processed in
-   unspecified order.
+   order of their occurrence in the config file.
 
 - ``follow_imports`` (string, default ``normal``) directs what to do
   with imports when the imported module is found as a ``.py`` file and
@@ -153,6 +162,10 @@ overridden by the pattern sections matching the module name.
   ``unannotated``, ``expr``, ``decorated``, ``explicit``, ``generics``.
   For explanations see the discussion for the :ref:`--disallow-any <disallow-any>` option.
 
+- ``disallow_subclassing_any`` (Boolean, default False) disallows
+  subclassing a value of type ``Any``.  See the
+  :ref:`--disallow-subclassing-any <disallow-subclassing-any>` option.
+
 - ``disallow_untyped_calls`` (Boolean, default False) disallows
   calling functions without type annotations from functions with type
   annotations.
diff --git a/docs/source/faq.rst b/docs/source/faq.rst
index 9fd73b4..b131e3f 100644
--- a/docs/source/faq.rst
+++ b/docs/source/faq.rst
@@ -101,35 +101,38 @@ Is mypy free?
 Yes. Mypy is free software, and it can also be used for commercial and
 proprietary projects. Mypy is available under the MIT license.
 
-Why not use structural subtyping?
-*********************************
+Can I use structural subtyping?
+*******************************
 
-Mypy primarily uses `nominal subtyping
-<https://en.wikipedia.org/wiki/Nominative_type_system>`_ instead of
+Mypy provides support for both `nominal subtyping
+<https://en.wikipedia.org/wiki/Nominative_type_system>`_ and
 `structural subtyping
-<https://en.wikipedia.org/wiki/Structural_type_system>`_. Some argue
-that structural subtyping is better suited for languages with duck
-typing such as Python.
-
-Here are some reasons why mypy uses nominal subtyping:
+<https://en.wikipedia.org/wiki/Structural_type_system>`_.
+Support for structural subtyping is considered experimental.
+Some argue that structural subtyping is better suited for languages with duck
+typing such as Python. Mypy, however, primarily uses nominal subtyping,
+leaving structural subtyping opt-in. Here are some reasons why:
 
 1. It is easy to generate short and informative error messages when
    using a nominal type system. This is especially important when
    using type inference.
 
-2. Python supports basically nominal isinstance tests and they are
-   widely used in programs. It is not clear how to support isinstance
-   in a purely structural type system while remaining compatible with
-   Python idioms.
+2. Python provides built-in support for nominal ``isinstance()`` tests and
+   they are widely used in programs. Only limited support for structural
+   ``isinstance()`` exists for ABCs in ``collections.abc`` and ``typing``
+   standard library modules.
 
 3. Many programmers are already familiar with nominal subtyping and it
    has been successfully used in languages such as Java, C++ and
    C#. Only few languages use structural subtyping.
 
-However, structural subtyping can also be useful. Structural subtyping
-is a likely feature to be added to mypy in the future, even though we
-expect that most mypy programs will still primarily use nominal
-subtyping.
+However, structural subtyping can also be useful. For example, a "public API"
+may be more flexible if it is typed with protocols. Also, using protocol types
+removes the need to explicitly declare implementations of ABCs.
+As a rule of thumb, we recommend using nominal classes where possible, and
+protocols where necessary. For more details about protocol types and structural
+subtyping see :ref:`protocol-types` and
+`PEP 544 <https://www.python.org/dev/peps/pep-0544/>`_.
 
 I like Python and I have no need for static typing
 **************************************************
diff --git a/docs/source/function_overloading.rst b/docs/source/function_overloading.rst
index 43f365b..5e7d2c5 100644
--- a/docs/source/function_overloading.rst
+++ b/docs/source/function_overloading.rst
@@ -56,7 +56,7 @@ accurately describe the function's behavior.
         # It may or may not have type hints; if it does,
         # these are checked against the overload definitions
         # as well as against the implementation body.
-        def __getitem__(self, index):
+        def __getitem__(self, index: Union[int, slice]) -> Union[T, Sequence[T]]:
             # This is exactly the same as before.
             if isinstance(index, int):
                 ...  # Return a T here
@@ -65,6 +65,29 @@ accurately describe the function's behavior.
             else:
                 raise TypeError(...)
 
+Calls to overloaded functions are type checked against the variants,
+not against the implementation. A call like ``my_list[5]`` has type
+``T``, not ``Union[T, Sequence[T]]``, because it matches the first
+variant; the type annotations on the implementation of ``__getitem__``
+are not used at call sites. The code in the body of ``__getitem__`` is
+instead checked against the annotations on the corresponding
+implementation declaration. In this case the body is checked with
+``index: Union[int, slice]`` and a return type of
+``Union[T, Sequence[T]]``. If there are no annotations on the
+implementation, the code in the function body is not type checked.
+
+The annotations on the implementation must be compatible with the
+types given for the overloaded variants listed above it. The type
+checker verifies that all the types listed in the overloaded variants
+are compatible with the types given for the implementation. In this
+case it checks that the parameter type ``int`` and the return type
+``T`` of the first variant are compatible with ``Union[int, slice]``
+and ``Union[T, Sequence[T]]``. For the second variant it verifies that
+the parameter type ``slice`` and the return type ``Sequence[T]`` are
+compatible with ``Union[int, slice]`` and ``Union[T, Sequence[T]]``.
+
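+For example, here is a rough sketch of what mypy would infer at call
+sites (assuming the class defining the overloaded ``__getitem__`` above
+is called ``MyList``; the name is only for illustration):
+
+.. code-block:: python
+
+   my_list: MyList[int] = ...
+   reveal_type(my_list[5])     # int -- matches the first (int) variant
+   reveal_type(my_list[2:5])   # Sequence[int] -- matches the slice variant
+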
 Overloaded function variants are still ordinary Python functions and
 they still define a single runtime object. There is no automatic
 dispatch happening, and you must manually handle the different types
diff --git a/docs/source/generics.rst b/docs/source/generics.rst
index bd0e054..2ea9b42 100644
--- a/docs/source/generics.rst
+++ b/docs/source/generics.rst
@@ -1,6 +1,8 @@
 Generics
 ========
 
+.. _generic-classes:
+
 Defining generic classes
 ************************
 
@@ -326,9 +328,9 @@ Let us illustrate this by few simple examples:
      def salaries(staff: List[Manager],
                   accountant: Callable[[Manager], int]) -> List[int]: ...
 
-  this function needs a callable that can calculate a salary for managers, and
+  This function needs a callable that can calculate a salary for managers, and
   if we give it a callable that can calculate a salary for an arbitrary
-  employee, then it is still safe.
+  employee, it's still safe.
 * ``List`` is an invariant generic type. Naively, one would think
   that it is covariant, but let us consider this code:
 
@@ -336,6 +338,7 @@ Let us illustrate this by few simple examples:
 
      class Shape:
          pass
+
      class Circle(Shape):
          def rotate(self):
              ...
@@ -347,7 +350,7 @@ Let us illustrate this by few simple examples:
      add_one(my_things)     # This may appear safe, but...
      my_things[0].rotate()  # ...this will fail
 
-  Another example of invariant type is ``Dict``, most mutable containers
+  Another example of an invariant type is ``Dict``. Most mutable containers
   are invariant.
 
 By default, mypy assumes that all user-defined generics are invariant.
@@ -358,15 +361,18 @@ type variables defined with special keyword arguments ``covariant`` or
 .. code-block:: python
 
    from typing import Generic, TypeVar
+
    T_co = TypeVar('T_co', covariant=True)
 
    class Box(Generic[T_co]):  # this type is declared covariant
        def __init__(self, content: T_co) -> None:
            self._content = content
+
        def get_content(self) -> T_co:
            return self._content
 
    def look_into(box: Box[Animal]): ...
+
    my_box = Box(Cat())
    look_into(my_box)  # OK, but mypy would complain here for an invariant type
 
@@ -539,3 +545,94 @@ Also note that the ``wrapper()`` function is not type-checked. Wrapper
 functions are typically small enough that this is not a big
 problem. This is also the reason for the ``cast()`` call in the
 ``return`` statement in ``my_decorator()``. See :ref:`casts`.
+
+Generic protocols
+*****************
+
+Mypy supports generic protocols (see also :ref:`protocol-types`). Generic
+protocols mostly follow the normal rules for generic classes. Example:
+
+.. code-block:: python
+
+   from typing import TypeVar
+   from typing_extensions import Protocol
+
+   T = TypeVar('T')
+
+   class Box(Protocol[T]):
+       content: T
+
+   def do_stuff(one: Box[str], other: Box[bytes]) -> None:
+       ...
+
+   class StringWrapper:
+       def __init__(self, content: str) -> None:
+           self.content = content
+
+   class BytesWrapper:
+       def __init__(self, content: bytes) -> None:
+           self.content = content
+
+   do_stuff(StringWrapper('one'), BytesWrapper(b'other'))  # OK
+
+   x: Box[float] = ...
+   y: Box[int] = ...
+   x = y  # Error -- Box is invariant
+
+The main difference between generic protocols and ordinary generic
+classes is that mypy checks that the declared variances of generic
+type variables in a protocol match how they are used in the protocol
+definition.  The protocol in this example is rejected, since the type
+variable ``T`` is used covariantly (as a return type) even though it is
+declared invariant:
+
+.. code-block:: python
+
+   from typing import TypeVar
+   from typing_extensions import Protocol
+
+   T = TypeVar('T')
+
+   class ReadOnlyBox(Protocol[T]):  # Error: covariant type variable expected
+       def content(self) -> T: ...
+
+This example correctly uses a covariant type variable:
+
+.. code-block:: python
+
+   from typing import TypeVar
+   from typing_extensions import Protocol
+
+   T_co = TypeVar('T_co', covariant=True)
+
+   class ReadOnlyBox(Protocol[T_co]):  # OK
+       def content(self) -> T_co: ...
+
+   ax: ReadOnlyBox[float] = ...
+   ay: ReadOnlyBox[int] = ...
+   ax = ay  # OK -- ReadOnlyBox is covariant
+
+See :ref:`variance-of-generics` for more about variance.
+
+Generic protocols can also be recursive. Example:
+
+.. code-block:: python
+
+   T = TypeVar('T')
+
+   class Linked(Protocol[T]):
+       val: T
+       def next(self) -> 'Linked[T]': ...
+
+   class L:
+       val: int
+
+       ...  # details omitted
+
+       def next(self) -> 'L':
+           ...  # details omitted
+
+   def last(seq: Linked[T]) -> T:
+       ...  # implementation omitted
+
+   result = last(L())  # Inferred type of 'result' is 'int'
diff --git a/docs/source/revision_history.rst b/docs/source/revision_history.rst
index dadc795..23ca45b 100644
--- a/docs/source/revision_history.rst
+++ b/docs/source/revision_history.rst
@@ -3,6 +3,20 @@ Revision history
 
 List of major changes:
 
+- October 2017
+    * Publish ``mypy`` version 0.530 on PyPI.
+
+- August-September 2017
+    * Add :ref:`protocol-types`.
+
+    * Other updates to :ref:`command-line`:
+
+      * Add ``--warn-unused-configs``.
+
+      * Add ``--disallow-untyped-decorators``.
+
+      * Add ``--disallow-incomplete-defs``.
+
 - July 2017
     * Publish ``mypy`` version 0.521 on PyPI.
 
diff --git a/docs/source/type_inference_and_annotations.rst b/docs/source/type_inference_and_annotations.rst
index 76e9cf9..2ae6396 100644
--- a/docs/source/type_inference_and_annotations.rst
+++ b/docs/source/type_inference_and_annotations.rst
@@ -78,6 +78,22 @@ type:
 
    x = 1.1  # type: Union[int, str]  # Error!
 
+Python 3.6 introduced a new syntax for variable annotations, which
+resembles function annotations:
+
+.. code-block:: python
+
+   x: Union[int, str] = 1
+
+We'll use both syntax variants in examples. They are
+mostly interchangeable, but the Python 3.6 syntax allows defining the
+type of a variable without initialization, which is not possible with
+the comment-based syntax:
+
+.. code-block:: python
+
+   x: str  # Declare type of 'x' without initialization
+
 .. note::
 
    The best way to think about this is that the type comment sets the
diff --git a/extensions/setup.cfg b/extensions/setup.cfg
new file mode 100644
index 0000000..3c6e79c
--- /dev/null
+++ b/extensions/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal=1
diff --git a/extensions/setup.py b/extensions/setup.py
index b0ffbc5..0ec4ba1 100644
--- a/extensions/setup.py
+++ b/extensions/setup.py
@@ -1,8 +1,6 @@
-#!/usr/bin/env python
-
 # NOTE: This package must support Python 2.7 in addition to Python 3.x
 
-from distutils.core import setup
+from setuptools import setup
 
 version = '0.3.0'
 description = 'Experimental type system extensions for programs checked with the mypy typechecker.'
diff --git a/misc/actions_stubs.py b/misc/actions_stubs.py
deleted file mode 100644
index 978af71..0000000
--- a/misc/actions_stubs.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python3
-import os
-import shutil
-from typing import Tuple, Any
-try:
-    import click
-except ImportError:
-    print("You need the module \'click\'")
-    exit(1)
-
-base_path = os.getcwd()
-
-# I don't know how to set callables with different args
-def apply_all(func: Any, directory: str, extension: str,
-            to_extension: str='', exclude: Tuple[str]=('',),
-            recursive: bool=True, debug: bool=False) -> None:
-    excluded = [x+extension for x in exclude] if exclude else []
-    for p, d, files in os.walk(os.path.join(base_path,directory)):
-        for f in files:
-            if "{}".format(f) in excluded:
-                continue
-            inner_path = os.path.join(p,f)
-            if not inner_path.endswith(extension):
-                continue
-            if to_extension:
-                new_path = "{}{}".format(inner_path[:-len(extension)],to_extension)
-                func(inner_path,new_path)
-            else:
-                func(inner_path)
-        if not recursive:
-            break
-
-def confirm(resp: bool=False, **kargs) -> bool:
-    kargs['rest'] = "to this {f2}/*{e2}".format(**kargs) if kargs.get('f2') else ''
-    prompt = "{act} all files {rec}matching this expression {f1}/*{e1} {rest}".format(**kargs)
-    prompt.format(**kargs)
-    prompt = "{} [{}]|{}: ".format(prompt, 'Y' if resp else 'N', 'n' if resp else 'y')
-    while True:
-        ans = input(prompt).lower()
-        if not ans:
-            return resp
-        if ans not in ['y','n']:
-            print( 'Please, enter (y) or (n).')
-            continue
-        if ans == 'y':
-            return True
-        else:
-            return False
-
-actions = ['cp', 'mv', 'rm']
-@click.command(context_settings=dict(help_option_names=['-h', '--help']))
-@click.option('--action', '-a', type=click.Choice(actions), required=True, help="What do I have to do :-)")
-@click.option('--dir', '-d', 'directory', default='stubs', help="Directory to start search!")
-@click.option('--ext', '-e', 'extension', default='.py', help="Extension \"from\" will be applied the action. Default .py")
-@click.option('--to', '-t', 'to_extension', default='.pyi', help="Extension \"to\" will be applied the action if can. Default .pyi")
-@click.option('--exclude', '-x', multiple=True, default=('__init__',), help="For every appear, will ignore this files. (can set multiples times)")
-@click.option('--not-recursive', '-n', default=True, is_flag=True, help="Set if don't want to walk recursively.")
-def main(action: str, directory: str, extension: str, to_extension: str,
-    exclude: Tuple[str], not_recursive: bool) -> None:
-    """
-    This script helps to copy/move/remove files based on their extension.
-
-    The three actions will ask you for confirmation.
-
-    Examples (by default the script search in stubs directory):
-
-    - Change extension of all stubs from .py to .pyi:
-
-        python <script.py> -a mv
-
-    - Revert the previous action.
-
-        python <script.py> -a mv -e .pyi -t .py
-
-    - If you want to ignore "awesome.py" files.
-
-        python <script.py> -a [cp|mv|rm] -x awesome
-
-    - If you want to ignore "awesome.py" and "__init__.py" files.
-
-        python <script.py> -a [cp|mv|rm] -x awesome -x __init__
-
-    - If you want to remove all ".todo" files in "todo" directory, but not recursively:
-
-        python <script.py> -a rm -e .todo -d todo -r
-
-    """
-    if action not in actions:
-        print("Your action have to be one of this: {}".format(', '.join(actions)))
-        return
-
-    rec = "[Recursively] " if not_recursive else ''
-    if not extension.startswith('.'):
-        extension = ".{}".format(extension)
-    if not to_extension.startswith('.'):
-        to_extension = ".{}".format(to_extension)
-    if directory.endswith('/'):
-        directory = directory[:-1]
-    if action == 'cp':
-        if confirm(act='Copy',rec=rec, f1=directory, e1=extension, f2=directory, e2=to_extension):
-            apply_all(shutil.copy, directory, extension, to_extension, exclude, not_recursive)
-    elif action == 'rm':
-        if confirm(act='Remove',rec=rec, f1=directory, e1=extension):
-            apply_all(os.remove, directory, extension, exclude=exclude, recursive=not_recursive)
-    elif action == 'mv':
-        if confirm(act='Move',rec=rec, f1=directory, e1=extension, f2=directory, e2=to_extension):
-            apply_all(shutil.move, directory, extension, to_extension, exclude, not_recursive)
-
-
-if __name__ == '__main__':
-    main()
diff --git a/misc/analyze_cache.py b/misc/analyze_cache.py
deleted file mode 100644
index 643e2bf..0000000
--- a/misc/analyze_cache.py
+++ /dev/null
@@ -1,189 +0,0 @@
-#!/usr/bin/env python
-
-from typing import Any, Dict, Generator, Iterable, List, Optional
-from collections import Counter
-
-import os
-import os.path
-import json
-
-ROOT = ".mypy_cache/3.5"
-
-JsonDict = Dict[str, Any]
-
-class CacheData:
-    def __init__(self, filename: str, data_json: JsonDict, meta_json: JsonDict,
-                 data_size: int, meta_size: int) -> None:
-        self.filename = filename
-        self.data = data_json
-        self.meta = meta_json
-        self.data_size = data_size
-        self.meta_size = meta_size
-
-    @property
-    def total_size(self):
-        return self.data_size + self.meta_size
-
-
-def extract_classes(chunks: Iterable[CacheData]) -> Iterable[JsonDict]:
-    def extract(chunks: Iterable[JsonDict]) -> Iterable[JsonDict]:
-        for chunk in chunks:
-            if isinstance(chunk, dict):
-                yield chunk
-                yield from extract(chunk.values())
-            elif isinstance(chunk, list):
-                yield from extract(chunk)
-    yield from extract([chunk.data for chunk in chunks])
-
-
-def load_json(data_path: str, meta_path: str) -> CacheData:
-    with open(data_path, 'r') as ds:
-        data_json = json.load(ds)
-
-    with open(meta_path, 'r') as ms:
-        meta_json = json.load(ms)
-
-    data_size = os.path.getsize(data_path)
-    meta_size = os.path.getsize(meta_path)
-
-    return CacheData(data_path.replace(".data.json", ".*.json"),
-                     data_json, meta_json, data_size, meta_size)
-
-
-def get_files(root: str) -> Iterable[CacheData]:
-    for (dirpath, dirnames, filenames) in os.walk(root):
-        for filename in filenames:
-            if filename.endswith(".data.json"):
-                meta_filename = filename.replace(".data.json", ".meta.json")
-                yield load_json(
-                        os.path.join(dirpath, filename),
-                        os.path.join(dirpath, meta_filename))
-
-
-def pluck(name: str, chunks: Iterable[JsonDict]) -> Iterable[JsonDict]:
-    return (chunk for chunk in chunks if chunk['.class'] == name)
-
-
-def report_counter(counter: Counter, amount: Optional[int] = None) -> None:
-    for name, count in counter.most_common(amount):
-        print('    {: <8} {}'.format(count, name))
-    print()
-
-
-def report_most_common(chunks: List[JsonDict], amount: Optional[int] = None) -> None:
-    report_counter(Counter(str(chunk) for chunk in chunks), amount)
-
-
-def compress(chunk: JsonDict) -> JsonDict:
-    cache = {}  # type: Dict[int, JsonDict]
-    counter = 0
-    def helper(chunk: Any) -> Any:
-        nonlocal counter
-        if not isinstance(chunk, dict):
-            return chunk
-
-        if len(chunk) <= 2:
-            return chunk
-        id = hash(str(chunk))
-
-        if id in cache:
-            return cache[id]
-        else:
-            cache[id] = {'.id': counter}
-            chunk['.cache_id'] = counter
-            counter += 1
-
-        for name in sorted(chunk.keys()):
-            value = chunk[name]
-            if isinstance(value, list):
-                chunk[name] = [helper(child) for child in value]
-            elif isinstance(value, dict):
-                chunk[name] = helper(value)
-
-        return chunk
-    out = helper(chunk)
-    return out
-
-def decompress(chunk: JsonDict) -> JsonDict:
-    cache = {}  # type: Dict[int, JsonDict]
-    def helper(chunk: Any) -> Any:
-        if not isinstance(chunk, dict):
-            return chunk
-        if '.id' in chunk:
-            return cache[chunk['.id']]
-
-        counter = None
-        if '.cache_id' in chunk:
-            counter = chunk['.cache_id']
-            del chunk['.cache_id']
-
-        for name in sorted(chunk.keys()):
-            value = chunk[name]
-            if isinstance(value, list):
-                chunk[name] = [helper(child) for child in value]
-            elif isinstance(value, dict):
-                chunk[name] = helper(value)
-
-        if counter is not None:
-            cache[counter] = chunk
-
-        return chunk
-    return helper(chunk)
-
-
-
-
-def main() -> None:
-    json_chunks = list(get_files(ROOT))
-    class_chunks = list(extract_classes(json_chunks))
-
-    total_size = sum(chunk.total_size for chunk in json_chunks)
-    print("Total cache size: {:.3f} megabytes".format(total_size / (1024 * 1024)))
-    print()
-
-    class_name_counter = Counter(chunk[".class"] for chunk in class_chunks)
-    print("Most commonly used classes:")
-    report_counter(class_name_counter)
-
-    print("Most common literal chunks:")
-    report_most_common(class_chunks, 15)
-
-    build = None
-    for chunk in json_chunks:
-        if 'build.*.json' in chunk.filename:
-            build = chunk
-            break
-    original = json.dumps(build.data, sort_keys=True)
-    print("Size of build.data.json, in kilobytes: {:.3f}".format(len(original) / 1024))
-
-    build.data = compress(build.data)
-    compressed = json.dumps(build.data, sort_keys=True)
-    print("Size of compressed build.data.json, in kilobytes: {:.3f}".format(len(compressed) / 1024))
-
-    build.data = decompress(build.data)
-    decompressed = json.dumps(build.data, sort_keys=True)
-    print("Size of decompressed build.data.json, in kilobytes: {:.3f}".format(len(decompressed) / 1024))
-
-    print("Lossless conversion back", original == decompressed)
-
-
-    '''var_chunks = list(pluck("Var", class_chunks))
-    report_most_common(var_chunks, 20)
-    print()
-
-    #for var in var_chunks:
-    #    if var['fullname'] == 'self' and not (isinstance(var['type'], dict) and var['type']['.class'] == 'AnyType'):
-    #        print(var)
-    #argument_chunks = list(pluck("Argument", class_chunks))
-
-    symbol_table_node_chunks = list(pluck("SymbolTableNode", class_chunks))
-    report_most_common(symbol_table_node_chunks, 20)
-
-    print()
-    print("Most common")
-    report_most_common(class_chunks, 20)
-    print()'''
-
-
-if __name__ == '__main__':
-    main()
diff --git a/misc/async_matrix.py b/misc/async_matrix.py
deleted file mode 100644
index e9a758a..0000000
--- a/misc/async_matrix.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python3
-"""Test various combinations of generators/coroutines.
-
-This was used to cross-check the errors in the test case
-testFullCoroutineMatrix in test-data/unit/check-async-await.test.
-"""
-
-import sys
-from types import coroutine
-from typing import Any, AsyncIterator, Awaitable, Generator, Iterator
-
-# The various things you might try to use in `await` or `yield from`.
-
-def plain_generator() -> Generator[str, None, int]:
-    yield 'a'
-    return 1
-
-async def plain_coroutine() -> int:
-    return 1
-
-@coroutine
-def decorated_generator() -> Generator[str, None, int]:
-    yield 'a'
-    return 1
-
-@coroutine
-async def decorated_coroutine() -> int:
-    return 1
-
-class It(Iterator[str]):
-    stop = False
-    def __iter__(self) -> 'It':
-        return self
-    def __next__(self) -> str:
-        if self.stop:
-            raise StopIteration('end')
-        else:
-            self.stop = True
-            return 'a'
-
-def other_iterator() -> It:
-    return It()
-
-class Aw(Awaitable[int]):
-    def __await__(self) -> Generator[str, Any, int]:
-        yield 'a'
-        return 1
-
-def other_coroutine() -> Aw:
-    return Aw()
-
-# The various contexts in which `await` or `yield from` might occur.
-
-def plain_host_generator(func) -> Generator[str, None, None]:
-    yield 'a'
-    x = 0
-    f = func()
-    try:
-        x = yield from f
-    finally:
-        try:
-            f.close()
-        except AttributeError:
-            pass
-
-async def plain_host_coroutine(func) -> None:
-    x = 0
-    x = await func()
-
-@coroutine
-def decorated_host_generator(func) -> Generator[str, None, None]:
-    yield 'a'
-    x = 0
-    f = func()
-    try:
-        x = yield from f
-    finally:
-        try:
-            f.close()
-        except AttributeError:
-            pass
-
-@coroutine
-async def decorated_host_coroutine(func) -> None:
-    x = 0
-    x = await func()
-
-# Main driver.
-
-def main():
-    verbose = ('-v' in sys.argv)
-    for host in [plain_host_generator, plain_host_coroutine,
-                 decorated_host_generator, decorated_host_coroutine]:
-        print()
-        print("==== Host:", host.__name__)
-        for func in [plain_generator, plain_coroutine,
-                     decorated_generator, decorated_coroutine,
-                     other_iterator, other_coroutine]:
-            print("  ---- Func:", func.__name__)
-            try:
-                f = host(func)
-                for i in range(10):
-                    try:
-                        x = f.send(None)
-                        if verbose:
-                            print("    yield:", x)
-                    except StopIteration as e:
-                        if verbose:
-                            print("    stop:", e.value)
-                        break
-                else:
-                    if verbose:
-                        print("    ???? still going")
-            except Exception as e:
-                print("    error:", repr(e))
-
-# Run main().
-
-if __name__ == '__main__':
-    main()
diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py
deleted file mode 100644
index 0b552bf..0000000
--- a/misc/fix_annotate.py
+++ /dev/null
@@ -1,219 +0,0 @@
-"""Fixer for lib2to3 that inserts mypy annotations into all methods.
-
-The simplest way to run this is to copy it into lib2to3's "fixes"
-subdirectory and then run "2to3 -f annotate" over your files.
-
-The fixer transforms e.g.
-
-  def foo(self, bar, baz=12):
-      return bar + baz
-
-into
-
-  def foo(self, bar, baz=12):
-      # type: (Any, int) -> Any
-      return bar + baz
-
-It does not do type inference but it recognizes some basic default
-argument values such as numbers and strings (and assumes their type
-implies the argument type).
-
-It also uses some basic heuristics to decide whether to ignore the
-first argument:
-
-  - always if it's named 'self'
-  - if there's a @classmethod decorator
-
-Finally, it knows that __init__() is supposed to return None.
-"""
-
-from __future__ import print_function
-
-import os
-import re
-
-from lib2to3.fixer_base import BaseFix
-from lib2to3.patcomp import compile_pattern
-from lib2to3.pytree import Leaf, Node
-from lib2to3.fixer_util import token, syms, touch_import
-
-
-class FixAnnotate(BaseFix):
-
-    # This fixer is compatible with the bottom matcher.
-    BM_compatible = True
-
-    # This fixer shouldn't run by default.
-    explicit = True
-
-    # The pattern to match.
-    PATTERN = """
-              funcdef< 'def' name=any parameters< '(' [args=any] ')' > ':' suite=any+ >
-              """
-
-    counter = None if not os.getenv('MAXFIXES') else int(os.getenv('MAXFIXES'))
-
-    def transform(self, node, results):
-        if FixAnnotate.counter is not None:
-            if FixAnnotate.counter <= 0:
-                return
-        suite = results['suite']
-        children = suite[0].children
-
-        # NOTE: I've reverse-engineered the structure of the parse tree.
-        # It's always a list of nodes, the first of which contains the
-        # entire suite.  Its children seem to be:
-        #
-        #   [0] NEWLINE
-        #   [1] INDENT
-        #   [2...n-2] statements (the first may be a docstring)
-        #   [n-1] DEDENT
-        #
-        # Comments before the suite are part of the INDENT's prefix.
-        #
-        # "Compact" functions (e.g. "def foo(x, y): return max(x, y)")
-        # have a different structure that isn't matched by PATTERN.
-
-        ## print('-'*60)
-        ## print(node)
-        ## for i, ch in enumerate(children):
-        ##     print(i, repr(ch.prefix), repr(ch))
-
-        # Check if there's already an annotation.
-        for ch in children:
-            if ch.prefix.lstrip().startswith('# type:'):
-                return  # There's already a # type: comment here; don't change anything.
-
-        # Compute the annotation
-        annot = self.make_annotation(node, results)
-
-        # Insert '# type: {annot}' comment.
-        # For reference, see lib2to3/fixes/fix_tuple_params.py in stdlib.
-        if len(children) >= 2 and children[1].type == token.INDENT:
-            children[1].prefix = '%s# type: %s\n%s' % (children[1].value, annot, children[1].prefix)
-            children[1].changed()
-            if FixAnnotate.counter is not None:
-                FixAnnotate.counter -= 1
-
-        # Also add 'from typing import Any' at the top.
-        if 'Any' in annot:
-            touch_import('typing', 'Any', node)
-
-    def make_annotation(self, node, results):
-        name = results['name']
-        assert isinstance(name, Leaf), repr(name)
-        assert name.type == token.NAME, repr(name)
-        decorators = self.get_decorators(node)
-        is_method = self.is_method(node)
-        if name.value == '__init__' or not self.has_return_exprs(node):
-            restype = 'None'
-        else:
-            restype = 'Any'
-        args = results.get('args')
-        argtypes = []
-        if isinstance(args, Node):
-            children = args.children
-        elif isinstance(args, Leaf):
-            children = [args]
-        else:
-            children = []
-        # Interpret children according to the following grammar:
-        # (('*'|'**')? NAME ['=' expr] ','?)*
-        stars = inferred_type = ''
-        in_default = False
-        at_start = True
-        for child in children:
-            if isinstance(child, Leaf):
-                if child.value in ('*', '**'):
-                    stars += child.value
-                elif child.type == token.NAME and not in_default:
-                    if not is_method or not at_start or 'staticmethod' in decorators:
-                        inferred_type = 'Any'
-                    else:
-                        # Always skip the first argument if it's named 'self'.
-                        # Always skip the first argument of a class method.
-                        if  child.value == 'self' or 'classmethod' in decorators:
-                            pass
-                        else:
-                            inferred_type = 'Any'
-                elif child.value == '=':
-                    in_default = True
-                elif in_default and child.value != ',':
-                    if child.type == token.NUMBER:
-                        if re.match(r'\d+[lL]?$', child.value):
-                            inferred_type = 'int'
-                        else:
-                            inferred_type = 'float'  # TODO: complex?
-                    elif child.type == token.STRING:
-                        if child.value.startswith(('u', 'U')):
-                            inferred_type = 'unicode'
-                        else:
-                            inferred_type = 'str'
-                    elif child.type == token.NAME and child.value in ('True', 'False'):
-                        inferred_type = 'bool'
-                elif child.value == ',':
-                    if inferred_type:
-                        argtypes.append(stars + inferred_type)
-                    # Reset
-                    stars = inferred_type = ''
-                    in_default = False
-                    at_start = False
-        if inferred_type:
-            argtypes.append(stars + inferred_type)
-        return '(' + ', '.join(argtypes) + ') -> ' + restype
-
-    # The parse tree has a different shape when there is a single
-    # decorator vs. when there are multiple decorators.
-    DECORATED = "decorated< (d=decorator | decorators< dd=decorator+ >) funcdef >"
-    decorated = compile_pattern(DECORATED)
-
-    def get_decorators(self, node):
-        """Return a list of decorators found on a function definition.
-
-        This is a list of strings; only simple decorators
-        (e.g. @staticmethod) are returned.
-
-        If the function is undecorated or only non-simple decorators
-        are found, return [].
-        """
-        if node.parent is None:
-            return []
-        results = {}
-        if not self.decorated.match(node.parent, results):
-            return []
-        decorators = results.get('dd') or [results['d']]
-        decs = []
-        for d in decorators:
-            for child in d.children:
-                if isinstance(child, Leaf) and child.type == token.NAME:
-                    decs.append(child.value)
-        return decs
-
-    def is_method(self, node):
-        """Return whether the node occurs (directly) inside a class."""
-        node = node.parent
-        while node is not None:
-            if node.type == syms.classdef:
-                return True
-            if node.type == syms.funcdef:
-                return False
-            node = node.parent
-        return False
-
-    RETURN_EXPR = "return_stmt< 'return' any >"
-    return_expr = compile_pattern(RETURN_EXPR)
-
-    def has_return_exprs(self, node):
-        """Traverse the tree below node looking for 'return expr'.
-
-        Return True if at least 'return expr' is found, False if not.
-        (If both 'return' and 'return expr' are found, return True.)
-        """
-        results = {}
-        if self.return_expr.match(node, results):
-            return True
-        for child in node.children:
-            if child.type not in (syms.funcdef, syms.classdef):
-                if self.has_return_exprs(child):
-                    return True
-        return False
diff --git a/misc/incremental_checker.py b/misc/incremental_checker.py
deleted file mode 100755
index 515e662..0000000
--- a/misc/incremental_checker.py
+++ /dev/null
@@ -1,356 +0,0 @@
-#!/usr/bin/env python3
-"""
-This file compares the output and runtime of running normal vs incremental mode
-on the history of any arbitrary git repo as a way of performing a sanity check
-to make sure incremental mode is working correctly and efficiently.
-
-It does so by first running mypy without incremental mode on the specified range
-of commits to find the expected result, then rewinds back to the first commit and
-re-runs mypy on the commits with incremental mode enabled to make sure it returns
-the same results.
-
-This script will download and test the offical mypy repo by default. Running:
-
-    python3 misc/incremental_checker.py last 30
-
-is equivalent to running
-
-    python3 misc/incremental_checker.py last 30 \\
-            --repo_url https://github.com/python/mypy.git \\
-            --file-path mypy
-
-You can chose to run this script against a specific commit id or against the
-last n commits.
-
-To run this script against the last 30 commits:
-
-    python3 misc/incremental_checker.py last 30
-
-To run this script starting from the commit id 2a432b:
-
-    python3 misc/incremental_checker.py commit 2a432b
-"""
-
-from typing import Any, Dict, List, Optional, Tuple
-
-from argparse import (ArgumentParser, RawDescriptionHelpFormatter,
-                      ArgumentDefaultsHelpFormatter, Namespace)
-import base64
-import json
-import os
-import random
-import shutil
-import subprocess
-import sys
-import textwrap
-import time
-
-
-CACHE_PATH = ".incremental_checker_cache.json"
-MYPY_REPO_URL = "https://github.com/python/mypy.git"
-MYPY_TARGET_FILE = "mypy"
-
-JsonDict = Dict[str, Any]
-
-
-def print_offset(text: str, indent_length: int = 4) -> None:
-    print()
-    print(textwrap.indent(text, ' ' * indent_length))
-    print()
-
-
-def delete_folder(folder_path: str) -> None:
-    if os.path.exists(folder_path):
-        shutil.rmtree(folder_path)
-
-
-def execute(command: List[str], fail_on_error: bool = True) -> Tuple[str, str, int]:
-    proc = subprocess.Popen(
-        ' '.join(command),
-        stderr=subprocess.PIPE,
-        stdout=subprocess.PIPE,
-        shell=True)
-    stdout_bytes, stderr_bytes = proc.communicate()  # type: Tuple[bytes, bytes]
-    stdout, stderr = stdout_bytes.decode('utf-8'), stderr_bytes.decode('utf-8')
-    if fail_on_error and proc.returncode != 0:
-        print('EXECUTED COMMAND:', repr(command))
-        print('RETURN CODE:', proc.returncode)
-        print()
-        print('STDOUT:')
-        print_offset(stdout)
-        print('STDERR:')
-        print_offset(stderr)
-        raise RuntimeError('Unexpected error from external tool.')
-    return stdout, stderr, proc.returncode
-
-
-def ensure_environment_is_ready(mypy_path: str, temp_repo_path: str, mypy_cache_path: str) -> None:
-    os.chdir(mypy_path)
-    delete_folder(temp_repo_path)
-    delete_folder(mypy_cache_path)
-
-
-def initialize_repo(repo_url: str, temp_repo_path: str, branch: str) -> None:
-    print("Cloning repo {0} to {1}".format(repo_url, temp_repo_path))
-    execute(["git", "clone", repo_url, temp_repo_path])
-    if branch is not None:
-        print("Checking out branch {}".format(branch))
-        execute(["git", "-C", temp_repo_path, "checkout", branch])
-
-
-def get_commits(repo_folder_path: str, commit_range: str) -> List[Tuple[str, str]]:
-    raw_data, _stderr, _errcode = execute([
-        "git", "-C", repo_folder_path, "log", "--reverse", "--oneline", commit_range])
-    output = []
-    for line in raw_data.strip().split('\n'):
-        commit_id, _, message = line.partition(' ')
-        output.append((commit_id, message))
-    return output
-
-
-def get_commits_starting_at(repo_folder_path: str, start_commit: str) -> List[Tuple[str, str]]:
-    print("Fetching commits starting at {0}".format(start_commit))
-    return get_commits(repo_folder_path, '{0}^..HEAD'.format(start_commit))
-
-
-def get_nth_commit(repo_folder_path, n: int) -> Tuple[str, str]:
-    print("Fetching last {} commits (or all, if there are fewer commits than n)".format(n))
-    return get_commits(repo_folder_path, '-{}'.format(n))[0]
-
-
-def run_mypy(target_file_path: Optional[str],
-             mypy_cache_path: str,
-             mypy_script: Optional[str],
-             incremental: bool = True,
-             verbose: bool = False) -> Tuple[float, str]:
-    """Runs mypy against `target_file_path` and returns what mypy prints to stdout as a string.
-
-    If `incremental` is set to True, this function will use store and retrieve all caching data
-    inside `mypy_cache_path`. If `verbose` is set to True, this function will pass the "-v -v"
-    flags to mypy to make it output debugging information.
-    """
-    if mypy_script is None:
-        command = ["python3", "-m", "mypy"]
-    else:
-        command = [mypy_script]
-    command.extend(["--cache-dir", mypy_cache_path])
-    if incremental:
-        command.append("--incremental")
-    if verbose:
-        command.extend(["-v", "-v"])
-    if target_file_path is not None:
-        command.append(target_file_path)
-    start = time.time()
-    output, stderr, _ = execute(command, False)
-    if stderr != "":
-        output = stderr
-    runtime = time.time() - start
-    return runtime, output
-
-
-def load_cache(incremental_cache_path: str = CACHE_PATH) -> JsonDict:
-    if os.path.exists(incremental_cache_path):
-        with open(incremental_cache_path, 'r') as stream:
-            return json.load(stream)
-    else:
-        return {}
-
-
-def save_cache(cache: JsonDict, incremental_cache_path: str = CACHE_PATH) -> None:
-    with open(incremental_cache_path, 'w') as stream:
-        json.dump(cache, stream, indent=2)
-
-
-def set_expected(commits: List[Tuple[str, str]],
-                 cache: JsonDict,
-                 temp_repo_path: str,
-                 target_file_path: Optional[str],
-                 mypy_cache_path: str,
-                 mypy_script: Optional[str]) -> None:
-    """Populates the given `cache` with the expected results for all of the given `commits`.
-
-    This function runs mypy on the `target_file_path` inside the `temp_repo_path`, and stores
-    the result in the `cache`.
-
-    If `cache` already contains results for a particular commit, this function will
-    skip evaluating that commit and move on to the next."""
-    for commit_id, message in commits:
-        if commit_id in cache:
-            print('Skipping commit (already cached): {0}: "{1}"'.format(commit_id, message))
-        else:
-            print('Caching expected output for commit {0}: "{1}"'.format(commit_id, message))
-            execute(["git", "-C", temp_repo_path, "checkout", commit_id])
-            runtime, output = run_mypy(target_file_path, mypy_cache_path, mypy_script,
-                                       incremental=False)
-            cache[commit_id] = {'runtime': runtime, 'output': output}
-            if output == "":
-                print("    Clean output ({:.3f} sec)".format(runtime))
-            else:
-                print("    Output ({:.3f} sec)".format(runtime))
-                print_offset(output, 8)
-    print()
-
-
-def test_incremental(commits: List[Tuple[str, str]],
-                     cache: JsonDict,
-                     temp_repo_path: str,
-                     target_file_path: Optional[str],
-                     mypy_cache_path: str,
-                     mypy_script: Optional[str]) -> None:
-    """Runs incremental mode on all `commits` to verify the output matches the expected output.
-
-    This function runs mypy on the `target_file_path` inside the `temp_repo_path`. The
-    expected output must be stored inside of the given `cache`.
-    """
-    print("Note: first commit is evaluated twice to warm up cache")
-    commits = [commits[0]] + commits
-    for commit_id, message in commits:
-        print('Now testing commit {0}: "{1}"'.format(commit_id, message))
-        execute(["git", "-C", temp_repo_path, "checkout", commit_id])
-        runtime, output = run_mypy(target_file_path, mypy_cache_path, mypy_script,
-                                   incremental=True)
-        expected_runtime = cache[commit_id]['runtime']  # type: float
-        expected_output = cache[commit_id]['output']  # type: str
-        if output != expected_output:
-            print("    Output does not match expected result!")
-            print("    Expected output ({:.3f} sec):".format(expected_runtime))
-            print_offset(expected_output, 8)
-            print("    Actual output: ({:.3f} sec):".format(runtime))
-            print_offset(output, 8)
-        else:
-            print("    Output matches expected result!")
-            print("    Incremental: {:.3f} sec".format(runtime))
-            print("    Original:    {:.3f} sec".format(expected_runtime))
-
-
-def cleanup(temp_repo_path: str, mypy_cache_path: str) -> None:
-    delete_folder(temp_repo_path)
-    delete_folder(mypy_cache_path)
-
-
-def test_repo(target_repo_url: str, temp_repo_path: str,
-              target_file_path: Optional[str],
-              mypy_path: str, incremental_cache_path: str, mypy_cache_path: str,
-              range_type: str, range_start: str, branch: str,
-              params: Optional[Namespace] = None) -> None:
-    """Tests incremental mode against the repo specified in `target_repo_url`.
-
-    This algorithm runs in five main stages:
-
-    1.  Clones `target_repo_url` into the `temp_repo_path` folder locally,
-        checking out the specified `branch` if applicable.
-    2.  Examines the repo's history to get the list of all commits to
-        to test incremental mode on.
-    3.  Runs mypy WITHOUT incremental mode against the `target_file_path` (which is
-        assumed to be located inside the `temp_repo_path`), testing each commit
-        discovered in stage two.
-        -   If the results of running mypy WITHOUT incremental mode on a
-            particular commit are already cached inside the `incremental_cache_path`,
-            skip that commit to save time.
-        -   Cache the results after finishing.
-    4.  Rewind back to the first commit, and run mypy WITH incremental mode
-        against the `target_file_path` commit-by-commit, and compare to the expected
-        results found in stage 3.
-    5.  Delete all unnecessary temp files.
-    """
-    # Stage 1: Clone repo and get ready to being testing
-    ensure_environment_is_ready(mypy_path, temp_repo_path, mypy_cache_path)
-    initialize_repo(target_repo_url, temp_repo_path, branch)
-
-    # Stage 2: Get all commits we want to test
-    if range_type == "last":
-        start_commit = get_nth_commit(temp_repo_path, int(range_start))[0]
-    elif range_type == "commit":
-        start_commit = range_start
-    else:
-        raise RuntimeError("Invalid option: {}".format(range_type))
-    commits = get_commits_starting_at(temp_repo_path, start_commit)
-    if params is not None and params.sample:
-        seed = params.seed or base64.urlsafe_b64encode(os.urandom(15)).decode('ascii')
-        random.seed(seed)
-        commits = random.sample(commits, params.sample)
-        print("Sampled down to %d commits using random seed %s" % (len(commits), seed))
-
-    # Stage 3: Find and cache expected results for each commit (without incremental mode)
-    cache = load_cache(incremental_cache_path)
-    set_expected(commits, cache, temp_repo_path, target_file_path, mypy_cache_path,
-                 mypy_script=params.mypy_script)
-    save_cache(cache, incremental_cache_path)
-
-    # Stage 4: Rewind and re-run mypy (with incremental mode enabled)
-    test_incremental(commits, cache, temp_repo_path, target_file_path, mypy_cache_path,
-                     mypy_script=params.mypy_script)
-
-    # Stage 5: Remove temp files
-    cleanup(temp_repo_path, mypy_cache_path)
-
-
-def main() -> None:
-    help_factory = (lambda prog: RawDescriptionHelpFormatter(prog=prog, max_help_position=32))
-    parser = ArgumentParser(
-        prog='incremental_checker',
-        description=__doc__,
-        formatter_class=help_factory)
-
-    parser.add_argument("range_type", metavar="START_TYPE", choices=["last", "commit"],
-                        help="must be one of 'last' or 'commit'")
-    parser.add_argument("range_start", metavar="COMMIT_ID_OR_NUMBER",
-                        help="the commit id to start from, or the number of "
-                        "commits to move back (see above)")
-    parser.add_argument("-r", "--repo_url", default=MYPY_REPO_URL, metavar="URL",
-                        help="the repo to clone and run tests on")
-    parser.add_argument("-f", "--file-path", default=MYPY_TARGET_FILE, metavar="FILE",
-                        help="the name of the file or directory to typecheck")
-    parser.add_argument("--cache-path", default=CACHE_PATH, metavar="DIR",
-                        help="sets a custom location to store cache data")
-    parser.add_argument("--branch", default=None, metavar="NAME",
-                        help="check out and test a custom branch"
-                        "uses the default if not specified")
-    parser.add_argument("--sample", type=int, help="use a random sample of size SAMPLE")
-    parser.add_argument("--seed", type=str, help="random seed")
-    parser.add_argument("--mypy-script", type=str, help="alternate mypy script to run")
-
-    if len(sys.argv[1:]) == 0:
-        parser.print_help()
-        parser.exit()
-
-    params = parser.parse_args(sys.argv[1:])
-
-    # Make all paths absolute so we avoid having to worry about being in the right folder
-
-    # The path to this specific script (incremental_checker.py).
-    script_path = os.path.abspath(sys.argv[0])
-
-    # The path to the mypy repo.
-    mypy_path = os.path.abspath(os.path.dirname(os.path.dirname(script_path)))
-
-    # The folder the cloned repo will reside in.
-    temp_repo_path = os.path.abspath(os.path.join(mypy_path, "tmp_repo"))
-
-    # The particular file or package to typecheck inside the repo.
-    if params.file_path:
-        target_file_path = os.path.abspath(os.path.join(temp_repo_path, params.file_path))
-    else:
-        # Allow `-f ''` to clear target_file_path.
-        target_file_path = None
-
-    # The path to where the incremental checker cache data is stored.
-    incremental_cache_path = os.path.abspath(params.cache_path)
-
-    # The path to store the mypy incremental mode cache data
-    mypy_cache_path = os.path.abspath(os.path.join(mypy_path, "misc", ".mypy_cache"))
-
-    print("Assuming mypy is located at {0}".format(mypy_path))
-    print("Temp repo will be cloned at {0}".format(temp_repo_path))
-    print("Testing file/dir located at {0}".format(target_file_path))
-    print("Using cache data located at {0}".format(incremental_cache_path))
-    print()
-
-    test_repo(params.repo_url, temp_repo_path, target_file_path,
-              mypy_path, incremental_cache_path, mypy_cache_path,
-              params.range_type, params.range_start, params.branch,
-              params)
-
-
-if __name__ == '__main__':
-    main()
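
For context, the deleted incremental_checker.py was driven entirely by the
options defined above. Based on that argument parser, a typical invocation
(with a hypothetical commit count and sample size) would have looked like:

    python3 misc/incremental_checker.py last 30 --sample 10

or, starting from an explicit commit id instead of a count:

    python3 misc/incremental_checker.py commit <COMMIT_ID> --mypy-script /path/to/mypy

The flag names come from the parser above; the count, sample size, commit id
and script path are placeholders.
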
diff --git a/misc/macs.el b/misc/macs.el
deleted file mode 100644
index 67d80aa..0000000
--- a/misc/macs.el
+++ /dev/null
@@ -1,22 +0,0 @@
-; Example Emacs integration; shows type of expression in region.
-
-(defun mypy-show-region ()
-  "Show type of variable at point."
-  (interactive)
-  (let ((here (region-beginning))
-        (there (region-end))
-        (filename (buffer-file-name)))
-    (let ((hereline (line-number-at-pos here))
-          (herecol (save-excursion (goto-char here) (current-column)))
-          (thereline (line-number-at-pos there))
-          (therecol (save-excursion (goto-char there) (current-column))))
-      (shell-command
-       (format "cd ~/src/mypy; python3 ./scripts/find_type.py %s %s %s %s %s python3 -m mypy -i mypy"
-               filename hereline herecol thereline therecol)
-       )
-      )
-    )
-  )
-
-; I like to bind this to ^X-t.
-(global-set-key "\C-xt" 'mypy-show-region)
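
As an aside, the Emacs helper above only shells out to scripts/find_type.py,
passing the file name, the region's start/end line and column, and the mypy
command to run. Invoked by hand with a hypothetical file and region, the
underlying call would be roughly:

    python3 ./scripts/find_type.py foo.py 12 4 12 10 python3 -m mypy -i mypy

where foo.py and the four numbers stand in for the buffer file and region
coordinates taken from Emacs.
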
diff --git a/misc/perf_checker.py b/misc/perf_checker.py
deleted file mode 100644
index e55f8cc..0000000
--- a/misc/perf_checker.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python3
-
-from typing import Callable, List, Tuple
-
-import os
-import shutil
-import statistics
-import subprocess
-import textwrap
-import time
-
-
-class Command:
-    def __init__(self, setup: Callable[[], None], command: Callable[[], None]) -> None:
-        self.setup = setup
-        self.command = command
-
-
-def print_offset(text: str, indent_length: int = 4) -> None:
-    print()
-    print(textwrap.indent(text, ' ' * indent_length))
-    print()
-
-
-def delete_folder(folder_path: str) -> None:
-    if os.path.exists(folder_path):
-        shutil.rmtree(folder_path)
-
-
-def execute(command: List[str]) -> None:
-    proc = subprocess.Popen(
-        ' '.join(command),
-        stderr=subprocess.PIPE,
-        stdout=subprocess.PIPE,
-        shell=True)
-    stdout_bytes, stderr_bytes = proc.communicate()  # type: Tuple[bytes, bytes]
-    stdout, stderr = stdout_bytes.decode('utf-8'), stderr_bytes.decode('utf-8')
-    if proc.returncode != 0:
-        print('EXECUTED COMMAND:', repr(command))
-        print('RETURN CODE:', proc.returncode)
-        print()
-        print('STDOUT:')
-        print_offset(stdout)
-        print('STDERR:')
-        print_offset(stderr)
-        raise RuntimeError('Unexpected error from external tool.')
-
-
-def trial(num_trials: int, command: Command) -> List[float]:
-    trials = []
-    for i in range(num_trials):
-        command.setup()
-        start = time.time()
-        command.command()
-        delta = time.time() - start
-        trials.append(delta)
-    return trials
-
-
-def report(name: str, times: List[float]) -> None:
-    print("{}:".format(name))
-    print("  Times: {}".format(times))
-    print("  Mean:  {}".format(statistics.mean(times)))
-    print("  Stdev: {}".format(statistics.stdev(times)))
-    print()
-
-
-def main() -> None:
-    trials = 3
-
-    print("Testing baseline")
-    baseline = trial(trials, Command(
-        lambda: None,
-        lambda: execute(["python3", "-m", "mypy", "mypy"])))
-    report("Baseline", baseline)
-
-    print("Testing cold cache")
-    cold_cache = trial(trials, Command(
-        lambda: delete_folder(".mypy_cache"),
-        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"])))
-    report("Cold cache", cold_cache)
-
-    print("Testing warm cache")
-    execute(["python3", "-m", "mypy", "-i", "mypy"])
-    warm_cache = trial(trials, Command(
-        lambda: None,
-        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"])))
-    report("Warm cache", warm_cache)
-
-
-if __name__ == '__main__':
-    main()
-
diff --git a/misc/remove-eol-whitespace.sh b/misc/remove-eol-whitespace.sh
deleted file mode 100644
index 3da6b9d..0000000
--- a/misc/remove-eol-whitespace.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-
-# Remove trailing whitespace from all non-binary files in a git repo.
-
-# From https://gist.github.com/dpaluy/3690668; originally from here:
-# http://unix.stackexchange.com/questions/36233/how-to-skip-file-in-sed-if-it-contains-regex/36240#36240
-
-git grep -I --name-only -z -e '' | xargs -0 sed -i -e 's/[ \t]\+\(\r\?\)$/\1/'
diff --git a/misc/test_case_to_actual.py b/misc/test_case_to_actual.py
deleted file mode 100644
index 9a91bb1..0000000
--- a/misc/test_case_to_actual.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from typing import Iterator, List
-import sys
-import os
-import os.path
-
-
-class Chunk:
-    def __init__(self, header_type: str, args: str) -> None:
-        self.header_type = header_type
-        self.args = args
-        self.lines = []  # type: List[str]
-
-
-def is_header(line: str) -> bool:
-    return line.startswith('[') and line.endswith(']')
-
-
-def normalize(lines: Iterator[str]) -> Iterator[str]:
-    return (line.rstrip() for line in lines)
-
-
-def produce_chunks(lines: Iterator[str]) -> Iterator[Chunk]:
-    current_chunk = None  # type: Chunk
-    for line in normalize(lines):
-        if is_header(line):
-            if current_chunk is not None:
-                yield current_chunk
-            parts = line[1:-1].split(' ', 1)
-            args = parts[1] if len(parts) > 1 else ''
-            current_chunk = Chunk(parts[0], args)
-        else:
-            current_chunk.lines.append(line)
-    if current_chunk is not None:
-        yield current_chunk
-
-
-def write_out(filename: str, lines: List[str]) -> None:
-    os.makedirs(os.path.dirname(filename), exist_ok=True)
-    with open(filename, 'w') as stream:
-        stream.write('\n'.join(lines))
-
-
-def write_tree(root: str, chunks: Iterator[Chunk]) -> None:
-    init = next(chunks)
-    assert init.header_type == 'case'
-    
-    root = os.path.join(root, init.args)
-    write_out(os.path.join(root, 'main.py'), init.lines)
-
-    for chunk in chunks:
-        if chunk.header_type == 'file' and chunk.args.endswith('.py'):
-            write_out(os.path.join(root, chunk.args), chunk.lines)
-
-
-def help() -> None:
-    print("Usage: python misc/test_case_to_actual.py test_file.txt root_path")
-
-
-def main() -> None:
-    if len(sys.argv) != 3:
-        help()
-        return
-
-    test_file_path, root_path = sys.argv[1], sys.argv[2]
-    with open(test_file_path, 'r') as stream:
-        chunks = produce_chunks(iter(stream))
-        write_tree(root_path, chunks)
-
-
-if __name__ == '__main__':
-    main()
diff --git a/misc/touch_checker.py b/misc/touch_checker.py
deleted file mode 100644
index c44afe4..0000000
--- a/misc/touch_checker.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python3
-
-from typing import Callable, List, Tuple, Optional
-
-import sys
-import glob
-import os
-import shutil
-import statistics
-import subprocess
-import textwrap
-import time
-
-
-def print_offset(text: str, indent_length: int = 4) -> None:
-    print()
-    print(textwrap.indent(text, ' ' * indent_length))
-    print()
-
-
-def delete_folder(folder_path: str) -> None:
-    if os.path.exists(folder_path):
-        shutil.rmtree(folder_path)
-
-
-def execute(command: List[str]) -> None:
-    proc = subprocess.Popen(
-        ' '.join(command),
-        stderr=subprocess.PIPE,
-        stdout=subprocess.PIPE,
-        shell=True)
-    stdout_bytes, stderr_bytes = proc.communicate()  # type: Tuple[bytes, bytes]
-    stdout, stderr = stdout_bytes.decode('utf-8'), stderr_bytes.decode('utf-8')
-    if proc.returncode != 0:
-        print('EXECUTED COMMAND:', repr(command))
-        print('RETURN CODE:', proc.returncode)
-        print()
-        print('STDOUT:')
-        print_offset(stdout)
-        print('STDERR:')
-        print_offset(stderr)
-        print()
-
-
-Command = Callable[[], None]
-
-
-def test(setup: Command, command: Command, teardown: Command) -> float:
-    setup()
-    start = time.time()
-    command()
-    end = time.time() - start
-    teardown()
-    return end
-
-
-def make_touch_wrappers(filename: str) -> Tuple[Command, Command]:
-    def setup() -> None:
-        execute(["touch", filename])
-    def teardown() -> None:
-        pass
-    return setup, teardown
-
-
-def make_change_wrappers(filename: str) -> Tuple[Command, Command]:
-    copy = None  # type: Optional[str]
-
-    def setup() -> None:
-        nonlocal copy
-        with open(filename, 'r') as stream:
-            copy = stream.read()
-        with open(filename, 'a') as stream:
-            stream.write('\n\nfoo = 3')
-
-    def teardown() -> None:
-        assert copy is not None
-        with open(filename, 'w') as stream:
-            stream.write(copy)
-
-        # Re-run to reset cache
-        execute(["python3", "-m", "mypy", "-i", "mypy"]),
-
-    return setup, teardown
-
-def main() -> None:
-    if len(sys.argv) != 2 or sys.argv[1] not in {'touch', 'change'}:
-        print("First argument should be 'touch' or 'change'")
-        return
-
-    if sys.argv[1] == 'touch':
-        make_wrappers = make_touch_wrappers
-        verb = "Touching"
-    elif sys.argv[1] == 'change':
-        make_wrappers = make_change_wrappers
-        verb = "Changing"
-    else:
-        raise AssertionError()
-
-    print("Setting up...")
-
-    baseline = test(
-        lambda: None,
-        lambda: execute(["python3", "-m", "mypy", "mypy"]),
-        lambda: None)
-    print("Baseline:   {}".format(baseline))
-
-    cold = test(
-        lambda: delete_folder(".mypy_cache"),
-        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
-        lambda: None)
-    print("Cold cache: {}".format(cold))
-
-    warm = test(
-        lambda: None,
-        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
-        lambda: None)
-    print("Warm cache: {}".format(warm))
-
-    print()
-
-    deltas = []
-    for filename in glob.iglob("mypy/**/*.py", recursive=True):
-        print("{} {}".format(verb, filename))
-        
-        setup, teardown = make_wrappers(filename)
-        delta = test(
-            setup,
-            lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
-            teardown)
-        print("    Time: {}".format(delta))
-        deltas.append(delta)
-    print()
-
-    print("Initial:")
-    print("    Baseline:   {}".format(baseline))
-    print("    Cold cache: {}".format(cold))
-    print("    Warm cache: {}".format(warm))
-    print()
-    print("Aggregate:")
-    print("    Times:      {}".format(deltas))
-    print("    Mean:       {}".format(statistics.mean(deltas)))
-    print("    Median:     {}".format(statistics.median(deltas)))
-    print("    Stdev:      {}".format(statistics.stdev(deltas)))
-    print("    Min:        {}".format(min(deltas)))
-    print("    Max:        {}".format(max(deltas)))
-    print("    Total:      {}".format(sum(deltas)))
-    print()
-
-if __name__ == '__main__':
-    main()
-
diff --git a/misc/upload-pypi.py b/misc/upload-pypi.py
deleted file mode 100644
index 1b1e956..0000000
--- a/misc/upload-pypi.py
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env python3
-"""Build and upload mypy packages for Linux and macOS to PyPI.
-
-Note: This should be run on macOS using official python.org Python 3.6 or
-      later, as this is the only tested configuration. Use --force to
-      run anyway.
-
-This uses a fresh repo clone and a fresh virtualenv to avoid depending on
-local state.
-
-Ideas for improvements:
-
-- also upload Windows wheels
-- try installing the generated packages and running mypy
-- try installing the uploaded packages and running mypy
-- run tests
-- verify that there is a green travis build
-
-"""
-
-import argparse
-import getpass
-import os
-import os.path
-import re
-import subprocess
-import sys
-import tempfile
-from typing import Any
-
-
-class Builder:
-    def __init__(self, version: str, force: bool, no_upload: bool) -> None:
-        if not re.match(r'0\.[0-9]{3}$', version):
-            sys.exit('Invalid version {!r} (expected form 0.123)'.format(version))
-        self.version = version
-        self.force = force
-        self.no_upload = no_upload
-        self.target_dir = tempfile.mkdtemp()
-        self.repo_dir = os.path.join(self.target_dir, 'mypy')
-
-    def build_and_upload(self) -> None:
-        self.prompt()
-        self.run_sanity_checks()
-        print('Temporary target directory: {}'.format(self.target_dir))
-        self.git_clone_repo()
-        self.git_check_out_tag()
-        self.verify_version()
-        self.make_virtualenv()
-        self.install_dependencies()
-        self.make_wheel()
-        self.make_sdist()
-        if not self.no_upload:
-            self.upload_wheel()
-            self.upload_sdist()
-            self.heading('Successfully uploaded wheel and sdist for mypy {}'.format(self.version))
-            print("<< Don't forget to upload Windows wheels! >>")
-        else:
-            self.heading('Successfully built wheel and sdist for mypy {}'.format(self.version))
-            dist_dir = os.path.join(self.repo_dir, 'dist')
-            print('Generated packages in {}:'.format(dist_dir))
-            for fnam in sorted(os.listdir(dist_dir)):
-                print('  {}'.format(os.path.join(dist_dir, fnam)))
-
-    def prompt(self) -> None:
-        if self.force:
-            return
-        extra = '' if self.no_upload else ' and upload'
-        print('This will build{} PyPI packages for mypy {}.'.format(extra, self.version))
-        response = input('Proceed? [yN] ')
-        if response.lower() != 'y':
-            sys.exit('Exiting')
-
-    def verify_version(self) -> None:
-        version_path = os.path.join(self.repo_dir, 'mypy', 'version.py')
-        with open(version_path) as f:
-            contents = f.read()
-        if "'{}'".format(self.version) not in contents:
-            sys.stderr.write(
-                '\nError: Version {} does not match {}/mypy/version.py\n'.format(
-                self.version, self.repo_dir))
-            sys.exit(2)
-
-    def run_sanity_checks(self) -> None:
-        if not sys.version_info >= (3, 6):
-            sys.exit('You must use Python 3.6 or later to build mypy')
-        if sys.platform != 'darwin' and not self.force:
-            sys.exit('You should run this on macOS; use --force to go ahead anyway')
-        os_file = os.path.realpath(os.__file__)
-        if not os_file.startswith('/Library/Frameworks') and not self.force:
-            # Be defensive -- Python from brew may produce bad packages, for example.
-            sys.exit('Error -- run this script using an official Python build from python.org')
-        if getpass.getuser() == 'root':
-            sys.exit('This script must not be run as root')
-
-    def git_clone_repo(self) -> None:
-        self.heading('Cloning mypy git repository')
-        self.run('git clone https://github.com/python/mypy')
-
-    def git_check_out_tag(self) -> None:
-        tag = 'v{}'.format(self.version)
-        self.heading('Check out {}'.format(tag))
-        self.run('cd mypy && git checkout {}'.format(tag))
-        self.run('cd mypy && git submodule update --init typeshed'.format(tag))
-
-    def make_virtualenv(self) -> None:
-        self.heading('Creating a fresh virtualenv')
-        self.run('virtualenv -p {} mypy-venv'.format(sys.executable))
-
-    def install_dependencies(self) -> None:
-        self.heading('Installing build dependencies')
-        self.run_in_virtualenv('pip3 install wheel twine && pip3 install -U setuptools')
-
-    def make_wheel(self) -> None:
-        self.heading('Building wheel')
-        self.run_in_virtualenv('python3 setup.py bdist_wheel')
-
-    def make_sdist(self) -> None:
-        self.heading('Building sdist')
-        self.run_in_virtualenv('python3 setup.py sdist')
-
-    def upload_wheel(self) -> None:
-        self.heading('Uploading wheel')
-        self.run_in_virtualenv('twine upload dist/mypy-{}-py3-none-any.whl'.format(self.version))
-
-    def upload_sdist(self) -> None:
-        self.heading('Uploading sdist')
-        self.run_in_virtualenv('twine upload dist/mypy-{}.tar.gz'.format(self.version))
-
-    def run(self, cmd: str) -> None:
-        try:
-            subprocess.check_call(cmd, shell=True, cwd=self.target_dir)
-        except subprocess.CalledProcessError:
-            sys.stderr.write('Error: Command {!r} failed\n'.format(cmd))
-            sys.exit(1)
-
-    def run_in_virtualenv(self, cmd: str) -> None:
-        self.run('source mypy-venv/bin/activate && cd mypy && ' + cmd)
-
-    def heading(self, heading: str) -> None:
-        print()
-        print('==== {} ===='.format(heading))
-        print()
-
-
-def parse_args() -> Any:
-    parser = argparse.ArgumentParser(
-        description='PyPI mypy package uploader (for non-Windows packages only)')
-    parser.add_argument('--force', action='store_true', default=False,
-                        help='Skip prompts and sanity checks (be careful!)')
-    parser.add_argument('--no-upload', action='store_true', default=False,
-                        help="Only build packages but don't upload")
-    parser.add_argument('version', help='Mypy version to release')
-    return parser.parse_args()
-
-
-if __name__ == '__main__':
-    args = parse_args()
-    builder = Builder(args.version, args.force, args.no_upload)
-    builder.build_and_upload()
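
For reference, the argument parser above shows how the removed upload script
was meant to be used: the release version is the only positional argument, so
a dry run for this release would have been approximately:

    python3 misc/upload-pypi.py --no-upload 0.530

with --force available to skip the prompts and sanity checks. This invocation
is inferred from the options defined above rather than documented elsewhere in
the commit.
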
diff --git a/misc/variadics.py b/misc/variadics.py
deleted file mode 100644
index 9200288..0000000
--- a/misc/variadics.py
+++ /dev/null
@@ -1,54 +0,0 @@
-"""Example of code generation approach to variadics.
-
-See https://github.com/python/typing/issues/193#issuecomment-236383893
-"""
-
-LIMIT = 5
-BOUND = 'object'
-
-def prelude(limit: int, bound: str) -> None:
-    print('from typing import Callable, Iterable, Iterator, Tuple, TypeVar, overload')
-    print('Ts = TypeVar(\'Ts\', bound={bound})'.format(bound=bound))
-    print('R = TypeVar(\'R\')')
-    for i in range(LIMIT):
-        print('T{i} = TypeVar(\'T{i}\', bound={bound})'.format(i=i+1, bound=bound))
-
-def expand_template(template: str,
-                    arg_template: str = 'arg{i}: {Ts}',
-                    lower: int = 0,
-                    limit: int = LIMIT) -> None:
-    print()
-    for i in range(lower, limit):
-        tvs = ', '.join('T{i}'.format(i=j+1) for j in range(i))
-        args = ', '.join(arg_template.format(i=j+1, Ts='T{}'.format(j+1))
-                         for j in range(i))
-        print('@overload')
-        s = template.format(Ts=tvs, argsTs=args)
-        s = s.replace('Tuple[]', 'Tuple[()]')
-        print(s)
-    args_l = [arg_template.format(i=j+1, Ts='Ts') for j in range(limit)]
-    args_l.append('*' + (arg_template.format(i='s', Ts='Ts')))
-    args = ', '.join(args_l)
-    s = template.format(Ts='Ts, ...', argsTs=args)
-    s = s.replace('Callable[[Ts, ...]', 'Callable[...')
-    print('@overload')
-    print(s)
-
-def main():
-    prelude(LIMIT, BOUND)
-
-    # map()
-    expand_template('def map(func: Callable[[{Ts}], R], {argsTs}) -> R: ...',
-                    lower=1)
-    # zip()
-    expand_template('def zip({argsTs}) -> Tuple[{Ts}]: ...')
-
-    # Naomi's examples
-    expand_template('def my_zip({argsTs}) -> Iterator[Tuple[{Ts}]]: ...',
-                    'arg{i}: Iterable[{Ts}]')
-    expand_template('def make_check({argsTs}) -> Callable[[{Ts}], bool]: ...')
-    expand_template('def my_map(f: Callable[[{Ts}], R], {argsTs}) -> Iterator[R]: ...',
-                    'arg{i}: Iterable[{Ts}]')
-                    
-
-main()
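
To make the deleted generator concrete: tracing prelude() and the zip()
template above with LIMIT = 5 and BOUND = 'object', the script would print a
TypeVar prelude followed by one overload per arity, roughly:

    from typing import Callable, Iterable, Iterator, Tuple, TypeVar, overload
    Ts = TypeVar('Ts', bound=object)
    R = TypeVar('R')
    T1 = TypeVar('T1', bound=object)
    ...
    @overload
    def zip() -> Tuple[()]: ...
    @overload
    def zip(arg1: T1) -> Tuple[T1]: ...
    @overload
    def zip(arg1: T1, arg2: T2) -> Tuple[T1, T2]: ...

plus a final catch-all overload taking *args: Ts and returning Tuple[Ts, ...].
This is a reconstruction from the templates above, not captured output.
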
diff --git a/mypy.egg-info/PKG-INFO b/mypy.egg-info/PKG-INFO
index 641f4ed..c457957 100644
--- a/mypy.egg-info/PKG-INFO
+++ b/mypy.egg-info/PKG-INFO
@@ -1,11 +1,12 @@
 Metadata-Version: 1.1
 Name: mypy
-Version: 0.521
+Version: 0.530
 Summary: Optional static typing for Python
 Home-page: http://www.mypy-lang.org/
 Author: Jukka Lehtosalo
 Author-email: jukka.lehtosalo at iki.fi
 License: MIT License
+Description-Content-Type: UNKNOWN
 Description: Mypy -- Optional Static Typing for Python
         =========================================
         
@@ -17,7 +18,7 @@ Description: Mypy -- Optional Static Typing for Python
         types.
         
 Platform: POSIX
-Classifier: Development Status :: 2 - Pre-Alpha
+Classifier: Development Status :: 3 - Alpha
 Classifier: Environment :: Console
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
diff --git a/mypy.egg-info/SOURCES.txt b/mypy.egg-info/SOURCES.txt
index 0316925..8ec764a 100644
--- a/mypy.egg-info/SOURCES.txt
+++ b/mypy.egg-info/SOURCES.txt
@@ -1,22 +1,9 @@
-.gitignore
-.gitmodules
-.travis.yml
-CONTRIBUTING.md
-CREDITS
-LICENSE
 MANIFEST.in
 README.md
-ROADMAP.md
-appveyor.yml
-build-requirements.txt
-conftest.py
 mypy_self_check.ini
-pytest.ini
 runtests.py
 setup.cfg
 setup.py
-test-requirements.txt
-typeshed
 docs/Makefile
 docs/README.md
 docs/make.bat
@@ -48,19 +35,8 @@ docs/source/supported_python_features.rst
 docs/source/type_inference_and_annotations.rst
 extensions/README.md
 extensions/mypy_extensions.py
+extensions/setup.cfg
 extensions/setup.py
-misc/actions_stubs.py
-misc/analyze_cache.py
-misc/async_matrix.py
-misc/fix_annotate.py
-misc/incremental_checker.py
-misc/macs.el
-misc/perf_checker.py
-misc/remove-eol-whitespace.sh
-misc/test_case_to_actual.py
-misc/touch_checker.py
-misc/upload-pypi.py
-misc/variadics.py
 mypy/__init__.py
 mypy/__main__.py
 mypy/api.py
@@ -85,6 +61,7 @@ mypy/git.py
 mypy/indirection.py
 mypy/infer.py
 mypy/join.py
+mypy/literals.py
 mypy/main.py
 mypy/maptype.py
 mypy/meet.py
@@ -160,24 +137,13 @@ mypy/test/testtransform.py
 mypy/test/testtypegen.py
 mypy/test/testtypes.py
 mypy/test/update.py
-pinfer/.gitignore
-pinfer/LICENSE
-pinfer/README
-pinfer/__init__.py
-pinfer/inspect3.py
-pinfer/p.py
-pinfer/pinfer.py
-pinfer/test_pinfer.py
-pinfer/test_pinfer3.py
-pinfer/unparse.py
-pinfer/unparse3.py
 scripts/dumpmodule.py
 scripts/find_type.py
 scripts/mypy
 scripts/mypy.bat
+scripts/myunit
 scripts/stubgen
 scripts/stubtest.py
-scripts/__pycache__/dumpmodule.cpython-36.pyc
 test-data/.flake8
 test-data/samples/bottles.py
 test-data/samples/class.py
@@ -246,6 +212,7 @@ test-data/unit/check-classes.test
 test-data/unit/check-classvar.test
 test-data/unit/check-columns.test
 test-data/unit/check-custom-plugin.test
+test-data/unit/check-default-plugin.test
 test-data/unit/check-dynamic-typing.test
 test-data/unit/check-enum.test
 test-data/unit/check-expressions.test
@@ -269,6 +236,7 @@ test-data/unit/check-newsyntax.test
 test-data/unit/check-newtype.test
 test-data/unit/check-optional.test
 test-data/unit/check-overloading.test
+test-data/unit/check-protocols.test
 test-data/unit/check-python2.test
 test-data/unit/check-selftype.test
 test-data/unit/check-semanal-error.test
@@ -298,6 +266,7 @@ test-data/unit/parse.test
 test-data/unit/python2eval.test
 test-data/unit/pythoneval-asyncio.test
 test-data/unit/pythoneval.test
+test-data/unit/reports.test
 test-data/unit/semanal-abstractclasses.test
 test-data/unit/semanal-basic.test
 test-data/unit/semanal-classes.test
@@ -354,12 +323,14 @@ test-data/unit/lib-stub/__builtin__.pyi
 test-data/unit/lib-stub/abc.pyi
 test-data/unit/lib-stub/builtins.pyi
 test-data/unit/lib-stub/collections.pyi
+test-data/unit/lib-stub/contextlib.pyi
 test-data/unit/lib-stub/enum.pyi
 test-data/unit/lib-stub/mypy_extensions.pyi
 test-data/unit/lib-stub/six.pyi
 test-data/unit/lib-stub/sys.pyi
 test-data/unit/lib-stub/types.pyi
 test-data/unit/lib-stub/typing.pyi
+test-data/unit/lib-stub/typing_extensions.pyi
 test-data/unit/plugins/attrhook.py
 test-data/unit/plugins/badreturn.py
 test-data/unit/plugins/badreturn2.py
@@ -368,7 +339,6 @@ test-data/unit/plugins/named_callable.py
 test-data/unit/plugins/noentry.py
 test-data/unit/plugins/plugin2.py
 test-data/unit/plugins/type_anal_hook.py
-tmp-test-dirs/.gitignore
 typeshed/stdlib/2/BaseHTTPServer.pyi
 typeshed/stdlib/2/ConfigParser.pyi
 typeshed/stdlib/2/Cookie.pyi
@@ -437,6 +407,7 @@ typeshed/stdlib/2/macpath.pyi
 typeshed/stdlib/2/markupbase.pyi
 typeshed/stdlib/2/md5.pyi
 typeshed/stdlib/2/mimetools.pyi
+typeshed/stdlib/2/mutex.pyi
 typeshed/stdlib/2/ntpath.pyi
 typeshed/stdlib/2/nturl2path.pyi
 typeshed/stdlib/2/os2emxpath.pyi
@@ -461,9 +432,12 @@ typeshed/stdlib/2/shutil.pyi
 typeshed/stdlib/2/signal.pyi
 typeshed/stdlib/2/smtplib.pyi
 typeshed/stdlib/2/spwd.pyi
+typeshed/stdlib/2/sre_constants.pyi
+typeshed/stdlib/2/sre_parse.pyi
 typeshed/stdlib/2/ssl.pyi
 typeshed/stdlib/2/stat.pyi
 typeshed/stdlib/2/string.pyi
+typeshed/stdlib/2/stringold.pyi
 typeshed/stdlib/2/strop.pyi
 typeshed/stdlib/2/subprocess.pyi
 typeshed/stdlib/2/symbol.pyi
@@ -489,6 +463,7 @@ typeshed/stdlib/2/email/__init__.pyi
 typeshed/stdlib/2/email/_parseaddr.pyi
 typeshed/stdlib/2/email/utils.pyi
 typeshed/stdlib/2/email/mime/__init__.pyi
+typeshed/stdlib/2/email/mime/application.pyi
 typeshed/stdlib/2/email/mime/base.pyi
 typeshed/stdlib/2/email/mime/multipart.pyi
 typeshed/stdlib/2/email/mime/nonmultipart.pyi
@@ -524,10 +499,12 @@ typeshed/stdlib/2and3/bz2.pyi
 typeshed/stdlib/2and3/cProfile.pyi
 typeshed/stdlib/2and3/calendar.pyi
 typeshed/stdlib/2and3/cgi.pyi
+typeshed/stdlib/2and3/chunk.pyi
 typeshed/stdlib/2and3/cmath.pyi
 typeshed/stdlib/2and3/cmd.pyi
 typeshed/stdlib/2and3/code.pyi
 typeshed/stdlib/2and3/codecs.pyi
+typeshed/stdlib/2and3/codeop.pyi
 typeshed/stdlib/2and3/colorsys.pyi
 typeshed/stdlib/2and3/contextlib.pyi
 typeshed/stdlib/2and3/copy.pyi
@@ -538,6 +515,7 @@ typeshed/stdlib/2and3/doctest.pyi
 typeshed/stdlib/2and3/errno.pyi
 typeshed/stdlib/2and3/filecmp.pyi
 typeshed/stdlib/2and3/fileinput.pyi
+typeshed/stdlib/2and3/formatter.pyi
 typeshed/stdlib/2and3/fractions.pyi
 typeshed/stdlib/2and3/ftplib.pyi
 typeshed/stdlib/2and3/grp.pyi
@@ -684,6 +662,7 @@ typeshed/stdlib/3/_operator.pyi
 typeshed/stdlib/3/_posixsubprocess.pyi
 typeshed/stdlib/3/_subprocess.pyi
 typeshed/stdlib/3/_thread.pyi
+typeshed/stdlib/3/_threading_local.pyi
 typeshed/stdlib/3/_warnings.pyi
 typeshed/stdlib/3/abc.pyi
 typeshed/stdlib/3/array.pyi
@@ -731,6 +710,8 @@ typeshed/stdlib/3/signal.pyi
 typeshed/stdlib/3/smtplib.pyi
 typeshed/stdlib/3/socketserver.pyi
 typeshed/stdlib/3/spwd.pyi
+typeshed/stdlib/3/sre_constants.pyi
+typeshed/stdlib/3/sre_parse.pyi
 typeshed/stdlib/3/ssl.pyi
 typeshed/stdlib/3/stat.pyi
 typeshed/stdlib/3/string.pyi
@@ -786,7 +767,7 @@ typeshed/stdlib/3/email/message.pyi
 typeshed/stdlib/3/email/parser.pyi
 typeshed/stdlib/3/email/policy.pyi
 typeshed/stdlib/3/email/utils.pyi
-typeshed/stdlib/3/email/mime/__init__.py
+typeshed/stdlib/3/email/mime/__init__.pyi
 typeshed/stdlib/3/email/mime/application.pyi
 typeshed/stdlib/3/email/mime/audio.pyi
 typeshed/stdlib/3/email/mime/base.pyi
@@ -969,8 +950,10 @@ typeshed/third_party/2/werkzeug/debug/repr.pyi
 typeshed/third_party/2/werkzeug/debug/tbtools.pyi
 typeshed/third_party/2and3/backports_abc.pyi
 typeshed/third_party/2and3/certifi.pyi
+typeshed/third_party/2and3/emoji.pyi
 typeshed/third_party/2and3/mypy_extensions.pyi
 typeshed/third_party/2and3/singledispatch.pyi
+typeshed/third_party/2and3/typing_extensions.pyi
 typeshed/third_party/2and3/ujson.pyi
 typeshed/third_party/2and3/Crypto/__init__.pyi
 typeshed/third_party/2and3/Crypto/pct_warnings.pyi
diff --git a/mypy.egg-info/requires.txt b/mypy.egg-info/requires.txt
index adb6794..39548ec 100644
--- a/mypy.egg-info/requires.txt
+++ b/mypy.egg-info/requires.txt
@@ -1 +1,4 @@
-typed-ast<1.1.0,>=1.0.4
+typed-ast<1.2.0,>=1.1.0
+
+[:python_version < "3.5"]
+typing>=3.5.3
diff --git a/mypy/applytype.py b/mypy/applytype.py
index 6d2f3a9..db93c46 100644
--- a/mypy/applytype.py
+++ b/mypy/applytype.py
@@ -1,4 +1,4 @@
-from typing import List, Dict
+from typing import List, Dict, Sequence, Optional
 
 import mypy.subtypes
 from mypy.sametypes import is_same_type
@@ -8,7 +8,7 @@ from mypy.messages import MessageBuilder
 from mypy.nodes import Context
 
 
-def apply_generic_arguments(callable: CallableType, types: List[Type],
+def apply_generic_arguments(callable: CallableType, orig_types: Sequence[Optional[Type]],
                             msg: MessageBuilder, context: Context) -> CallableType:
     """Apply generic type arguments to a callable type.
 
@@ -18,11 +18,12 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
     Note that each type can be None; in this case, it will not be applied.
     """
     tvars = callable.variables
-    assert len(tvars) == len(types)
+    assert len(tvars) == len(orig_types)
     # Check that inferred type variable values are compatible with allowed
     # values and bounds.  Also, promote subtype values to allowed values.
-    types = types[:]
+    types = list(orig_types)
     for i, type in enumerate(types):
+        assert not isinstance(type, PartialType), "Internal error: must never apply partial type"
         values = callable.variables[i].values
         if values and type:
             if isinstance(type, AnyType):
@@ -34,22 +35,21 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
                        for v1 in type.values):
                     continue
             for value in values:
-                if isinstance(type, PartialType) or mypy.subtypes.is_subtype(type, value):
+                if mypy.subtypes.is_subtype(type, value):
                     types[i] = value
                     break
             else:
-                msg.incompatible_typevar_value(callable, i + 1, type, context)
-
+                msg.incompatible_typevar_value(callable, type, callable.variables[i].name, context)
         upper_bound = callable.variables[i].upper_bound
-        if (type and not isinstance(type, PartialType) and
-                not mypy.subtypes.is_subtype(type, upper_bound)):
-            msg.incompatible_typevar_value(callable, i + 1, type, context)
+        if type and not mypy.subtypes.is_subtype(type, upper_bound):
+            msg.incompatible_typevar_value(callable, type, callable.variables[i].name, context)
 
     # Create a map from type variable id to target type.
     id_to_type = {}  # type: Dict[TypeVarId, Type]
     for i, tv in enumerate(tvars):
-        if types[i]:
-            id_to_type[tv.id] = types[i]
+        typ = types[i]
+        if typ:
+            id_to_type[tv.id] = typ
 
     # Apply arguments to argument types.
     arg_types = [expand_type(at, id_to_type) for at in callable.arg_types]
diff --git a/mypy/binder.py b/mypy/binder.py
index 2a54859..956c950 100644
--- a/mypy/binder.py
+++ b/mypy/binder.py
@@ -1,13 +1,12 @@
 from typing import Dict, List, Set, Iterator, Union, Optional, cast
 from contextlib import contextmanager
 
-from mypy.types import Type, AnyType, PartialType, UnionType, NoneTyp
-from mypy.nodes import (Key, Node, Expression, Var, RefExpr, SymbolTableNode)
-
+from mypy.types import Type, AnyType, PartialType, UnionType, TypeOfAny
 from mypy.subtypes import is_subtype
 from mypy.join import join_simple
 from mypy.sametypes import is_same_type
-
+from mypy.nodes import Expression, Var, RefExpr
+from mypy.literals import Key, literal, literal_hash, subkeys
 from mypy.nodes import IndexExpr, MemberExpr, NameExpr
 
 
@@ -61,7 +60,7 @@ class ConditionalTypeBinder:
 
     def __init__(self) -> None:
         # The stack of frames currently used.  These map
-        # expr.literal_hash -- literals like 'foo.bar' --
+        # literal_hash(expr) -- literals like 'foo.bar' --
         # to types. The last element of this list is the
         # top-most, current frame. Each earlier element
         # records the state as of when that frame was last
@@ -75,7 +74,7 @@ class ConditionalTypeBinder:
         # has no corresponding element in this list.
         self.options_on_return = []  # type: List[List[Frame]]
 
-        # Maps expr.literal_hash to get_declaration(expr)
+        # Maps literal_hash(expr) to get_declaration(expr)
         # for every expr stored in the binder
         self.declarations = DeclarationsFrame()
         # Set of other keys to invalidate if a key is changed, e.g. x -> {x.a, x[0]}
@@ -89,14 +88,13 @@ class ConditionalTypeBinder:
         self.break_frames = []  # type: List[int]
         self.continue_frames = []  # type: List[int]
 
-    def _add_dependencies(self, key: Key, value: Key = None) -> None:
+    def _add_dependencies(self, key: Key, value: Optional[Key] = None) -> None:
         if value is None:
             value = key
         else:
             self.dependencies.setdefault(key, set()).add(value)
-        for elt in key:
-            if isinstance(elt, Key):
-                self._add_dependencies(elt, value)
+        for elt in subkeys(key):
+            self._add_dependencies(elt, value)
 
     def push_frame(self) -> Frame:
         """Push a new frame into the binder."""
@@ -119,12 +117,11 @@ class ConditionalTypeBinder:
     def put(self, expr: Expression, typ: Type) -> None:
         if not isinstance(expr, BindableTypes):
             return
-        if not expr.literal:
+        if not literal(expr):
             return
-        key = expr.literal_hash
+        key = literal_hash(expr)
         assert key is not None, 'Internal error: binder tried to put non-literal'
         if key not in self.declarations:
-            assert isinstance(expr, BindableTypes)
             self.declarations[key] = get_declaration(expr)
             self._add_dependencies(key)
         self._put(key, typ)
@@ -133,8 +130,9 @@ class ConditionalTypeBinder:
         self.frames[-1].unreachable = True
 
     def get(self, expr: Expression) -> Optional[Type]:
-        assert expr.literal_hash is not None, 'Internal error: binder tried to get non-literal'
-        return self._get(expr.literal_hash)
+        key = literal_hash(expr)
+        assert key is not None, 'Internal error: binder tried to get non-literal'
+        return self._get(key)
 
     def is_unreachable(self) -> bool:
         # TODO: Copy the value of unreachable into new frames to avoid
@@ -143,8 +141,9 @@ class ConditionalTypeBinder:
 
     def cleanse(self, expr: Expression) -> None:
         """Remove all references to a Node from the binder."""
-        assert expr.literal_hash is not None, 'Internal error: binder tried cleanse non-literal'
-        self._cleanse_key(expr.literal_hash)
+        key = literal_hash(expr)
+        assert key is not None, 'Internal error: binder tried cleanse non-literal'
+        self._cleanse_key(key)
 
     def _cleanse_key(self, key: Key) -> None:
         """Remove all references to a key from the binder."""
@@ -175,10 +174,11 @@ class ConditionalTypeBinder:
 
             type = resulting_values[0]
             assert type is not None
-            if isinstance(self.declarations.get(key), AnyType):
+            declaration_type = self.declarations.get(key)
+            if isinstance(declaration_type, AnyType):
                 # At this point resulting values can't contain None, see continue above
                 if not all(is_same_type(type, cast(Type, t)) for t in resulting_values[1:]):
-                    type = AnyType()
+                    type = AnyType(TypeOfAny.from_another_any, source_any=declaration_type)
             else:
                 for other in resulting_values[1:]:
                     assert other is not None
@@ -212,11 +212,11 @@ class ConditionalTypeBinder:
 
     def assign_type(self, expr: Expression,
                     type: Type,
-                    declared_type: Type,
+                    declared_type: Optional[Type],
                     restrict_any: bool = False) -> None:
         if not isinstance(expr, BindableTypes):
             return None
-        if not expr.literal:
+        if not literal(expr):
             return
         self.invalidate_dependencies(expr)
 
@@ -233,12 +233,16 @@ class ConditionalTypeBinder:
             # times?
             return
 
-        # If x is Any and y is int, after x = y we do not infer that x is int.
-        # This could be changed.
-
-        if (isinstance(self.most_recent_enclosing_type(expr, type), AnyType)
+        enclosing_type = self.most_recent_enclosing_type(expr, type)
+        if (isinstance(enclosing_type, AnyType)
                 and not restrict_any):
-            pass
+            # If x is Any and y is int, after x = y we do not infer that x is int.
+            # This could be changed.
+            if not isinstance(type, AnyType):
+                # We narrowed type from Any in a recent frame (probably an
+                # isinstance check), but now it is reassigned, so broaden back
+                # to Any (which is the most recent enclosing type)
+                self.put(expr, enclosing_type)
         elif (isinstance(type, AnyType)
               and not (isinstance(declared_type, UnionType)
                        and any(isinstance(item, AnyType) for item in declared_type.items))):
@@ -261,14 +265,15 @@ class ConditionalTypeBinder:
         It is overly conservative: it invalidates globally, including
         in code paths unreachable from here.
         """
-        assert expr.literal_hash is not None
-        for dep in self.dependencies.get(expr.literal_hash, set()):
+        key = literal_hash(expr)
+        assert key is not None
+        for dep in self.dependencies.get(key, set()):
             self._cleanse_key(dep)
 
     def most_recent_enclosing_type(self, expr: BindableExpression, type: Type) -> Optional[Type]:
         if isinstance(type, AnyType):
             return get_declaration(expr)
-        key = expr.literal_hash
+        key = literal_hash(expr)
         assert key is not None
         enclosers = ([get_declaration(expr)] +
                      [f[key] for f in self.frames
diff --git a/mypy/build.py b/mypy/build.py
index 471f819..a079aa0 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -17,11 +17,12 @@ import hashlib
 import json
 import os.path
 import re
+import site
 import sys
 import time
 from os.path import dirname, basename
 
-from typing import (AbstractSet, Dict, Iterable, Iterator, List,
+from typing import (AbstractSet, Dict, Iterable, Iterator, List, cast, Any,
                     NamedTuple, Optional, Set, Tuple, Union, Callable)
 # Can't use TYPE_CHECKING because it's not in the Python 3.5.1 stdlib
 MYPY = False
@@ -44,6 +45,7 @@ from mypy.stats import dump_type_stats
 from mypy.types import Type
 from mypy.version import __version__
 from mypy.plugin import Plugin, DefaultPlugin, ChainedPlugin
+from mypy.defaults import PYTHON3_VERSION_MIN
 
 
 # We need to know the location of this file to load data, but
@@ -116,8 +118,8 @@ class BuildSourceSet:
 
 def build(sources: List[BuildSource],
           options: Options,
-          alt_lib_path: str = None,
-          bin_dir: str = None) -> BuildResult:
+          alt_lib_path: Optional[str] = None,
+          bin_dir: Optional[str] = None) -> BuildResult:
     """Analyze a program.
 
     A single call to build performs parsing, semantic analysis and optionally
@@ -212,6 +214,12 @@ def default_data_dir(bin_dir: Optional[str]) -> str:
       bin_dir: directory containing the mypy script
     """
     if not bin_dir:
+        if os.name == 'nt':
+            prefixes = [os.path.join(sys.prefix, 'Lib'), os.path.join(site.getuserbase(), 'lib')]
+            for parent in prefixes:
+                    data_dir = os.path.join(parent, 'mypy')
+                    if os.path.exists(data_dir):
+                        return data_dir
         mypy_package = os.path.dirname(__file__)
         parent = os.path.dirname(mypy_package)
         if (os.path.basename(parent) == 'site-packages' or
@@ -219,13 +227,14 @@ def default_data_dir(bin_dir: Optional[str]) -> str:
             # Installed in site-packages or dist-packages, but invoked with python3 -m mypy;
             # __file__ is .../blah/lib/python3.N/site-packages/mypy/build.py
             # or .../blah/lib/python3.N/dist-packages/mypy/build.py (Debian)
+            # or .../blah/lib64/python3.N/dist-packages/mypy/build.py (Gentoo)
             # or .../blah/lib/site-packages/mypy/build.py (Windows)
             # blah may be a virtualenv or /usr/local.  We want .../blah/lib/mypy.
             lib = parent
             for i in range(2):
                 lib = os.path.dirname(lib)
-                if os.path.basename(lib) == 'lib':
-                    return os.path.join(lib, 'mypy')
+                if os.path.basename(lib) in ('lib', 'lib32', 'lib64'):
+                    return os.path.join(os.path.dirname(lib), 'lib/mypy')
         subdir = os.path.join(parent, 'lib', 'mypy')
         if os.path.isdir(subdir):
             # If installed via buildout, the __file__ is
@@ -278,12 +287,15 @@ def default_lib_path(data_dir: str,
         if os.path.isdir(auto):
             data_dir = auto
         typeshed_dir = os.path.join(data_dir, "typeshed")
-    # We allow a module for e.g. version 3.5 to be in 3.4/. The assumption
-    # is that a module added with 3.4 will still be present in Python 3.5.
-    versions = ["%d.%d" % (pyversion[0], minor)
-                for minor in reversed(range(pyversion[1] + 1))]
-    # E.g. for Python 3.2, try 3.2/, 3.1/, 3.0/, 3/, 2and3/.
-    # (Note that 3.1 and 3.0 aren't really supported, but we don't care.)
+    if pyversion[0] == 3:
+        # We allow a module for e.g. version 3.5 to be in 3.4/. The assumption
+        # is that a module added with 3.4 will still be present in Python 3.5.
+        versions = ["%d.%d" % (pyversion[0], minor)
+                    for minor in reversed(range(PYTHON3_VERSION_MIN[1], pyversion[1] + 1))]
+    else:
+        # For Python 2, we only have stubs for 2.7
+        versions = ["2.7"]
+    # E.g. for Python 3.5, try 3.5/, 3.4/, 3.3/, 3/, 2and3/.
     for v in versions + [str(pyversion[0]), '2and3']:
         for lib_type in ['stdlib', 'third_party']:
             stubdir = os.path.join(typeshed_dir, lib_type, v)
@@ -293,7 +305,12 @@ def default_lib_path(data_dir: str,
     # Add fallback path that can be used if we have a broken installation.
     if sys.platform != 'win32':
         path.append('/usr/local/lib/mypy')
-
+    if not path:
+        print("Could not resolve typeshed subdirectories. If you are using mypy\n"
+              "from source, you need to run \"git submodule update --init\".\n"
+              "Otherwise your mypy install is broken.\nPython executable is located at "
+              "{0}.\nMypy located at {1}".format(sys.executable, data_dir), file=sys.stderr)
+        sys.exit(1)
     return path
 
 
@@ -312,6 +329,7 @@ CacheMeta = NamedTuple('CacheMeta',
                         ('dep_prios', List[int]),
                         ('interface_hash', str),  # hash representing the public interface
                         ('version_id', str),  # mypy version for cache invalidation
+                        ('ignore_all', bool),  # if errors were ignored
                         ])
 # NOTE: dependencies + suppressed == all reachable imports;
 # suppressed contains those reachable imports that were prevented by
@@ -481,7 +499,7 @@ class BuildManager:
         self.semantic_analyzer = SemanticAnalyzer(self.modules, self.missing_modules,
                                                   lib_path, self.errors, self.plugin)
         self.modules = self.semantic_analyzer.modules
-        self.semantic_analyzer_pass3 = ThirdPass(self.modules, self.errors)
+        self.semantic_analyzer_pass3 = ThirdPass(self.modules, self.errors, self.semantic_analyzer)
         self.all_types = {}  # type: Dict[Expression, Type]
         self.indirection_detector = TypeIndirectionVisitor()
         self.stale_modules = set()  # type: Set[str]
@@ -642,7 +660,7 @@ def remove_cwd_prefix_from_path(p: str) -> str:
 
 
 # Cache find_module: (id, lib_path) -> result.
-find_module_cache = {}  # type: Dict[Tuple[str, Tuple[str, ...]], str]
+find_module_cache = {}  # type: Dict[Tuple[str, Tuple[str, ...]], Optional[str]]
 
 # Cache some repeated work within distinct find_module calls: finding which
 # elements of lib_path have even the subdirectory they'd need for the module
@@ -672,7 +690,7 @@ def list_dir(path: str) -> Optional[List[str]]:
     if path in find_module_listdir_cache:
         return find_module_listdir_cache[path]
     try:
-        res = os.listdir(path)
+        res = os.listdir(path)  # type: Optional[List[str]]
     except OSError:
         res = None
     find_module_listdir_cache[path] = res
@@ -697,7 +715,7 @@ def is_file(path: str) -> bool:
     return os.path.isfile(path)
 
 
-def find_module(id: str, lib_path_arg: Iterable[str]) -> str:
+def find_module(id: str, lib_path_arg: Iterable[str]) -> Optional[str]:
     """Return the path of the module source file, or None if not found."""
     lib_path = tuple(lib_path_arg)
 
@@ -872,21 +890,23 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
         manager.log('Could not load cache for {}: meta cache is not a dict: {}'
                     .format(id, repr(meta)))
         return None
+    sentinel = None  # type: Any  # the values will be post-validated below
     m = CacheMeta(
-        meta.get('id'),
-        meta.get('path'),
-        int(meta['mtime']) if 'mtime' in meta else None,
-        meta.get('size'),
-        meta.get('hash'),
+        meta.get('id', sentinel),
+        meta.get('path', sentinel),
+        int(meta['mtime']) if 'mtime' in meta else sentinel,
+        meta.get('size', sentinel),
+        meta.get('hash', sentinel),
         meta.get('dependencies', []),
-        int(meta['data_mtime']) if 'data_mtime' in meta else None,
+        int(meta['data_mtime']) if 'data_mtime' in meta else sentinel,
         data_json,
         meta.get('suppressed', []),
         meta.get('child_modules', []),
         meta.get('options'),
         meta.get('dep_prios', []),
         meta.get('interface_hash', ''),
-        meta.get('version_id'),
+        meta.get('version_id', sentinel),
+        meta.get('ignore_all', True),
     )
     # Don't check for path match, that is dealt with in validate_meta().
     if (m.id != id or
@@ -943,8 +963,8 @@ def atomic_write(filename: str, *lines: str) -> bool:
     return True
 
 
-def validate_meta(meta: Optional[CacheMeta], id: str, path: str,
-                  manager: BuildManager) -> Optional[CacheMeta]:
+def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
+                  ignore_all: bool, manager: BuildManager) -> Optional[CacheMeta]:
     '''Checks whether the cached AST of this module can be used.
 
     Return:
@@ -961,6 +981,11 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: str,
         manager.log('Metadata not found for {}'.format(id))
         return None
 
+    if meta.ignore_all and not ignore_all:
+        manager.log('Metadata abandoned for {}: errors were previously ignored'.format(id))
+        return None
+
+    assert path is not None, "Internal error: meta was provided without a path"
     # Check data_json; assume if its mtime matches it's good.
     # TODO: stat() errors
     data_mtime = getmtime(meta.data_json)
@@ -1002,6 +1027,7 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: str,
                 'dep_prios': meta.dep_prios,
                 'interface_hash': meta.interface_hash,
                 'version_id': manager.version_id,
+                'ignore_all': meta.ignore_all,
             }
             if manager.options.debug_cache:
                 meta_str = json.dumps(meta_dict, indent=2, sort_keys=True)
@@ -1028,7 +1054,8 @@ def compute_hash(text: str) -> str:
 def write_cache(id: str, path: str, tree: MypyFile,
                 dependencies: List[str], suppressed: List[str],
                 child_modules: List[str], dep_prios: List[int],
-                old_interface_hash: str, source_hash: str, manager: BuildManager) -> str:
+                old_interface_hash: str, source_hash: str,
+                ignore_all: bool, manager: BuildManager) -> str:
     """Write cache files for a module.
 
     Note that this mypy's behavior is still correct when any given
@@ -1121,6 +1148,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
             'dep_prios': dep_prios,
             'interface_hash': interface_hash,
             'version_id': manager.version_id,
+            'ignore_all': ignore_all,
             }
 
     # Write meta cache file
@@ -1311,7 +1339,7 @@ class State:
     ancestors = None  # type: Optional[List[str]]
 
     # A list of all direct submodules of a given module
-    child_modules = None  # type: Optional[Set[str]]
+    child_modules = None  # type: Set[str]
 
     # List of (path, line number) tuples giving context for import
     import_context = None  # type: List[Tuple[str, int]]
@@ -1339,9 +1367,9 @@ class State:
                  path: Optional[str],
                  source: Optional[str],
                  manager: BuildManager,
-                 caller_state: 'State' = None,
+                 caller_state: 'Optional[State]' = None,
                  caller_line: int = 0,
-                 ancestor_for: 'State' = None,
+                 ancestor_for: 'Optional[State]' = None,
                  root_source: bool = False,
                  ) -> None:
         assert id or path or source is not None, "Neither id, path nor source given"
@@ -1358,6 +1386,7 @@ class State:
         self.id = id or '__main__'
         self.options = manager.options.clone_for_module(self.id)
         if not path and source is None:
+            assert id is not None
             file_id = id
             if id == 'builtins' and self.options.python_version[0] == 2:
                 # The __builtin__ module is called internally by mypy
@@ -1377,12 +1406,9 @@ class State:
                 # - skip -> don't analyze, make the type Any
                 follow_imports = self.options.follow_imports
                 if (follow_imports != 'normal'
-                    and not root_source  # Honor top-level modules
-                    and path.endswith('.py')  # Stubs are always normal
-                    and id != 'builtins'  # Builtins is always normal
-                    and not (caller_state and
-                             caller_state.tree and
-                             caller_state.tree.is_stub)):
+                        and not root_source  # Honor top-level modules
+                        and path.endswith('.py')  # Stubs are always normal
+                        and id != 'builtins'):  # Builtins is always normal
                     if follow_imports == 'silent':
                         # Still import it, but silence non-blocker errors.
                         manager.log("Silencing %s (%s)" % (path, id))
@@ -1419,12 +1445,12 @@ class State:
         self.xpath = path or '<string>'
         self.source = source
         if path and source is None and self.options.incremental:
-            self.meta = find_cache_meta(self.id, self.path, manager)
+            self.meta = find_cache_meta(self.id, path, manager)
             # TODO: Get mtime if not cached.
             if self.meta is not None:
                 self.interface_hash = self.meta.interface_hash
         self.add_ancestors()
-        self.meta = validate_meta(self.meta, self.id, self.path, manager)
+        self.meta = validate_meta(self.meta, self.id, self.path, self.ignore_all, manager)
         if self.meta:
             # Make copies, since we may modify these and want to
             # compare them to the originals later.
@@ -1536,6 +1562,8 @@ class State:
     # Methods for processing cached modules.
 
     def load_tree(self) -> None:
+        assert self.meta is not None, "Internal error: this method must be called only" \
+                                      " for cached modules"
         with open(self.meta.data_json) as f:
             data = json.load(f)
         # TODO: Assert data file wasn't changed.
@@ -1543,10 +1571,12 @@ class State:
         self.manager.modules[self.id] = self.tree
 
     def fix_cross_refs(self) -> None:
+        assert self.tree is not None, "Internal error: method must be called on parsed file only"
         fixup_module_pass_one(self.tree, self.manager.modules,
                               self.manager.options.quick_and_dirty)
 
     def calculate_mros(self) -> None:
+        assert self.tree is not None, "Internal error: method must be called on parsed file only"
         fixup_module_pass_two(self.tree, self.manager.modules,
                               self.manager.options.quick_and_dirty)
 
@@ -1628,6 +1658,7 @@ class State:
                 except (UnicodeDecodeError, DecodeError) as decodeerr:
                     raise CompileError([
                         "mypy: can't decode file '{}': {}".format(self.path, str(decodeerr))])
+            assert source is not None
             self.tree = manager.parse_file(self.id, self.xpath, source,
                                            self.ignore_all or self.options.ignore_errors)
 
@@ -1691,22 +1722,28 @@ class State:
         self.check_blockers()
 
     def semantic_analysis(self) -> None:
+        assert self.tree is not None, "Internal error: method must be called on parsed file only"
         patches = []  # type: List[Callable[[], None]]
         with self.wrap_context():
             self.manager.semantic_analyzer.visit_file(self.tree, self.xpath, self.options, patches)
         self.patches = patches
 
     def semantic_analysis_pass_three(self) -> None:
+        assert self.tree is not None, "Internal error: method must be called on parsed file only"
+        patches = []  # type: List[Callable[[], None]]
         with self.wrap_context():
-            self.manager.semantic_analyzer_pass3.visit_file(self.tree, self.xpath, self.options)
+            self.manager.semantic_analyzer_pass3.visit_file(self.tree, self.xpath,
+                                                            self.options, patches)
             if self.options.dump_type_stats:
                 dump_type_stats(self.tree, self.xpath)
+        self.patches = patches + self.patches
 
     def semantic_analysis_apply_patches(self) -> None:
         for patch_func in self.patches:
             patch_func()
 
     def type_check_first_pass(self) -> None:
+        assert self.tree is not None, "Internal error: method must be called on parsed file only"
         manager = self.manager
         if self.options.semantic_analysis_only:
             return
@@ -1722,6 +1759,7 @@ class State:
             return self.type_checker.check_second_pass()
 
     def finish_passes(self) -> None:
+        assert self.tree is not None, "Internal error: method must be called on parsed file only"
         manager = self.manager
         if self.options.semantic_analysis_only:
             return
@@ -1741,7 +1779,7 @@ class State:
                                      module_refs: Set[str],
                                      type_map: Dict[Expression, Type]) -> None:
         types = set(type_map.values())
-        types.discard(None)
+        assert None not in types
         valid = self.valid_references()
 
         encountered = self.manager.indirection_detector.find_modules(types) | module_refs
@@ -1757,8 +1795,9 @@ class State:
                 self.suppressed.append(dep)
 
     def valid_references(self) -> Set[str]:
+        assert self.ancestors is not None
         valid_refs = set(self.dependencies + self.suppressed + self.ancestors)
-        valid_refs .add(self.id)
+        valid_refs.add(self.id)
 
         if "os" in valid_refs:
             valid_refs.add("os.path")
@@ -1766,6 +1805,7 @@ class State:
         return valid_refs
 
     def write_cache(self) -> None:
+        assert self.tree is not None, "Internal error: method must be called on parsed file only"
         if not self.path or self.options.cache_dir == os.devnull:
             return
         if self.manager.options.quick_and_dirty:
@@ -1778,7 +1818,7 @@ class State:
         new_interface_hash = write_cache(
             self.id, self.path, self.tree,
             list(self.dependencies), list(self.suppressed), list(self.child_modules),
-            dep_prios, self.interface_hash, self.source_hash,
+            dep_prios, self.interface_hash, self.source_hash, self.ignore_all,
             self.manager)
         if new_interface_hash == self.interface_hash:
             self.manager.log("Cached module {} has same interface".format(self.id))
@@ -1886,6 +1926,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
     # Collect dependencies.  We go breadth-first.
     while new:
         st = new.popleft()
+        assert st.ancestors is not None
         for dep in st.ancestors + st.dependencies + st.suppressed:
             # We don't want to recheck imports marked with '# type: ignore'
             # so we ignore any suppressed module not explicitly re-included
@@ -1926,6 +1967,10 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
     return graph
 
 
+class FreshState(State):
+    meta = None  # type: CacheMeta
+
+
 def process_graph(graph: Graph, manager: BuildManager) -> None:
     """Process everything in dependency order."""
     sccs = sorted_components(graph)
@@ -1980,23 +2025,25 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
         if fresh:
             # All cache files are fresh.  Check that no dependency's
             # cache file is newer than any scc node's cache file.
-            oldest_in_scc = min(graph[id].meta.data_mtime for id in scc)
+            fresh_graph = cast(Dict[str, FreshState], graph)
+            oldest_in_scc = min(fresh_graph[id].meta.data_mtime for id in scc)
             viable = {id for id in stale_deps if graph[id].meta is not None}
-            newest_in_deps = 0 if not viable else max(graph[dep].meta.data_mtime for dep in viable)
+            newest_in_deps = 0 if not viable else max(fresh_graph[dep].meta.data_mtime
+                                                      for dep in viable)
             if manager.options.verbosity >= 3:  # Dump all mtimes for extreme debugging.
-                all_ids = sorted(ascc | viable, key=lambda id: graph[id].meta.data_mtime)
+                all_ids = sorted(ascc | viable, key=lambda id: fresh_graph[id].meta.data_mtime)
                 for id in all_ids:
                     if id in scc:
-                        if graph[id].meta.data_mtime < newest_in_deps:
+                        if fresh_graph[id].meta.data_mtime < newest_in_deps:
                             key = "*id:"
                         else:
                             key = "id:"
                     else:
-                        if graph[id].meta.data_mtime > oldest_in_scc:
+                        if fresh_graph[id].meta.data_mtime > oldest_in_scc:
                             key = "+dep:"
                         else:
                             key = "dep:"
-                    manager.trace(" %5s %.0f %s" % (key, graph[id].meta.data_mtime, id))
+                    manager.trace(" %5s %.0f %s" % (key, fresh_graph[id].meta.data_mtime, id))
             # If equal, give the benefit of the doubt, due to 1-sec time granularity
             # (on some platforms).
             if manager.options.quick_and_dirty and stale_deps:
diff --git a/mypy/checker.py b/mypy/checker.py
index aab56b5..a34fba9 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -27,12 +27,13 @@ from mypy.nodes import (
     RefExpr, YieldExpr, BackquoteExpr, Import, ImportFrom, ImportAll, ImportBase,
     AwaitExpr, PromoteExpr, Node, EnumCallExpr,
     ARG_POS, MDEF,
-    CONTRAVARIANT, COVARIANT)
+    CONTRAVARIANT, COVARIANT, INVARIANT)
 from mypy import nodes
+from mypy.literals import literal, literal_hash
 from mypy.typeanal import has_any_from_unimported_type, check_for_explicit_any
 from mypy.types import (
     Type, AnyType, CallableType, FunctionLike, Overloaded, TupleType, TypedDictType,
-    Instance, NoneTyp, strip_type, TypeType,
+    Instance, NoneTyp, strip_type, TypeType, TypeOfAny,
     UnionType, TypeVarId, TypeVarType, PartialType, DeletedType, UninhabitedType, TypeVarDef,
     true_only, false_only, function_type, is_named_instance, union_items
 )
@@ -44,7 +45,7 @@ from mypy import messages
 from mypy.subtypes import (
     is_subtype, is_equivalent, is_proper_subtype, is_more_precise,
     restrict_subtype_away, is_subtype_ignoring_tvars, is_callable_subtype,
-    unify_generic_callable,
+    unify_generic_callable, find_member
 )
 from mypy.maptype import map_instance_to_supertype
 from mypy.typevars import fill_typevars, has_no_typevars
@@ -109,6 +110,9 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
     dynamic_funcs = None  # type: List[bool]
     # Stack of collections of variables with partial types
     partial_types = None  # type: List[Dict[Var, Context]]
+    # Vars for which partial type errors have already been reported
+    # (to avoid logically duplicate errors in different error contexts).
+    partial_reported = None  # type: Set[Var]
     globals = None  # type: SymbolTable
     modules = None  # type: Dict[str, MypyFile]
     # Nodes that couldn't be checked because some types weren't available. We'll run
@@ -124,6 +128,9 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
     # Should strict Optional-related errors be suppressed in this file?
     suppress_none_errors = False  # TODO: Get it from options instead
     options = None  # type: Options
+    # Used for collecting inferred attribute types so that they can be checked
+    # for consistency.
+    inferred_attribute_types = None  # type: Optional[Dict[Var, Type]]
 
     # The set of all dependencies (suppressed or not) that this module accesses, either
     # directly or indirectly.
@@ -153,6 +160,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         self.return_types = []
         self.dynamic_funcs = []
         self.partial_types = []
+        self.partial_reported = set()
         self.deferred_nodes = []
         self.type_map = {}
         self.module_refs = set()
@@ -160,6 +168,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         self.current_node_deferred = False
         self.is_stub = tree.is_stub
         self.is_typeshed_stub = errors.is_typeshed_file(path)
+        self.inferred_attribute_types = None
         if options.strict_optional_whitelist is None:
             self.suppress_none_errors = not options.show_none_errors
         else:
@@ -200,7 +209,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                     self.fail(messages.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s),
                             all_node)
 
-    def check_second_pass(self, todo: List[DeferredNode] = None) -> bool:
+    def check_second_pass(self, todo: Optional[List[DeferredNode]] = None) -> bool:
         """Run second or following pass of type checking.
 
         This goes through deferred nodes, returning True if there were any.
@@ -274,8 +283,8 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         except Exception as err:
             report_internal_error(err, self.errors.file, stmt.line, self.errors, self.options)
 
-    def accept_loop(self, body: Statement, else_body: Statement = None, *,
-                    exit_condition: Expression = None) -> None:
+    def accept_loop(self, body: Statement, else_body: Optional[Statement] = None, *,
+                    exit_condition: Optional[Expression] = None) -> None:
         """Repeatedly type check a loop body until the frame doesn't change.
         If exit_condition is set, assume it must be False on exit from the loop.
 
@@ -418,11 +427,12 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         """
         if is_coroutine:
             # This means we're in Python 3.5 or later.
-            at = self.named_generic_type('typing.Awaitable', [AnyType()])
+            at = self.named_generic_type('typing.Awaitable', [AnyType(TypeOfAny.special_form)])
             if is_subtype(at, typ):
                 return True
         else:
-            gt = self.named_generic_type('typing.Generator', [AnyType(), AnyType(), AnyType()])
+            any_type = AnyType(TypeOfAny.special_form)
+            gt = self.named_generic_type('typing.Generator', [any_type, any_type, any_type])
             if is_subtype(gt, typ):
                 return True
         return isinstance(typ, Instance) and typ.type.fullname() == 'typing.AwaitableGenerator'
@@ -433,7 +443,8 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         True if `typ` is a supertype of AsyncGenerator.
         """
         try:
-            agt = self.named_generic_type('typing.AsyncGenerator', [AnyType(), AnyType()])
+            any_type = AnyType(TypeOfAny.special_form)
+            agt = self.named_generic_type('typing.AsyncGenerator', [any_type, any_type])
         except KeyError:
             # we're running on a version of typing that doesn't have AsyncGenerator yet
             return False
@@ -442,18 +453,18 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
     def get_generator_yield_type(self, return_type: Type, is_coroutine: bool) -> Type:
         """Given the declared return type of a generator (t), return the type it yields (ty)."""
         if isinstance(return_type, AnyType):
-            return AnyType()
+            return AnyType(TypeOfAny.from_another_any, source_any=return_type)
         elif (not self.is_generator_return_type(return_type, is_coroutine)
                 and not self.is_async_generator_return_type(return_type)):
             # If the function doesn't have a proper Generator (or
             # Awaitable) return type, anything is permissible.
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         elif not isinstance(return_type, Instance):
             # Same as above, but written as a separate branch so the typechecker can understand.
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         elif return_type.type.fullname() == 'typing.Awaitable':
             # Awaitable: ty is Any.
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
         elif return_type.args:
             # AwaitableGenerator, Generator, AsyncGenerator, Iterator, or Iterable; ty is args[0].
             ret_type = return_type.args[0]
@@ -464,23 +475,23 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             # parameters (i.e. is `object`), then the yielded values can't
             # be accessed so any type is acceptable.  IOW, ty is Any.
             # (However, see https://github.com/python/mypy/issues/1933)
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
 
     def get_generator_receive_type(self, return_type: Type, is_coroutine: bool) -> Type:
         """Given a declared generator return type (t), return the type its yield receives (tc)."""
         if isinstance(return_type, AnyType):
-            return AnyType()
+            return AnyType(TypeOfAny.from_another_any, source_any=return_type)
         elif (not self.is_generator_return_type(return_type, is_coroutine)
                 and not self.is_async_generator_return_type(return_type)):
             # If the function doesn't have a proper Generator (or
             # Awaitable) return type, anything is permissible.
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         elif not isinstance(return_type, Instance):
             # Same as above, but written as a separate branch so the typechecker can understand.
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         elif return_type.type.fullname() == 'typing.Awaitable':
             # Awaitable, AwaitableGenerator: tc is Any.
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
         elif (return_type.type.fullname() in ('typing.Generator', 'typing.AwaitableGenerator')
               and len(return_type.args) >= 3):
             # Generator: tc is args[1].
@@ -495,14 +506,14 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
     def get_generator_return_type(self, return_type: Type, is_coroutine: bool) -> Type:
         """Given the declared return type of a generator (t), return the type it returns (tr)."""
         if isinstance(return_type, AnyType):
-            return AnyType()
+            return AnyType(TypeOfAny.from_another_any, source_any=return_type)
         elif not self.is_generator_return_type(return_type, is_coroutine):
             # If the function doesn't have a proper Generator (or
             # Awaitable) return type, anything is permissible.
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         elif not isinstance(return_type, Instance):
             # Same as above, but written as a separate branch so the typechecker can understand.
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         elif return_type.type.fullname() == 'typing.Awaitable' and len(return_type.args) == 1:
             # Awaitable: tr is args[0].
             return return_type.args[0]
@@ -512,7 +523,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             return return_type.args[2]
         else:
             # Supertype of Generator (Iterator, Iterable, object): tr is any.
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
 
     def visit_func_def(self, defn: FuncDef) -> None:
         """Type check a function definition."""
@@ -555,15 +566,15 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                                        'original type')
 
     def check_func_item(self, defn: FuncItem,
-                        type_override: CallableType = None,
-                        name: str = None) -> None:
+                        type_override: Optional[CallableType] = None,
+                        name: Optional[str] = None) -> None:
         """Type check a function.
 
         If type_override is provided, use it as the function type.
         """
         # We may be checking a function definition or an anonymous function. In
         # the first case, set up another reference with the precise type.
-        fdef = None  # type: FuncDef
+        fdef = None  # type: Optional[FuncDef]
         if isinstance(defn, FuncDef):
             fdef = defn
 
@@ -575,14 +586,22 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                 if type_override:
                     typ = type_override
                 if isinstance(typ, CallableType):
-                    self.check_func_def(defn, typ, name)
+                    with self.enter_attribute_inference_context():
+                        self.check_func_def(defn, typ, name)
                 else:
                     raise RuntimeError('Not supported')
 
         self.dynamic_funcs.pop()
         self.current_node_deferred = False
 
-    def check_func_def(self, defn: FuncItem, typ: CallableType, name: str) -> None:
+    @contextmanager
+    def enter_attribute_inference_context(self) -> Iterator[None]:
+        old_types = self.inferred_attribute_types
+        self.inferred_attribute_types = {}
+        yield None
+        self.inferred_attribute_types = old_types
+
+    def check_func_def(self, defn: FuncItem, typ: CallableType, name: Optional[str]) -> None:
         """Type check a function definition."""
         # Expand type variables with value restrictions to ordinary types.
         for item, typ in self.expand_typevars(defn, typ):
@@ -596,10 +615,6 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                 # precise type.
                 if isinstance(item, FuncDef):
                     fdef = item
-                else:
-                    fdef = None
-
-                if fdef:
                     # Check if __init__ has an invalid, non-None return type.
                     if (fdef.info and fdef.name() in ('__init__', '__init_subclass__') and
                             not isinstance(typ.ret_type, NoneTyp) and
@@ -607,19 +622,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                         self.fail(messages.MUST_HAVE_NONE_RETURN_TYPE.format(fdef.name()),
                                   item)
 
-                    show_untyped = not self.is_typeshed_stub or self.options.warn_incomplete_stub
-                    if self.options.disallow_untyped_defs and show_untyped:
-                        # Check for functions with unspecified/not fully specified types.
-                        def is_implicit_any(t: Type) -> bool:
-                            return isinstance(t, AnyType) and t.implicit
-
-                        if fdef.type is None:
-                            self.fail(messages.FUNCTION_TYPE_EXPECTED, fdef)
-                        elif isinstance(fdef.type, CallableType):
-                            if is_implicit_any(fdef.type.ret_type):
-                                self.fail(messages.RETURN_TYPE_EXPECTED, fdef)
-                            if any(is_implicit_any(t) for t in fdef.type.arg_types):
-                                self.fail(messages.ARGUMENT_TYPE_EXPECTED, fdef)
+                    self.check_for_missing_annotations(fdef)
                     if 'unimported' in self.options.disallow_any:
                         if fdef.type and isinstance(fdef.type, CallableType):
                             ret_type = fdef.type.ret_type
@@ -631,12 +634,15 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                                     self.msg.unimported_type_becomes_any(prefix, arg_type, fdef)
                     check_for_explicit_any(fdef.type, self.options, self.is_typeshed_stub,
                                            self.msg, context=fdef)
-                if name in nodes.reverse_op_method_set:
-                    self.check_reverse_op_method(item, typ, name)
-                elif name in ('__getattr__', '__getattribute__'):
-                    self.check_getattr_method(typ, defn)
-                elif name == '__setattr__':
-                    self.check_setattr_method(typ, defn)
+
+                if name:  # Special method names
+                    if name in nodes.reverse_op_method_set:
+                        self.check_reverse_op_method(item, typ, name)
+                    elif name in ('__getattr__', '__getattribute__'):
+                        self.check_getattr_method(typ, defn, name)
+                    elif name == '__setattr__':
+                        self.check_setattr_method(typ, defn)
+
                 # Refuse contravariant return type variable
                 if isinstance(typ.ret_type, TypeVarType):
                     if typ.ret_type.variance == CONTRAVARIANT:
@@ -728,9 +734,15 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
 
                 # Type check initialization expressions.
                 for arg in item.arguments:
-                    init = arg.initialization_statement
-                    if init:
-                        self.accept(init)
+                    if arg.initializer is not None:
+                        name = arg.variable.name()
+                        msg = 'Incompatible default for '
+                        if name.startswith('__tuple_arg_'):
+                            msg += "tuple argument {}".format(name[12:])
+                        else:
+                            msg += 'argument "{}"'.format(name)
+                        self.check_simple_assignment(arg.variable.type, arg.initializer,
+                            context=arg, msg=msg, lvalue_name='argument', rvalue_name='default')
 
             # Type check body in a new scope.
             with self.binder.top_frame_context():
@@ -761,6 +773,26 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
 
             self.binder = old_binder
 
+    def check_for_missing_annotations(self, fdef: FuncItem) -> None:
+        # Check for functions with unspecified/not fully specified types.
+        def is_unannotated_any(t: Type) -> bool:
+            return isinstance(t, AnyType) and t.type_of_any == TypeOfAny.unannotated
+
+        has_explicit_annotation = (isinstance(fdef.type, CallableType)
+                                   and any(not is_unannotated_any(t)
+                                           for t in fdef.type.arg_types + [fdef.type.ret_type]))
+
+        show_untyped = not self.is_typeshed_stub or self.options.warn_incomplete_stub
+        check_incomplete_defs = self.options.disallow_incomplete_defs and has_explicit_annotation
+        if show_untyped and (self.options.disallow_untyped_defs or check_incomplete_defs):
+            if fdef.type is None and self.options.disallow_untyped_defs:
+                self.fail(messages.FUNCTION_TYPE_EXPECTED, fdef)
+            elif isinstance(fdef.type, CallableType):
+                if is_unannotated_any(fdef.type.ret_type):
+                    self.fail(messages.RETURN_TYPE_EXPECTED, fdef)
+                if any(is_unannotated_any(t) for t in fdef.type.arg_types):
+                    self.fail(messages.ARGUMENT_TYPE_EXPECTED, fdef)
+
     def is_trivial_body(self, block: Block) -> bool:
         body = block.body
 
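
For reference, a minimal sketch of what the new check_for_missing_annotations
helper reports (function names are illustrative, not part of the patch):

    def add(x, y):       # illustrative; reported only under --disallow-untyped-defs,
        return x + y     # since it has no annotations at all

    def log(msg, level: int) -> None:   # partially annotated: 'msg' is reported
        print(msg, level)               # under --disallow-incomplete-defs as well
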
@@ -927,17 +959,35 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             if fail:
                 self.msg.signatures_incompatible(method, other_method, defn)
 
-    def check_getattr_method(self, typ: CallableType, context: Context) -> None:
-        method_type = CallableType([AnyType(), self.named_type('builtins.str')],
-                                   [nodes.ARG_POS, nodes.ARG_POS],
-                                   [None, None],
-                                   AnyType(),
-                                   self.named_type('builtins.function'))
+    def check_getattr_method(self, typ: CallableType, context: Context, name: str) -> None:
+        if len(self.scope.stack) == 1:
+            # module-level __getattr__
+            if name == '__getattribute__':
+                self.msg.fail('__getattribute__ is not valid at the module level', context)
+                return
+            elif name == '__getattr__' and not self.is_stub:
+                self.msg.fail('__getattr__ is not valid at the module level outside a stub file',
+                              context)
+                return
+            method_type = CallableType([self.named_type('builtins.str')],
+                                       [nodes.ARG_POS],
+                                       [None],
+                                       AnyType(TypeOfAny.special_form),
+                                       self.named_type('builtins.function'))
+        else:
+            method_type = CallableType([AnyType(TypeOfAny.special_form),
+                                        self.named_type('builtins.str')],
+                                       [nodes.ARG_POS, nodes.ARG_POS],
+                                       [None, None],
+                                       AnyType(TypeOfAny.special_form),
+                                       self.named_type('builtins.function'))
         if not is_subtype(typ, method_type):
             self.msg.invalid_signature(typ, context)
 
     def check_setattr_method(self, typ: CallableType, context: Context) -> None:
-        method_type = CallableType([AnyType(), self.named_type('builtins.str'), AnyType()],
+        method_type = CallableType([AnyType(TypeOfAny.special_form),
+                                    self.named_type('builtins.str'),
+                                    AnyType(TypeOfAny.special_form)],
                                    [nodes.ARG_POS, nodes.ARG_POS, nodes.ARG_POS],
                                    [None, None, None],
                                    NoneTyp(),
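
For reference, the module-level branch above accepts __getattr__ only in stub
files and only with a signature taking the attribute name as a str; a minimal
sketch (file and function contents are illustrative, not part of the patch):

    # mod.pyi -- accepted: module-level __getattr__ in a stub file
    from typing import Any

    def __getattr__(name: str) -> Any: ...

    # In a regular .py module the same definition is rejected, and
    # __getattribute__ is never valid at the module level.
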
@@ -968,13 +1018,13 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         else:
             return [(defn, typ)]
 
-    def check_method_override(self, defn: FuncBase) -> None:
+    def check_method_override(self, defn: Union[FuncBase, Decorator]) -> None:
         """Check if function definition is compatible with base classes."""
         # Check against definitions in base classes.
         for base in defn.info.mro[1:]:
             self.check_method_or_accessor_override_for_base(defn, base)
 
-    def check_method_or_accessor_override_for_base(self, defn: FuncBase,
+    def check_method_or_accessor_override_for_base(self, defn: Union[FuncBase, Decorator],
                                                    base: TypeInfo) -> None:
         """Check if method definition is compatible with a base class."""
         if base:
@@ -994,13 +1044,26 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                                                                   base)
 
     def check_method_override_for_base_with_name(
-            self, defn: FuncBase, name: str, base: TypeInfo) -> None:
+            self, defn: Union[FuncBase, Decorator], name: str, base: TypeInfo) -> None:
         base_attr = base.names.get(name)
         if base_attr:
             # The name of the method is defined in the base class.
 
+            # Point errors at the 'def' line (important for backward compatibility
+            # of type ignores).
+            if not isinstance(defn, Decorator):
+                context = defn
+            else:
+                context = defn.func
             # Construct the type of the overriding method.
-            typ = bind_self(self.function_type(defn), self.scope.active_self_type())
+            if isinstance(defn, FuncBase):
+                typ = self.function_type(defn)  # type: Type
+            else:
+                assert defn.var.is_ready
+                assert defn.var.type is not None
+                typ = defn.var.type
+            if isinstance(typ, FunctionLike) and not is_static(context):
+                typ = bind_self(typ, self.scope.active_self_type())
             # Map the overridden method type to subtype context so that
             # it can be checked for compatibility.
             original_type = base_attr.type
@@ -1011,10 +1074,15 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                     original_type = self.function_type(base_attr.node.func)
                 else:
                     assert False, str(base_attr.node)
-            if isinstance(original_type, FunctionLike):
-                original = map_type_from_supertype(
-                    bind_self(original_type, self.scope.active_self_type()),
-                    defn.info, base)
+            if isinstance(original_type, AnyType) or isinstance(typ, AnyType):
+                pass
+            elif isinstance(original_type, FunctionLike) and isinstance(typ, FunctionLike):
+                if (isinstance(base_attr.node, (FuncBase, Decorator))
+                        and not is_static(base_attr.node)):
+                    bound = bind_self(original_type, self.scope.active_self_type())
+                else:
+                    bound = original_type
+                original = map_type_from_supertype(bound, defn.info, base)
                 # Check that the types are compatible.
                 # TODO overloaded signatures
                 self.check_override(typ,
@@ -1022,12 +1090,17 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                                     defn.name(),
                                     name,
                                     base.name(),
-                                    defn)
-            elif isinstance(original_type, AnyType):
+                                    context)
+            elif is_equivalent(original_type, typ):
+                # Assume invariance for a non-callable attribute here. Note
+                # that this doesn't affect read-only properties, which can have
+                # covariant overrides.
+                #
+                # TODO: Allow covariance for read-only attributes?
                 pass
             else:
                 self.msg.signature_incompatible_with_supertype(
-                    defn.name(), name, base.name(), defn)
+                    defn.name(), name, base.name(), context)
 
     def check_override(self, override: FunctionLike, original: FunctionLike,
                        name: str, name_in_super: str, supertype: str,
@@ -1101,6 +1174,8 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
     def visit_class_def(self, defn: ClassDef) -> None:
         """Type check a class definition."""
         typ = defn.info
+        if typ.is_protocol and typ.defn.type_vars:
+            self.check_protocol_variance(defn)
         with self.errors.enter_type(defn.name), self.enter_partial_types():
             old_binder = self.binder
             self.binder = ConditionalTypeBinder()
@@ -1112,6 +1187,33 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                 # Otherwise we've already found errors; more errors are not useful
                 self.check_multiple_inheritance(typ)
 
+    def check_protocol_variance(self, defn: ClassDef) -> None:
+        """Check that protocol definition is compatible with declared
+        variances of type variables.
+
+        Note that we also prohibit declaring protocol classes as invariant
+        if they are actually covariant/contravariant, since this may break
+        transitivity of subtyping, see PEP 544.
+        """
+        info = defn.info
+        object_type = Instance(info.mro[-1], [])
+        tvars = info.defn.type_vars
+        for i, tvar in enumerate(tvars):
+            up_args = [object_type if i == j else AnyType(TypeOfAny.special_form)
+                       for j, _ in enumerate(tvars)]
+            down_args = [UninhabitedType() if i == j else AnyType(TypeOfAny.special_form)
+                         for j, _ in enumerate(tvars)]
+            up, down = Instance(info, up_args), Instance(info, down_args)
+            # TODO: add advanced variance checks for recursive protocols
+            if is_subtype(down, up, ignore_declared_variance=True):
+                expected = COVARIANT
+            elif is_subtype(up, down, ignore_declared_variance=True):
+                expected = CONTRAVARIANT
+            else:
+                expected = INVARIANT
+            if expected != tvar.variance:
+                self.msg.bad_proto_variance(tvar.variance, tvar.name, expected, defn)
+
     def check_multiple_inheritance(self, typ: TypeInfo) -> None:
         """Check for multiple inheritance related errors."""
         if len(typ.bases) <= 1:
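
For reference, the new check compares the declared variance of each protocol
type variable with the variance implied by the protocol's structure; a minimal
sketch (the Box name is illustrative, not part of the patch):

    from typing import Protocol, TypeVar

    T = TypeVar('T')  # declared invariant

    class Box(Protocol[T]):      # illustrative sketch
        def content(self) -> T: ...

    # Flagged by check_protocol_variance: T only occurs in covariant positions,
    # so it should be declared as TypeVar('T', covariant=True).
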
@@ -1181,7 +1283,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             lvalue_type, _, __ = self.check_lvalue(lvalue)
             if lvalue_type is None:
                 # TODO: This is broken.
-                lvalue_type = AnyType()
+                lvalue_type = AnyType(TypeOfAny.special_form)
             message = '{} "{}"'.format(messages.INCOMPATIBLE_IMPORT_OF,
                                        cast(NameExpr, assign.rvalue).name)
             self.check_simple_assignment(lvalue_type, assign.rvalue, node,
@@ -1214,7 +1316,8 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             if isinstance(s.lvalues[-1], TupleExpr):
                 # This is a multiple assignment. Instead of figuring out which type is problematic,
                 # give a generic error message.
-                self.msg.unimported_type_becomes_any("A type on this line", AnyType(), s)
+                self.msg.unimported_type_becomes_any("A type on this line",
+                                                     AnyType(TypeOfAny.special_form), s)
             else:
                 self.msg.unimported_type_becomes_any("Type of variable", s.type, s)
         check_for_explicit_any(s.type, self.options, self.is_typeshed_stub, self.msg, context=s)
@@ -1281,15 +1384,16 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                 else:
                     rvalue_type = self.check_simple_assignment(lvalue_type, rvalue, lvalue)
 
-                # Special case: only non-abstract classes can be assigned to variables
-                # with explicit type Type[A].
+                # Special case: only non-abstract, non-protocol classes can be assigned to
+                # variables with explicit type Type[A], where A is a protocol or abstract class.
                 if (isinstance(rvalue_type, CallableType) and rvalue_type.is_type_obj() and
-                        rvalue_type.type_object().is_abstract and
+                        (rvalue_type.type_object().is_abstract or
+                         rvalue_type.type_object().is_protocol) and
                         isinstance(lvalue_type, TypeType) and
                         isinstance(lvalue_type.item, Instance) and
-                        lvalue_type.item.type.is_abstract):
-                    self.fail("Can only assign non-abstract classes"
-                              " to a variable of type '{}'".format(lvalue_type), rvalue)
+                        (lvalue_type.item.type.is_abstract or
+                         lvalue_type.item.type.is_protocol)):
+                    self.msg.concrete_only_assign(lvalue_type, rvalue)
                     return
                 if rvalue_type and infer_lvalue_type:
                     self.binder.assign_type(lvalue, rvalue_type, lvalue_type, False)
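
For reference, the extended special case above now also rejects assigning
protocol classes, not just abstract classes, to Type[A] variables; a minimal
sketch (class names are illustrative, not part of the patch):

    from abc import ABC, abstractmethod
    from typing import Type

    class Base(ABC):             # illustrative sketch
        @abstractmethod
        def run(self) -> None: ...

    class Impl(Base):
        def run(self) -> None: ...

    handler: Type[Base] = Base   # rejected: Base itself is abstract
    handler = Impl               # accepted: Impl is a concrete subclass
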
@@ -1331,6 +1435,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                 base_type, base_node = self.lvalue_type_from_base(lvalue_node, base)
 
                 if base_type:
+                    assert base_node is not None
                     if not self.check_compatibility_super(lvalue,
                                                           lvalue_type,
                                                           rvalue,
@@ -1353,7 +1458,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         # lvalue had a type defined; this is handled by other
         # parts, and all we have to worry about in that case is
         # that lvalue is compatible with the base class.
-        compare_node = None  # type: Node
+        compare_node = None
         if lvalue_type:
             compare_type = lvalue_type
             compare_node = lvalue.node
@@ -1414,6 +1519,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             if base_type:
                 if not has_no_typevars(base_type):
                     self_type = self.scope.active_self_type()
+                    assert self_type is not None, "Internal error: base lookup outside class"
                     if isinstance(self_type, TupleType):
                         instance = self_type.fallback
                     else:
@@ -1432,7 +1538,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         return None, None
 
     def check_compatibility_classvar_super(self, node: Var,
-                                           base: TypeInfo, base_node: Node) -> bool:
+                                           base: TypeInfo, base_node: Optional[Node]) -> bool:
         if not isinstance(base_node, Var):
             return True
         if node.is_classvar and not base_node.is_classvar:
@@ -1499,7 +1605,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                                rvalue: Expression,
                                context: Context,
                                infer_lvalue_type: bool = True,
-                               msg: str = None) -> None:
+                               msg: Optional[str] = None) -> None:
         """Check the assignment of one rvalue to a number of lvalues."""
 
         # Infer the type of an ordinary rvalue expression.
@@ -1516,7 +1622,9 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             for lv in lvalues:
                 if isinstance(lv, StarExpr):
                     lv = lv.expr
-                self.check_assignment(lv, self.temp_node(AnyType(), context), infer_lvalue_type)
+                temp_node = self.temp_node(AnyType(TypeOfAny.from_another_any,
+                                                   source_any=rvalue_type), context)
+                self.check_assignment(lv, temp_node, infer_lvalue_type)
         elif isinstance(rvalue_type, TupleType):
             self.check_multi_assignment_from_tuple(lvalues, rvalue, rvalue_type,
                                                    context, undefined_rvalue, infer_lvalue_type)
@@ -1577,7 +1685,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         def append_types_for_inference(lvs: List[Expression], rv_types: List[Type]) -> None:
             for lv, rv_type in zip(lvs, rv_types):
                 sub_lvalue_type, index_expr, inferred = self.check_lvalue(lv)
-                if sub_lvalue_type:
+                if sub_lvalue_type and not isinstance(sub_lvalue_type, PartialType):
                     type_parameters.append(sub_lvalue_type)
                 else:  # index lvalue
                     # TODO Figure out more precise type context, probably
@@ -1588,7 +1696,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
 
         if star_lv:
             sub_lvalue_type, index_expr, inferred = self.check_lvalue(star_lv.expr)
-            if sub_lvalue_type:
+            if sub_lvalue_type and not isinstance(sub_lvalue_type, PartialType):
                 type_parameters.extend([sub_lvalue_type] * len(star_rv_types))
             else:  # index lvalue
                 # TODO Figure out more precise type context, probably
@@ -1618,7 +1726,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         if isinstance(type, CallableType) and type.is_type_obj():
             type = type.fallback
         return (is_subtype(type, self.named_generic_type('typing.Iterable',
-                                                        [AnyType()])) and
+                                                         [AnyType(TypeOfAny.special_form)])) and
                 isinstance(type, Instance))
 
     def check_multi_assignment_from_iterable(self, lvalues: List[Lvalue], rvalue_type: Type,
@@ -1697,6 +1805,12 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             if not self.infer_partial_type(name, lvalue, init_type):
                 self.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
                 self.set_inference_error_fallback_type(name, lvalue, init_type, context)
+        elif (isinstance(lvalue, MemberExpr) and self.inferred_attribute_types is not None
+              and lvalue.def_var and lvalue.def_var in self.inferred_attribute_types
+              and not is_same_type(self.inferred_attribute_types[lvalue.def_var], init_type)):
+            # Multiple, inconsistent types inferred for an attribute.
+            self.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+            name.type = AnyType(TypeOfAny.from_error)
         else:
             # Infer type of the target.
 
@@ -1733,6 +1847,10 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         if var and not self.current_node_deferred:
             var.type = type
             var.is_inferred = True
+            if isinstance(lvalue, MemberExpr) and self.inferred_attribute_types is not None:
+                # Store inferred attribute type so that we can check consistency afterwards.
+                if lvalue.def_var is not None:
+                    self.inferred_attribute_types[lvalue.def_var] = type
             self.store_type(lvalue, type)
 
     def set_inference_error_fallback_type(self, var: Var, lvalue: Lvalue, type: Type,
@@ -1748,16 +1866,16 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         We implement this here by giving x a valid type (Any).
         """
         if context.get_line() in self.errors.ignored_lines[self.errors.file]:
-            self.set_inferred_type(var, lvalue, AnyType())
+            self.set_inferred_type(var, lvalue, AnyType(TypeOfAny.from_error))
 
-    def check_simple_assignment(self, lvalue_type: Type, rvalue: Expression,
+    def check_simple_assignment(self, lvalue_type: Optional[Type], rvalue: Expression,
                                 context: Context,
                                 msg: str = messages.INCOMPATIBLE_TYPES_IN_ASSIGNMENT,
                                 lvalue_name: str = 'variable',
                                 rvalue_name: str = 'expression') -> Type:
         if self.is_stub and isinstance(rvalue, EllipsisExpr):
             # '...' is always a valid initializer in a stub.
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
         else:
             always_allow_any = lvalue_type is not None and not isinstance(lvalue_type, AnyType)
             rvalue_type = self.expr_checker.accept(rvalue, lvalue_type,
@@ -1766,7 +1884,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                 self.msg.deleted_as_rvalue(rvalue_type, context)
             if isinstance(lvalue_type, DeletedType):
                 self.msg.deleted_as_lvalue(lvalue_type, context)
-            else:
+            elif lvalue_type:
                 self.check_subtype(rvalue_type, lvalue_type, context, msg,
                                    '{} has type'.format(rvalue_name),
                                    '{} has type'.format(lvalue_name))
@@ -1805,7 +1923,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         dunder_set = attribute_type.type.get_method('__set__')
         if dunder_set is None:
             self.msg.fail("{}.__set__ is not callable".format(attribute_type), context)
-            return AnyType(), False
+            return AnyType(TypeOfAny.from_error), False
 
         function = function_type(dunder_set, self.named_type('builtins.function'))
         bound_method = bind_self(function, attribute_type)
@@ -1818,11 +1936,11 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
 
         if not isinstance(inferred_dunder_set_type, CallableType):
             self.fail("__set__ is not callable", context)
-            return AnyType(), True
+            return AnyType(TypeOfAny.from_error), True
 
         if len(inferred_dunder_set_type.arg_types) < 2:
             # A message already will have been recorded in check_call
-            return AnyType(), False
+            return AnyType(TypeOfAny.from_error), False
 
         return inferred_dunder_set_type.arg_types[1], False
 
@@ -1877,7 +1995,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                         if not self.current_node_deferred:
                             var.type = self.named_generic_type('builtins.dict',
                                                                [full_key_type, full_value_type])
-                        del partial_types[var]
+                            del partial_types[var]
 
     def visit_expression_stmt(self, s: ExpressionStmt) -> None:
         self.expr_checker.accept(s.expr, allow_none_return=True, always_allow_any=True)
@@ -1925,8 +2043,8 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                     # (Unless you asked to be warned in that case, and the
                     # function is not declared to return Any)
                     if (self.options.warn_return_any and
-                            not is_proper_subtype(AnyType(), return_type)):
-                        self.warn(messages.RETURN_ANY.format(return_type), s)
+                            not is_proper_subtype(AnyType(TypeOfAny.special_form), return_type)):
+                        self.msg.incorrectly_returning_any(return_type, s)
                     return
 
                 # Disallow return expressions in functions declared to return
@@ -2111,22 +2229,25 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                     self.accept(s.body)
                 for i in range(len(s.handlers)):
                     with self.binder.frame_context(can_skip=True, fall_through=4):
-                        if s.types[i]:
-                            t = self.check_except_handler_test(s.types[i])
-                            if s.vars[i]:
+                        typ = s.types[i]
+                        if typ:
+                            t = self.check_except_handler_test(typ)
+                            var = s.vars[i]
+                            if var:
                                 # To support local variables, we make this a definition line,
                                 # causing assignment to set the variable's type.
-                                s.vars[i].is_def = True
+                                var.is_def = True
                                 # We also temporarily set current_node_deferred to False to
                                 # make sure the inference happens.
                                 # TODO: Use a better solution, e.g. a
                                 # separate Var for each except block.
                                 am_deferring = self.current_node_deferred
                                 self.current_node_deferred = False
-                                self.check_assignment(s.vars[i], self.temp_node(t, s.vars[i]))
+                                self.check_assignment(var, self.temp_node(t, var))
                                 self.current_node_deferred = am_deferring
                         self.accept(s.handlers[i])
-                        if s.vars[i]:
+                        var = s.vars[i]
+                        if var:
                             # Exception variables are deleted in python 3 but not python 2.
                             # But, since it's bad form in python 2 and the type checking
                             # wouldn't work very well, we delete it anyway.
@@ -2134,14 +2255,13 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                             # Unfortunately, this doesn't let us detect usage before the
                             # try/except block.
                             if self.options.python_version[0] >= 3:
-                                source = s.vars[i].name
+                                source = var.name
                             else:
                                 source = ('(exception variable "{}", which we do not '
                                           'accept outside except: blocks even in '
-                                          'python 2)'.format(s.vars[i].name))
-                            var = cast(Var, s.vars[i].node)
-                            var.type = DeletedType(source=source)
-                            self.binder.cleanse(s.vars[i])
+                                          'python 2)'.format(var.name))
+                            cast(Var, var.node).type = DeletedType(source=source)
+                            self.binder.cleanse(var)
             if s.else_body:
                 self.accept(s.else_body)
 
@@ -2161,17 +2281,17 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                 item = ttype.items()[0]
                 if not item.is_type_obj():
                     self.fail(messages.INVALID_EXCEPTION_TYPE, n)
-                    return AnyType()
+                    return AnyType(TypeOfAny.from_error)
                 exc_type = item.ret_type
             elif isinstance(ttype, TypeType):
                 exc_type = ttype.item
             else:
                 self.fail(messages.INVALID_EXCEPTION_TYPE, n)
-                return AnyType()
+                return AnyType(TypeOfAny.from_error)
 
             if not is_subtype(exc_type, self.named_type('builtins.BaseException')):
                 self.fail(messages.INVALID_EXCEPTION_TYPE, n)
-                return AnyType()
+                return AnyType(TypeOfAny.from_error)
 
             all_types.append(exc_type)
 
@@ -2209,7 +2329,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
 
         self.check_subtype(iterable,
                            self.named_generic_type('typing.AsyncIterable',
-                                                   [AnyType()]),
+                                                   [AnyType(TypeOfAny.special_form)]),
                            expr, messages.ASYNC_ITERABLE_EXPECTED)
 
         method = echk.analyze_external_member_access('__aiter__', iterable, expr)
@@ -2233,7 +2353,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             # Non-tuple iterable.
             self.check_subtype(iterable,
                                self.named_generic_type('typing.Iterable',
-                                                       [AnyType()]),
+                                                       [AnyType(TypeOfAny.special_form)]),
                                expr, messages.ITERABLE_EXPECTED)
 
             method = echk.analyze_external_member_access('__iter__', iterable,
@@ -2271,13 +2391,15 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         for d in e.decorators:
             if isinstance(d, RefExpr):
                 if d.fullname == 'typing.no_type_check':
-                    e.var.type = AnyType()
+                    e.var.type = AnyType(TypeOfAny.special_form)
                     e.var.is_ready = True
                     return
 
-        e.func.accept(self)
+        self.check_func_item(e.func, name=e.func.name())
+
+        # Process decorators from the inside out to determine the decorated
+        # signature, which may differ from the declared signature.
         sig = self.function_type(e.func)  # type: Type
-        # Process decorators from the inside out.
         for d in reversed(e.decorators):
             if refers_to_fullname(d, 'typing.overload'):
                 self.fail('Single overload definition, multiple required', e)
@@ -2287,6 +2409,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             fullname = None
             if isinstance(d, RefExpr):
                 fullname = d.fullname
+            self.check_for_untyped_decorator(e.func, dec, d)
             sig, t2 = self.expr_checker.check_call(dec, [temp],
                                                    [nodes.ARG_POS], e,
                                                    callable_name=fullname)
@@ -2297,6 +2420,17 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         e.var.is_ready = True
         if e.func.is_property:
             self.check_incompatible_property_override(e)
+        if e.func.info and not e.func.is_dynamic():
+            self.check_method_override(e)
+
+    def check_for_untyped_decorator(self,
+                                    func: FuncDef,
+                                    dec_type: Type,
+                                    dec_expr: Expression) -> None:
+        if (self.options.disallow_untyped_decorators and
+                is_typed_callable(func.type) and
+                is_untyped_decorator(dec_type)):
+            self.msg.typed_function_untyped_decorator(func.name(), dec_expr)
 
     def check_incompatible_property_override(self, e: Decorator) -> None:
         if not e.var.is_settable_property and e.func.info is not None:
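
For reference, check_for_untyped_decorator only fires when the decorated
function is typed, the decorator is untyped, and --disallow-untyped-decorators
is enabled; a minimal sketch (names are illustrative, not part of the patch):

    def plain(func):            # unannotated decorator: its result becomes Any
        return func

    @plain                      # reported under --disallow-untyped-decorators,
    def typed(x: int) -> int:   # since the typed function loses its signature
        return x                # behind the untyped decorator
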
@@ -2326,7 +2460,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         if mypy.checkexpr.has_any_type(typ):
             self.msg.untyped_decorated_function(typ, func)
 
-    def check_async_with_item(self, expr: Expression, target: Expression,
+    def check_async_with_item(self, expr: Expression, target: Optional[Expression],
                               infer_lvalue_type: bool) -> None:
         echk = self.expr_checker
         ctx = echk.accept(expr)
@@ -2337,12 +2471,12 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         if target:
             self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type)
         exit = echk.analyze_external_member_access('__aexit__', ctx, expr)
-        arg = self.temp_node(AnyType(), expr)
+        arg = self.temp_node(AnyType(TypeOfAny.special_form), expr)
         res = echk.check_call(exit, [arg] * 3, [nodes.ARG_POS] * 3, expr)[0]
         echk.check_awaitable_expr(
             res, expr, messages.INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT)
 
-    def check_with_item(self, expr: Expression, target: Expression,
+    def check_with_item(self, expr: Expression, target: Optional[Expression],
                         infer_lvalue_type: bool) -> None:
         echk = self.expr_checker
         ctx = echk.accept(expr)
@@ -2351,7 +2485,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         if target:
             self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type)
         exit = echk.analyze_external_member_access('__exit__', ctx, expr)
-        arg = self.temp_node(AnyType(), expr)
+        arg = self.temp_node(AnyType(TypeOfAny.special_form), expr)
         echk.check_call(exit, [arg] * 3, [nodes.ARG_POS] * 3, expr)
 
     def visit_print_stmt(self, s: PrintStmt) -> None:
@@ -2376,8 +2510,8 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
 
     def check_subtype(self, subtype: Type, supertype: Type, context: Context,
                       msg: str = messages.INCOMPATIBLE_TYPES,
-                      subtype_label: str = None,
-                      supertype_label: str = None) -> bool:
+                      subtype_label: Optional[str] = None,
+                      supertype_label: Optional[str] = None) -> bool:
         """Generate an error if the subtype is not compatible with
         supertype."""
         if is_subtype(subtype, supertype):
@@ -2400,6 +2534,13 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             self.fail(msg, context)
             if note_msg:
                 self.note(note_msg, context)
+            if (isinstance(supertype, Instance) and supertype.type.is_protocol and
+                    isinstance(subtype, (Instance, TupleType, TypedDictType))):
+                self.msg.report_protocol_problems(subtype, supertype, context)
+            if isinstance(supertype, CallableType) and isinstance(subtype, Instance):
+                call = find_member('__call__', subtype, subtype)
+                if call:
+                    self.msg.note_call(subtype, call, context)
             return False
 
     def contains_none(self, t: Type) -> bool:
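
For reference, the extra notes added to check_subtype above are emitted when a
protocol subtype check fails, or when an instance with a __call__ method is used
where a callable type is expected; a minimal sketch (class names are
illustrative, not part of the patch):

    from typing import Protocol

    class SupportsLen(Protocol):   # illustrative sketch
        def __len__(self) -> int: ...

    class Thing: ...

    x: SupportsLen = Thing()   # incompatible-assignment error, followed by notes
                               # pointing out the missing __len__ member
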
@@ -2423,7 +2564,8 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         sym = self.lookup_qualified(name)
         node = sym.node
         assert isinstance(node, TypeInfo)
-        return Instance(node, [AnyType()] * len(node.defn.type_vars))
+        any_type = AnyType(TypeOfAny.from_omitted_generics)
+        return Instance(node, [any_type] * len(node.defn.type_vars))
 
     def named_generic_type(self, name: str, args: List[Type]) -> Instance:
         """Return an instance with the given name and type arguments.
@@ -2487,7 +2629,9 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
             parts = name.split('.')
             n = self.modules[parts[0]]
             for i in range(1, len(parts) - 1):
-                n = cast(MypyFile, n.names.get(parts[i], None).node)
+                sym = n.names.get(parts[i])
+                assert sym is not None, "Internal error: attempted lookup of unknown name"
+                n = cast(MypyFile, sym.node)
             last = parts[-1]
             if last in n.names:
                 return n.names[last]
@@ -2516,8 +2660,10 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                     # None partial type: assume variable is intended to have type None
                     var.type = NoneTyp()
                 else:
-                    self.msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
-                    var.type = AnyType()
+                    if var not in self.partial_reported:
+                        self.msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+                        self.partial_reported.add(var)
+                    var.type = AnyType(TypeOfAny.from_error)
 
     def find_partial_types(self, var: Var) -> Optional[Dict[Var, Context]]:
         for partial_types in reversed(self.partial_types):
@@ -2525,7 +2671,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
                 return partial_types
         return None
 
-    def temp_node(self, t: Type, context: Context = None) -> TempNode:
+    def temp_node(self, t: Type, context: Optional[Context] = None) -> TempNode:
         """Create a temporary node with the given, fixed type."""
         temp = TempNode(t)
         if context:
@@ -2540,9 +2686,9 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
         """Produce a warning message."""
         self.msg.warn(msg, context)
 
-    def note(self, msg: str, context: Context) -> None:
+    def note(self, msg: str, context: Context, offset: int = 0) -> None:
         """Produce a note."""
-        self.msg.note(msg, context)
+        self.msg.note(msg, context, offset=offset)
 
     def iterable_item_type(self, instance: Instance) -> Type:
         iterable = map_instance_to_supertype(
@@ -2630,7 +2776,7 @@ def conditional_type_map(expr: Expression,
         return {}, {}
 
 
-def partition_by_callable(type: Optional[Type]) -> Tuple[List[Type], List[Type]]:
+def partition_by_callable(type: Type) -> Tuple[List[Type], List[Type]]:
     """Takes in a type and partitions that type into callable subtypes and
     uncallable subtypes.
 
@@ -2661,7 +2807,7 @@ def partition_by_callable(type: Optional[Type]) -> Tuple[List[Type], List[Type]]
 
     if isinstance(type, Instance):
         method = type.type.get_method('__call__')
-        if method:
+        if method and method.type:
             callables, uncallables = partition_by_callable(method.type)
             if len(callables) and not len(uncallables):
                 # Only consider the type callable if its __call__ method is
@@ -2740,9 +2886,9 @@ def and_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
     # arbitrarily give precedence to m2. (In the future, we could use
     # an intersection type.)
     result = m2.copy()
-    m2_keys = set(n2.literal_hash for n2 in m2)
+    m2_keys = set(literal_hash(n2) for n2 in m2)
     for n1 in m1:
-        if n1.literal_hash not in m2_keys:
+        if literal_hash(n1) not in m2_keys:
             result[n1] = m1[n1]
     return result
 
@@ -2764,13 +2910,13 @@ def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
     result = {}
     for n1 in m1:
         for n2 in m2:
-            if n1.literal_hash == n2.literal_hash:
+            if literal_hash(n1) == literal_hash(n2):
                 result[n1] = UnionType.make_simplified_union([m1[n1], m2[n2]])
     return result
 
 
 def convert_to_typetype(type_map: TypeMap) -> TypeMap:
-    converted_type_map = {}  # type: TypeMap
+    converted_type_map = {}  # type: Dict[Expression, Type]
     if type_map is None:
         return None
     for expr, typ in type_map.items():
@@ -2804,13 +2950,13 @@ def find_isinstance_check(node: Expression,
             if len(node.args) != 2:  # the error will be reported later
                 return {}, {}
             expr = node.args[0]
-            if expr.literal == LITERAL_TYPE:
+            if literal(expr) == LITERAL_TYPE:
                 vartype = type_map[expr]
                 type = get_isinstance_type(node.args[1], type_map)
                 return conditional_type_map(expr, vartype, type)
         elif refers_to_fullname(node.callee, 'builtins.issubclass'):
             expr = node.args[0]
-            if expr.literal == LITERAL_TYPE:
+            if literal(expr) == LITERAL_TYPE:
                 vartype = type_map[expr]
                 type = get_isinstance_type(node.args[1], type_map)
                 if isinstance(vartype, UnionType):
@@ -2834,7 +2980,7 @@ def find_isinstance_check(node: Expression,
                 return yes_map, no_map
         elif refers_to_fullname(node.callee, 'builtins.callable'):
             expr = node.args[0]
-            if expr.literal == LITERAL_TYPE:
+            if literal(expr) == LITERAL_TYPE:
                 vartype = type_map[expr]
                 return conditional_callable_type_map(expr, vartype)
     elif isinstance(node, ComparisonExpr) and experiments.STRICT_OPTIONAL:
@@ -2844,7 +2990,8 @@ def find_isinstance_check(node: Expression,
             if_vars = {}  # type: TypeMap
             else_vars = {}  # type: TypeMap
             for expr in node.operands:
-                if expr.literal == LITERAL_TYPE and not is_literal_none(expr) and expr in type_map:
+                if (literal(expr) == LITERAL_TYPE and not is_literal_none(expr)
+                        and expr in type_map):
                     # This should only be true at most once: there should be
                     # two elements in node.operands, and at least one of them
                     # should represent a None.
@@ -3004,6 +3151,11 @@ def is_unsafe_overlapping_signatures(signature: Type, other: Type) -> bool:
             # latter will never be called
             if is_more_general_arg_prefix(signature, other):
                 return False
+            # Special case: all args are subtypes, and returns are subtypes
+            if (all(is_proper_subtype(s, o)
+                    for (s, o) in zip(signature.arg_types, other.arg_types)) and
+                    is_proper_subtype(signature.ret_type, other.ret_type)):
+                return False
             return not is_more_precise_signature(signature, other)
     return True
 
@@ -3138,7 +3290,7 @@ def is_valid_inferred_type_component(typ: Type) -> bool:
     return True
 
 
-def is_node_static(node: Node) -> Optional[bool]:
+def is_node_static(node: Optional[Node]) -> Optional[bool]:
     """Find out if a node describes a static function method."""
 
     if isinstance(node, FuncDef):
@@ -3190,3 +3342,26 @@ class Scope:
 @contextmanager
 def nothing() -> Iterator[None]:
     yield
+
+
+def is_typed_callable(c: Optional[Type]) -> bool:
+    if not c or not isinstance(c, CallableType):
+        return False
+    return not all(isinstance(t, AnyType) and t.type_of_any == TypeOfAny.unannotated
+                   for t in c.arg_types + [c.ret_type])
+
+
+def is_untyped_decorator(typ: Optional[Type]) -> bool:
+    if not typ or not isinstance(typ, CallableType):
+        return True
+    return typ.implicit
+
+
+def is_static(func: Union[FuncBase, Decorator]) -> bool:
+    if isinstance(func, Decorator):
+        return is_static(func.func)
+    elif isinstance(func, OverloadedFuncDef):
+        return any(is_static(item) for item in func.items)
+    elif isinstance(func, FuncItem):
+        return func.is_static
+    return False
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 13e195d..e35ab90 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -1,14 +1,14 @@
 """Expression type checker. This file is conceptually part of TypeChecker."""
 
 from collections import OrderedDict
-from typing import cast, Dict, Set, List, Tuple, Callable, Union, Optional
+from typing import cast, Dict, Set, List, Tuple, Callable, Union, Optional, Iterable, Sequence, Any
 
 from mypy.errors import report_internal_error
 from mypy.typeanal import has_any_from_unimported_type, check_for_explicit_any, set_any_tvars
 from mypy.types import (
     Type, AnyType, CallableType, Overloaded, NoneTyp, TypeVarDef,
     TupleType, TypedDictType, Instance, TypeVarType, ErasedType, UnionType,
-    PartialType, DeletedType, UnboundType, UninhabitedType, TypeType,
+    PartialType, DeletedType, UnboundType, UninhabitedType, TypeType, TypeOfAny,
     true_only, false_only, is_named_instance, function_type, callable_type, FunctionLike,
     get_typ_args, set_typ_args,
     StarType)
@@ -24,6 +24,7 @@ from mypy.nodes import (
     TypeAliasExpr, BackquoteExpr, EnumCallExpr,
     ARG_POS, ARG_NAMED, ARG_STAR, ARG_STAR2, MODULE_REF, TVAR, LITERAL_TYPE,
 )
+from mypy.literals import literal
 from mypy import nodes
 import mypy.checker
 from mypy import types
@@ -35,7 +36,7 @@ from mypy.infer import infer_type_arguments, infer_function_type_arguments
 from mypy import join
 from mypy.meet import narrow_declared_type
 from mypy.maptype import map_instance_to_supertype
-from mypy.subtypes import is_subtype, is_equivalent
+from mypy.subtypes import is_subtype, is_equivalent, find_member, non_method_protocol_members
 from mypy import applytype
 from mypy import erasetype
 from mypy.checkmember import analyze_member_access, type_object_type, bind_self
@@ -145,7 +146,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                     if partial_types is not None and not self.chk.current_node_deferred:
                         context = partial_types[node]
                         self.msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
-                    result = AnyType()
+                    result = AnyType(TypeOfAny.special_form)
         elif isinstance(node, FuncDef):
             # Reference to a global function.
             result = function_type(node, self.named_type('builtins.function'))
@@ -154,6 +155,10 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         elif isinstance(node, TypeInfo):
             # Reference to a type object.
             result = type_object_type(node, self.named_type)
+            if isinstance(self.type_context[-1], TypeType):
+                # This is the type in a Type[] expression, so substitute type
+                # variables with Any.
+                result = erasetype.erase_typevars(result)
         elif isinstance(node, MypyFile):
             # Reference to a module object.
             try:
@@ -168,7 +173,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         else:
             # Unknown reference; use any type implicitly to avoid
             # generating extra type errors.
-            result = AnyType()
+            result = AnyType(TypeOfAny.from_error)
         assert result is not None
         return result
 
@@ -179,7 +184,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             if not var.is_ready and self.chk.in_checked_function():
                 self.chk.handle_cannot_determine_type(var.name(), context)
             # Implicit 'Any' type.
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
 
     def visit_call_expr(self, e: CallExpr, allow_none_return: bool = False) -> Type:
         """Type check a call expression."""
@@ -196,11 +201,12 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 and len(e.args) == 2):
             for typ in mypy.checker.flatten(e.args[1]):
                 if isinstance(typ, NameExpr):
+                    node = None
                     try:
                         node = self.chk.lookup_qualified(typ.name)
                     except KeyError:
                         # Undefined names should already be reported in semantic analysis.
-                        node = None
+                        pass
                 if ((isinstance(typ, IndexExpr)
                         and isinstance(typ.analyzed, (TypeApplication, TypeAliasExpr)))
                         # node.kind == TYPE_ALIAS only for aliases like It = Iterable[int].
@@ -212,13 +218,25 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                     elif typ.node.is_newtype:
                         self.msg.fail(messages.CANNOT_ISINSTANCE_NEWTYPE, e)
         self.try_infer_partial_type(e)
-        callee_type = self.accept(e.callee, always_allow_any=True)
+        type_context = None
+        if isinstance(e.callee, LambdaExpr):
+            formal_to_actual = map_actuals_to_formals(
+                e.arg_kinds, e.arg_names,
+                e.callee.arg_kinds, e.callee.arg_names,
+                lambda i: self.accept(e.args[i]))
+
+            arg_types = [join.join_type_list([self.accept(e.args[j]) for j in formal_to_actual[i]])
+                         for i in range(len(e.callee.arg_kinds))]
+            type_context = CallableType(arg_types, e.callee.arg_kinds, e.callee.arg_names,
+                                        ret_type=self.object_type(),
+                                        fallback=self.named_type('builtins.function'))
+        callee_type = self.accept(e.callee, type_context, always_allow_any=True)
         if (self.chk.options.disallow_untyped_calls and
                 self.chk.in_checked_function() and
                 isinstance(callee_type, CallableType)
                 and callee_type.implicit):
             return self.msg.untyped_function_call(callee_type, e)
-        # Figure out the full name of the callee for plugin loopup.
+        # Figure out the full name of the callee for plugin lookup.
         object_type = None
         if not isinstance(e.callee, RefExpr):
             fullname = None
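
(Illustration only.) The new LambdaExpr branch above synthesizes a callable type context for a lambda that is called directly, so its parameters can pick up types from the actual arguments; a minimal sketch of that call shape:

    # Previously a directly-called lambda received no type context for 'x';
    # the new branch builds one from the inferred argument types.
    result = (lambda x: x + 1)(41)
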
@@ -242,24 +260,50 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                     # Apply plugin signature hook that may generate a better signature.
                     signature_hook = self.plugin.get_method_signature_hook(fullname)
                     if signature_hook:
+                        assert object_type is not None
                         callee_type = self.apply_method_signature_hook(
                             e, callee_type, object_type, signature_hook)
         ret_type = self.check_call_expr_with_callee_type(callee_type, e, fullname, object_type)
+        if isinstance(e.callee, RefExpr) and len(e.args) == 2:
+            if e.callee.fullname in ('builtins.isinstance', 'builtins.issubclass'):
+                self.check_runtime_protocol_test(e)
+            if e.callee.fullname == 'builtins.issubclass':
+                self.check_protocol_issubclass(e)
         if isinstance(ret_type, UninhabitedType):
             self.chk.binder.unreachable()
         if not allow_none_return and isinstance(ret_type, NoneTyp):
             self.chk.msg.does_not_return_value(callee_type, e)
-            return AnyType(implicit=True)
+            return AnyType(TypeOfAny.from_error)
         return ret_type
 
+    def check_runtime_protocol_test(self, e: CallExpr) -> None:
+        for expr in mypy.checker.flatten(e.args[1]):
+            tp = self.chk.type_map[expr]
+            if (isinstance(tp, CallableType) and tp.is_type_obj() and
+                    tp.type_object().is_protocol and
+                    not tp.type_object().runtime_protocol):
+                self.chk.fail('Only @runtime protocols can be used with'
+                              ' instance and class checks', e)
+
+    def check_protocol_issubclass(self, e: CallExpr) -> None:
+        for expr in mypy.checker.flatten(e.args[1]):
+            tp = self.chk.type_map[expr]
+            if (isinstance(tp, CallableType) and tp.is_type_obj() and
+                    tp.type_object().is_protocol):
+                attr_members = non_method_protocol_members(tp.type_object())
+                if attr_members:
+                    self.chk.msg.report_non_method_protocol(tp.type_object(),
+                                                            attr_members, e)
+
     def check_typeddict_call(self, callee: TypedDictType,
                              arg_kinds: List[int],
-                             arg_names: List[str],
+                             arg_names: Sequence[Optional[str]],
                              args: List[Expression],
                              context: Context) -> Type:
         if len(args) >= 1 and all([ak == ARG_NAMED for ak in arg_kinds]):
             # ex: Point(x=42, y=1337)
-            item_names = arg_names
+            assert all(arg_name is not None for arg_name in arg_names)
+            item_names = cast(List[str], arg_names)
             item_args = args
             return self.check_typeddict_call_with_kwargs(
                 callee, OrderedDict(zip(item_names, item_args)), context)
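
(Illustration only.) A sketch of the two protocol checks added above for isinstance()/issubclass(); Protocol and the runtime decorator are assumed to come from typing_extensions.

    from typing_extensions import Protocol, runtime   # assumed import location

    class HasName(Protocol):
        name: str                    # a non-method protocol member

    @runtime
    class Closeable(Protocol):
        def close(self) -> None: ...

    def handle(x: object) -> None:
        if isinstance(x, HasName):        # error: Only @runtime protocols can be used
            ...                           # with instance and class checks
        if issubclass(type(x), HasName):  # also flagged because 'name' is a
            ...                           # non-method protocol member
        if isinstance(x, Closeable):      # accepted: Closeable is a @runtime protocol
            x.close()
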
@@ -279,7 +323,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 callee, OrderedDict(), context)
 
         self.chk.fail(messages.INVALID_TYPEDDICT_ARGS, context)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
     def check_typeddict_call_with_dict(self, callee: TypedDictType,
                                        kwargs: DictExpr,
@@ -291,7 +335,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         for item_name_expr in item_name_exprs:
             if not isinstance(item_name_expr, StrExpr):
                 self.chk.fail(messages.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, item_name_expr)
-                return AnyType()
+                return AnyType(TypeOfAny.from_error)
             item_names.append(item_name_expr.value)
 
         return self.check_typeddict_call_with_kwargs(
@@ -309,7 +353,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 expected_keys=expected_keys,
                 actual_keys=list(actual_keys),
                 context=context)
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
 
         for (item_name, item_expected_type) in callee.items.items():
             if item_name in kwargs:
@@ -460,9 +504,9 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
     def check_call(self, callee: Type, args: List[Expression],
                    arg_kinds: List[int], context: Context,
-                   arg_names: List[str] = None,
-                   callable_node: Expression = None,
-                   arg_messages: MessageBuilder = None,
+                   arg_names: Optional[Sequence[Optional[str]]] = None,
+                   callable_node: Optional[Expression] = None,
+                   arg_messages: Optional[MessageBuilder] = None,
                    callable_name: Optional[str] = None,
                    object_type: Optional[Type] = None) -> Tuple[Type, Type]:
         """Type check a call.
@@ -502,6 +546,11 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 self.msg.cannot_instantiate_abstract_class(
                     callee.type_object().name(), type.abstract_attributes,
                     context)
+            elif (callee.is_type_obj() and callee.type_object().is_protocol
+                  # Exceptions for Type[...] and classmethod first argument
+                  and not callee.from_type_type and not callee.is_classmethod_class):
+                self.chk.fail('Cannot instantiate protocol class "{}"'
+                              .format(callee.type_object().name()), context)
 
             formal_to_actual = map_actuals_to_formals(
                 arg_kinds, arg_names,
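
(Illustration only.) The new branch above rejects direct instantiation of protocol classes, mirroring the existing abstract-class check; Protocol again assumed from typing_extensions.

    from typing_extensions import Protocol   # assumed import location

    class Renderer(Protocol):
        def render(self) -> str: ...

    r = Renderer()   # error: Cannot instantiate protocol class "Renderer"
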
@@ -559,7 +608,11 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                                    object_type=object_type)
         elif isinstance(callee, AnyType) or not self.chk.in_checked_function():
             self.infer_arg_types_in_context(None, args)
-            return AnyType(), AnyType()
+            if isinstance(callee, AnyType):
+                return (AnyType(TypeOfAny.from_another_any, source_any=callee),
+                        AnyType(TypeOfAny.from_another_any, source_any=callee))
+            else:
+                return AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form)
         elif isinstance(callee, UnionType):
             self.msg.disable_type_names += 1
             results = [self.check_call(subtype, args, arg_kinds, context, arg_names,
@@ -584,7 +637,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             return self.check_call(item, args, arg_kinds, context, arg_names,
                                    callable_node, arg_messages)
         else:
-            return self.msg.not_callable(callee, context), AnyType()
+            return self.msg.not_callable(callee, context), AnyType(TypeOfAny.from_error)
 
     def analyze_type_type_callee(self, item: Type, context: Context) -> Type:
         """Analyze the callee X in X(...) where X is Type[item].
@@ -592,7 +645,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         Return a Y that we can pass to check_call(Y, ...).
         """
         if isinstance(item, AnyType):
-            return AnyType()
+            return AnyType(TypeOfAny.from_another_any, source_any=item)
         if isinstance(item, Instance):
             res = type_object_type(item.type, self.named_type)
             if isinstance(res, CallableType):
@@ -623,7 +676,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 return callee
 
         self.msg.unsupported_type_type(item, context)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
     def infer_arg_types_in_context(self, callee: Optional[CallableType],
                                    args: List[Expression]) -> List[Type]:
@@ -639,8 +692,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         if callee:
             fixed = min(fixed, callee.max_fixed_args())
 
-        arg_type = None  # type: Type
-        ctx = None  # type: Type
+        ctx = None
         for i, arg in enumerate(args):
             if i < fixed:
                 if callee and i < len(callee.arg_types):
@@ -667,7 +719,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
         Returns the inferred types of *actual arguments*.
         """
-        res = [None] * len(args)  # type: List[Type]
+        dummy = None  # type: Any
+        res = [dummy] * len(args)  # type: List[Type]
 
         for i, actuals in enumerate(formal_to_actual):
             for ai in actuals:
@@ -714,7 +767,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 ret_type = NoneTyp()
         args = infer_type_arguments(callable.type_var_ids(), ret_type, erased_ctx)
         # Only substitute non-Uninhabited and non-erased types.
-        new_args = []  # type: List[Type]
+        new_args = []  # type: List[Optional[Type]]
         for arg in args:
             if isinstance(arg, UninhabitedType) or has_erased_component(arg):
                 new_args.append(None)
@@ -757,7 +810,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
             inferred_args = infer_function_type_arguments(
                 callee_type, pass1_args, arg_kinds, formal_to_actual,
-                strict=self.chk.in_checked_function())  # type: List[Type]
+                strict=self.chk.in_checked_function())
 
             if 2 in arg_pass_nums:
                 # Second pass of type inference.
@@ -774,15 +827,16 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 #       if they shuffle type variables around, as we assume that there is a 1-1
                 #       correspondence with dict type variables. This is a marginal issue and
                 #       a little tricky to fix so it's left unfixed for now.
-                if isinstance(inferred_args[0], (NoneTyp, UninhabitedType)):
+                first_arg = inferred_args[0]
+                if isinstance(first_arg, (NoneTyp, UninhabitedType)):
                     inferred_args[0] = self.named_type('builtins.str')
-                elif not is_subtype(self.named_type('builtins.str'), inferred_args[0]):
+                elif not first_arg or not is_subtype(self.named_type('builtins.str'), first_arg):
                     self.msg.fail(messages.KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE,
                                   context)
         else:
             # In dynamically typed functions use implicit 'Any' types for
             # type variables.
-            inferred_args = [AnyType()] * len(callee_type.variables)
+            inferred_args = [AnyType(TypeOfAny.unannotated)] * len(callee_type.variables)
         return self.apply_inferred_arguments(callee_type, inferred_args,
                                              context)
 
@@ -791,8 +845,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             args: List[Expression],
             arg_kinds: List[int],
             formal_to_actual: List[List[int]],
-            inferred_args: List[Type],
-            context: Context) -> Tuple[CallableType, List[Type]]:
+            old_inferred_args: Sequence[Optional[Type]],
+            context: Context) -> Tuple[CallableType, List[Optional[Type]]]:
         """Perform second pass of generic function type argument inference.
 
         The second pass is needed for arguments with types such as Callable[[T], S],
@@ -807,6 +861,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         # None or erased types in inferred types mean that there was not enough
         # information to infer the argument. Replace them with None values so
         # that they are not applied yet below.
+        inferred_args = list(old_inferred_args)
         for i, arg in enumerate(inferred_args):
             if isinstance(arg, (NoneTyp, UninhabitedType)) or has_erased_component(arg):
                 inferred_args[i] = None
@@ -839,7 +894,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         return res
 
     def apply_inferred_arguments(self, callee_type: CallableType,
-                                 inferred_args: List[Type],
+                                 inferred_args: Sequence[Optional[Type]],
                                  context: Context) -> CallableType:
         """Apply inferred values of type arguments to a generic function.
 
@@ -853,21 +908,22 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 # Could not infer a non-trivial type for a type variable.
                 self.msg.could_not_infer_type_arguments(
                     callee_type, i + 1, context)
-                inferred_args = [AnyType()] * len(inferred_args)
+                inferred_args = [AnyType(TypeOfAny.from_error)] * len(inferred_args)
         # Apply the inferred types to the function type. In this case the
         # return type must be CallableType, since we give the right number of type
         # arguments.
         return self.apply_generic_arguments(callee_type, inferred_args, context)
 
     def check_argument_count(self, callee: CallableType, actual_types: List[Type],
-                             actual_kinds: List[int], actual_names: List[str],
+                             actual_kinds: List[int],
+                             actual_names: Optional[Sequence[Optional[str]]],
                              formal_to_actual: List[List[int]],
-                             context: Context,
+                             context: Optional[Context],
                              messages: Optional[MessageBuilder]) -> bool:
         """Check that there is a value for all required arguments to a function.
 
         Also check that there are no duplicate values for arguments. Report found errors
-        using 'messages' if it's not None.
+        using 'messages' if it's not None. If 'messages' is given, 'context' must also be given.
 
         Return False if there were any errors. Otherwise return True
         """
@@ -889,11 +945,16 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 ok = False
                 if kind != nodes.ARG_NAMED:
                     if messages:
+                        assert context, "Internal error: messages given without context"
                         messages.too_many_arguments(callee, context)
                 else:
                     if messages:
+                        assert context, "Internal error: messages given without context"
+                        assert actual_names, "Internal error: named kinds without names given"
+                        act_name = actual_names[i]
+                        assert act_name is not None
                         messages.unexpected_keyword_argument(
-                            callee, actual_names[i], context)
+                            callee, act_name, context)
                     is_unexpected_arg_error = True
             elif kind == nodes.ARG_STAR and (
                     nodes.ARG_STAR not in formal_kinds):
@@ -902,6 +963,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                     if all_actuals.count(i) < len(actual_type.items):
                         # Too many tuple items as some did not match.
                         if messages:
+                            assert context, "Internal error: messages given without context"
                             messages.too_many_arguments(callee, context)
                         ok = False
                 # *args can be applied even if the function takes a fixed
@@ -912,13 +974,17 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                                           not is_unexpected_arg_error):
                 # No actual for a mandatory positional formal.
                 if messages:
+                    assert context, "Internal error: messages given without context"
                     messages.too_few_arguments(callee, context, actual_names)
                 ok = False
             elif kind == nodes.ARG_NAMED and (not formal_to_actual[i] and
                                               not is_unexpected_arg_error):
                 # No actual for a mandatory named formal
                 if messages:
-                    messages.missing_named_argument(callee, context, callee.arg_names[i])
+                    argname = callee.arg_names[i]
+                    assert argname is not None
+                    assert context, "Internal error: messages given without context"
+                    messages.missing_named_argument(callee, context, argname)
                 ok = False
             elif kind in [nodes.ARG_POS, nodes.ARG_OPT,
                           nodes.ARG_NAMED, nodes.ARG_NAMED_OPT] and is_duplicate_mapping(
@@ -926,12 +992,14 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 if (self.chk.in_checked_function() or
                         isinstance(actual_types[formal_to_actual[i][0]], TupleType)):
                     if messages:
+                        assert context, "Internal error: messages given without context"
                         messages.duplicate_argument_value(callee, i, context)
                     ok = False
             elif (kind in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT) and formal_to_actual[i] and
                   actual_kinds[formal_to_actual[i][0]] not in [nodes.ARG_NAMED, nodes.ARG_STAR2]):
                 # Positional argument when expecting a keyword argument.
                 if messages:
+                    assert context, "Internal error: messages given without context"
                     messages.too_many_positional_arguments(callee, context)
                 ok = False
         return ok
@@ -940,8 +1008,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                              callee: CallableType,
                              formal_to_actual: List[List[int]],
                              context: Context,
-                             messages: MessageBuilder = None,
-                             check_arg: ArgChecker = None) -> None:
+                             messages: Optional[MessageBuilder] = None,
+                             check_arg: Optional[ArgChecker] = None) -> None:
         """Check argument types against a callable type.
 
         Report errors if the argument types are not compatible.
@@ -997,24 +1065,33 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         """Check the type of a single argument in a call."""
         if isinstance(caller_type, DeletedType):
             messages.deleted_as_rvalue(caller_type, context)
-        # Only non-abstract class can be given where Type[...] is expected...
+        # Only non-abstract non-protocol class can be given where Type[...] is expected...
         elif (isinstance(caller_type, CallableType) and isinstance(callee_type, TypeType) and
-              caller_type.is_type_obj() and caller_type.type_object().is_abstract and
-              isinstance(callee_type.item, Instance) and callee_type.item.type.is_abstract and
+              caller_type.is_type_obj() and
+              (caller_type.type_object().is_abstract or caller_type.type_object().is_protocol) and
+              isinstance(callee_type.item, Instance) and
+              (callee_type.item.type.is_abstract or callee_type.item.type.is_protocol) and
               # ...except for classmethod first argument
               not caller_type.is_classmethod_class):
-            messages.fail("Only non-abstract class can be given where '{}' is expected"
-                          .format(callee_type), context)
+            self.msg.concrete_only_call(callee_type, context)
         elif not is_subtype(caller_type, callee_type):
             if self.chk.should_suppress_optional_error([caller_type, callee_type]):
                 return
             messages.incompatible_argument(n, m, callee, original_caller_type,
                                            caller_kind, context)
+            if (isinstance(original_caller_type, (Instance, TupleType, TypedDictType)) and
+                    isinstance(callee_type, Instance) and callee_type.type.is_protocol):
+                self.msg.report_protocol_problems(original_caller_type, callee_type, context)
+            if (isinstance(callee_type, CallableType) and
+                    isinstance(original_caller_type, Instance)):
+                call = find_member('__call__', original_caller_type, original_caller_type)
+                if call:
+                    self.msg.note_call(original_caller_type, call, context)
 
     def overload_call_target(self, arg_types: List[Type], arg_kinds: List[int],
-                             arg_names: List[str],
+                             arg_names: Optional[Sequence[Optional[str]]],
                              overload: Overloaded, context: Context,
-                             messages: MessageBuilder = None) -> Type:
+                             messages: Optional[MessageBuilder] = None) -> Type:
         """Infer the correct overload item to call with given argument types.
 
         The return value may be CallableType or AnyType (if a unique item
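
(Illustration only.) A sketch of the stricter check_arg behaviour above: an abstract (and now also a protocol) class is rejected where a Type[...] of an abstract or protocol type is expected, while calling the class object received through Type[...] stays allowed.

    from abc import ABC, abstractmethod
    from typing import Type

    class Backend(ABC):
        @abstractmethod
        def run(self) -> None: ...

    def make(cls: Type[Backend]) -> Backend:
        return cls()     # allowed: cls is assumed to be a concrete subclass

    make(Backend)        # error: only a concrete class can be given where
                         # 'Type[Backend]' is expected
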
@@ -1031,8 +1108,10 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             if similarity > 0 and similarity >= best_match:
                 if (match and not is_same_type(match[-1].ret_type,
                                                typ.ret_type) and
-                    not mypy.checker.is_more_precise_signature(
-                        match[-1], typ)):
+                    (not mypy.checker.is_more_precise_signature(match[-1], typ)
+                     or (any(isinstance(arg, AnyType) for arg in arg_types)
+                         and any_arg_causes_overload_ambiguity(
+                             match + [typ], arg_types, arg_kinds, arg_names)))):
                     # Ambiguous return type. Either the function overload is
                     # overlapping (which we don't handle very well here) or the
                     # caller has provided some Any argument types; in either
@@ -1051,14 +1130,14 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                     #
                     # TODO: Consider returning a union type instead if the
                     #       overlapping is NOT due to Any types?
-                    return AnyType()
+                    return AnyType(TypeOfAny.special_form)
                 else:
                     match.append(typ)
                 best_match = max(best_match, similarity)
         if not match:
             if not self.chk.should_suppress_optional_error(arg_types):
                 messages.no_variant_matches_arguments(overload, arg_types, context)
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         else:
             if len(match) == 1:
                 return match[0]
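
(Illustration only.) A sketch of the overload situation the new any_arg_causes_overload_ambiguity guard is aimed at: when an Any argument leaves several variants with different return types plausible, the call falls back to Any rather than committing to one variant.

    from typing import Any, overload

    @overload
    def parse(x: int) -> int: ...
    @overload
    def parse(x: str) -> str: ...
    def parse(x: Any) -> Any:
        return x

    value: Any = 0
    reveal_type(parse(value))   # expected to be revealed as Any, since the Any
                                # argument makes both variants plausible
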
@@ -1073,7 +1152,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 return match[0]
 
     def erased_signature_similarity(self, arg_types: List[Type], arg_kinds: List[int],
-                                    arg_names: List[str], callee: CallableType,
+                                    arg_names: Optional[Sequence[Optional[str]]],
+                                    callee: CallableType,
                                     context: Context) -> int:
         """Determine whether arguments could match the signature at runtime.
 
@@ -1113,7 +1193,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         return similarity
 
     def match_signature_types(self, arg_types: List[Type], arg_kinds: List[int],
-                              arg_names: List[str], callee: CallableType,
+                              arg_names: Optional[Sequence[Optional[str]]], callee: CallableType,
                               context: Context) -> bool:
         """Determine whether arguments types match the signature.
 
@@ -1139,7 +1219,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                                   context=context, check_arg=check_arg)
         return ok
 
-    def apply_generic_arguments(self, callable: CallableType, types: List[Type],
+    def apply_generic_arguments(self, callable: CallableType, types: Sequence[Optional[Type]],
                                 context: Context) -> CallableType:
         """Simple wrapper around mypy.applytype.apply_generic_arguments."""
         return applytype.apply_generic_arguments(callable, types, self.msg, context)
@@ -1192,13 +1272,12 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
         if dunder_get is None:
             self.msg.fail("{}.__get__ is not callable".format(descriptor_type), context)
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
 
         function = function_type(dunder_get, self.named_type('builtins.function'))
         bound_method = bind_self(function, descriptor_type)
         typ = map_instance_to_supertype(descriptor_type, dunder_get.info)
         dunder_get_type = expand_type_by_instance(bound_method, typ)
-        owner_type = None  # type: Type
 
         if isinstance(instance_type, FunctionLike) and instance_type.is_type_obj():
             owner_type = instance_type.items()[0].ret_type
@@ -1220,7 +1299,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
         if not isinstance(inferred_dunder_get_type, CallableType):
             self.msg.fail("{}.__get__ is not callable".format(descriptor_type), context)
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
 
         return inferred_dunder_get_type.ret_type
 
@@ -1304,13 +1383,13 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         Comparison expressions are type checked consecutive-pair-wise
         That is, 'a < b > c == d' is checked as 'a < b and b > c and c == d'
         """
-        result = None  # type: mypy.types.Type
+        result = None
 
         # Check each consecutive operand pair and their operator
         for left, right, operator in zip(e.operands, e.operands[1:], e.operators):
             left_type = self.accept(left)
 
-            method_type = None  # type: mypy.types.Type
+            method_type = None  # type: Optional[mypy.types.Type]
 
             if operator == 'in' or operator == 'not in':
                 right_type = self.accept(right)  # TODO only evaluate if needed
@@ -1360,6 +1439,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             else:
                 result = join.join_types(result, sub_result)
 
+        assert result is not None
         return result
 
     def get_operator_method(self, op: str) -> str:
@@ -1424,10 +1504,12 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                     # If the right operand has type Any, we can't make any
                     # conjectures about the type of the result, since the
                     # operand could have a __r method that returns anything.
-                    result = AnyType(), result[1]
+                    any_type = AnyType(TypeOfAny.from_another_any, source_any=arg_type)
+                    result = any_type, result[1]
             success = not local_errors.is_errors()
         else:
-            result = AnyType(), AnyType()
+            error_any = AnyType(TypeOfAny.from_error)
+            result = error_any, error_any
             success = False
         if success or not allow_reverse or isinstance(base_type, AnyType):
             # We were able to call the normal variant of the operator method,
@@ -1519,7 +1601,16 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         elif e.right_always:
             left_map = None
 
-        right_type = self.analyze_cond_branch(right_map, e.right, left_type)
+        # If right_map is None then we know mypy considers the right branch
+        # to be unreachable and therefore any errors found in the right branch
+        # should be suppressed.
+        if right_map is None:
+            self.msg.disable_errors()
+        try:
+            right_type = self.analyze_cond_branch(right_map, e.right, left_type)
+        finally:
+            if right_map is None:
+                self.msg.enable_errors()
 
         if right_map is None:
             # The boolean expression is statically known to be the left value
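
(Illustration only.) A sketch of the suppression added above: when mypy decides the right operand of a boolean operator can never be evaluated, errors found while analyzing it are no longer reported (shown here under strict optional, where the narrowing makes the right side unreachable).

    def f(x: str) -> None:
        # 'x is None' can never be true for a plain str, so the right operand is
        # unreachable and the bad attribute access there is no longer reported.
        flag = x is None and x.no_such_method()
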
@@ -1607,7 +1698,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                     return left_type.items[n]
                 else:
                     self.chk.fail(messages.TUPLE_INDEX_OUT_OF_RANGE, e)
-                    return AnyType()
+                    return AnyType(TypeOfAny.from_error)
             else:
                 return self.nonliteral_tuple_index_helper(left_type, index)
         elif isinstance(left_type, TypedDictType):
@@ -1621,9 +1712,9 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             return result
 
     def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Type:
-        begin = None  # type: int
-        end = None  # type: int
-        stride = None  # type:int
+        begin = None
+        end = None
+        stride = None
 
         if slic.begin_index:
             begin = self._get_value(slic.begin_index)
@@ -1649,7 +1740,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         if not self.chk.check_subtype(index_type, expected_type, index,
                                       messages.INVALID_TUPLE_INDEX_TYPE,
                                       'actual type', 'expected type'):
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         else:
             return UnionType.make_simplified_union(left_type.items)
 
@@ -1666,13 +1757,13 @@ class ExpressionChecker(ExpressionVisitor[Type]):
     def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) -> Type:
         if not isinstance(index, (StrExpr, UnicodeExpr)):
             self.msg.typeddict_key_must_be_string_literal(td_type, index)
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         item_name = index.value
 
         item_type = td_type.items.get(item_name)
         if item_type is None:
             self.msg.typeddict_key_not_found(td_type, item_name, index)
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         return item_type
 
     def visit_enum_index_expr(self, enum_type: TypeInfo, index: Expression,
@@ -1687,8 +1778,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
     def visit_cast_expr(self, expr: CastExpr) -> Type:
         """Type check a cast expression."""
-        source_type = self.accept(expr.expr, type_context=AnyType(), allow_none_return=True,
-                                  always_allow_any=True)
+        source_type = self.accept(expr.expr, type_context=AnyType(TypeOfAny.special_form),
+                                  allow_none_return=True, always_allow_any=True)
         target_type = expr.type
         options = self.chk.options
         if options.warn_redundant_casts and is_same_type(source_type, target_type):
@@ -1704,6 +1795,8 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         revealed_type = self.accept(expr.expr, type_context=self.type_context[-1])
         if not self.chk.current_node_deferred:
             self.msg.reveal_type(revealed_type, expr)
+            if not self.chk.in_checked_function():
+                self.msg.note("'reveal_type' always outputs 'Any' in unchecked functions", expr)
         return revealed_type
 
     def visit_type_application(self, tapp: TypeApplication) -> Type:
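
(Illustration only.) The new note above fires for reveal_type() used inside unchecked, i.e. unannotated, functions:

    def f(x):           # no annotations, so the body is not type checked
        reveal_type(x)  # note: 'reveal_type' always outputs 'Any' in unchecked functions
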
@@ -1715,7 +1808,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             if len(tp.variables) != len(tapp.types):
                 self.msg.incompatible_type_application(len(tp.variables),
                                                        len(tapp.types), tapp)
-                return AnyType()
+                return AnyType(TypeOfAny.from_error)
             return self.apply_generic_arguments(tp, tapp.types, tapp)
         elif isinstance(tp, Overloaded):
             if not tp.is_type_obj():
@@ -1724,16 +1817,18 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 if len(item.variables) != len(tapp.types):
                     self.msg.incompatible_type_application(len(item.variables),
                                                            len(tapp.types), tapp)
-                    return AnyType()
+                    return AnyType(TypeOfAny.from_error)
             return Overloaded([self.apply_generic_arguments(item, tapp.types, tapp)
                                for item in tp.items()])
-        return AnyType()
+        if isinstance(tp, AnyType):
+            return AnyType(TypeOfAny.from_another_any, source_any=tp)
+        return AnyType(TypeOfAny.special_form)
 
     def visit_type_alias_expr(self, alias: TypeAliasExpr) -> Type:
         """Get type of a type alias (could be generic) in a runtime expression."""
         if isinstance(alias.type, Instance) and alias.type.invalid:
             # An invalid alias, error already has been reported
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         item = alias.type
         if not alias.in_runtime:
             # We don't replace TypeVar's with Any for alias used as Alias[T](42).
@@ -1750,17 +1845,17 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             if len(tp.variables) != len(item.args):
                 self.msg.incompatible_type_application(len(tp.variables),
                                                        len(item.args), item)
-                return AnyType()
+                return AnyType(TypeOfAny.from_error)
             return self.apply_generic_arguments(tp, item.args, item)
         elif isinstance(tp, Overloaded):
             for it in tp.items():
                 if len(it.variables) != len(item.args):
                     self.msg.incompatible_type_application(len(it.variables),
                                                            len(item.args), item)
-                    return AnyType()
+                    return AnyType(TypeOfAny.from_error)
             return Overloaded([self.apply_generic_arguments(it, item.args, item)
                                for it in tp.items()])
-        return AnyType()
+        return AnyType(TypeOfAny.special_form)
 
     def visit_list_expr(self, e: ListExpr) -> Type:
         """Type check a list expression [...]."""
@@ -1810,7 +1905,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
         if isinstance(type_context, TupleType):
             type_context_items = type_context.items
-        elif is_named_instance(type_context, 'builtins.tuple'):
+        elif type_context and is_named_instance(type_context, 'builtins.tuple'):
             assert isinstance(type_context, Instance)
             if type_context.args:
                 type_context_items = [type_context.args[0]] * len(e.items)
@@ -1825,7 +1920,6 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         j = 0  # Index into type_context_items; irrelevant if type_context_items is none
         for i in range(len(e.items)):
             item = e.items[i]
-            tt = None  # type: Type
             if isinstance(item, StarExpr):
                 # Special handling for star expressions.
                 # TODO: If there's a context, and item.expr is a
@@ -1881,6 +1975,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         vtdef = TypeVarDef('VT', -2, [], self.object_type())
         kt = TypeVarType(ktdef)
         vt = TypeVarType(vtdef)
+        rv = None
         # Call dict(*args), unless it's empty and stargs is not.
         if args or not stargs:
             # The callable type represents a function like this:
@@ -1897,7 +1992,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             rv = self.check_call(constructor, args, [nodes.ARG_POS] * len(args), e)[0]
         else:
             # dict(...) will be called below.
-            rv = None
+            pass
         # Call rv.update(arg) for each arg in **stargs,
         # except if rv isn't set yet, then set rv = dict(arg).
         if stargs:
@@ -1915,9 +2010,10 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 else:
                     method = self.analyze_external_member_access('update', rv, arg)
                     self.check_call(method, [arg], [nodes.ARG_POS], arg)
+        assert rv is not None
         return rv
 
-    def find_typeddict_context(self, context: Type) -> Optional[TypedDictType]:
+    def find_typeddict_context(self, context: Optional[Type]) -> Optional[TypedDictType]:
         if isinstance(context, TypedDictType):
             return context
         elif isinstance(context, UnionType):
@@ -1936,7 +2032,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         """Type check lambda expression."""
         inferred_type, type_override = self.infer_lambda_type_using_context(e)
         if not inferred_type:
-            self.chk.return_types.append(AnyType())
+            self.chk.return_types.append(AnyType(TypeOfAny.special_form))
             # No useful type context.
             ret_type = self.accept(e.expr(), allow_none_return=True)
             fallback = self.named_type('builtins.function')
@@ -1990,7 +2086,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             # Fill in Any arguments to match the arguments of the lambda.
             callable_ctx = callable_ctx.copy_modified(
                 is_ellipsis_args=False,
-                arg_types=[AnyType()] * len(arg_kinds),
+                arg_types=[AnyType(TypeOfAny.special_form)] * len(arg_kinds),
                 arg_kinds=arg_kinds
             )
 
@@ -2006,9 +2102,67 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
     def visit_super_expr(self, e: SuperExpr) -> Type:
         """Type check a super expression (non-lvalue)."""
+        self.check_super_arguments(e)
         t = self.analyze_super(e, False)
         return t
 
+    def check_super_arguments(self, e: SuperExpr) -> None:
+        """Check arguments in a super(...) call."""
+        if ARG_STAR in e.call.arg_kinds:
+            self.chk.fail('Varargs not supported with "super"', e)
+        elif e.call.args and set(e.call.arg_kinds) != {ARG_POS}:
+            self.chk.fail('"super" only accepts positional arguments', e)
+        elif len(e.call.args) == 1:
+            self.chk.fail('"super" with a single argument not supported', e)
+        elif len(e.call.args) > 2:
+            self.chk.fail('Too many arguments for "super"', e)
+        elif self.chk.options.python_version[0] == 2 and len(e.call.args) == 0:
+            self.chk.fail('Too few arguments for "super"', e)
+        elif len(e.call.args) == 2:
+            type_obj_type = self.accept(e.call.args[0])
+            instance_type = self.accept(e.call.args[1])
+            if isinstance(type_obj_type, FunctionLike) and type_obj_type.is_type_obj():
+                type_info = type_obj_type.type_object()
+            elif isinstance(type_obj_type, TypeType):
+                item = type_obj_type.item
+                if isinstance(item, AnyType):
+                    # Could be anything.
+                    return
+                if isinstance(item, TupleType):
+                    item = item.fallback  # Handle named tuples and other Tuple[...] subclasses.
+                if not isinstance(item, Instance):
+                    # A complicated type object type. Too tricky, give up.
+                    # TODO: Do something more clever here.
+                    self.chk.fail('Unsupported argument 1 for "super"', e)
+                    return
+                type_info = item.type
+            elif isinstance(type_obj_type, AnyType):
+                return
+            else:
+                self.msg.first_argument_for_super_must_be_type(type_obj_type, e)
+                return
+
+            if isinstance(instance_type, (Instance, TupleType, TypeVarType)):
+                if isinstance(instance_type, TypeVarType):
+                    # Needed for generic self.
+                    instance_type = instance_type.upper_bound
+                    if not isinstance(instance_type, (Instance, TupleType)):
+                        # Too tricky, give up.
+                        # TODO: Do something more clever here.
+                        self.chk.fail(messages.UNSUPPORTED_ARGUMENT_2_FOR_SUPER, e)
+                        return
+                if isinstance(instance_type, TupleType):
+                    # Needed for named tuples and other Tuple[...] subclasses.
+                    instance_type = instance_type.fallback
+                if type_info not in instance_type.type.mro:
+                    self.chk.fail('Argument 2 for "super" not an instance of argument 1', e)
+            elif isinstance(instance_type, TypeType) or (isinstance(instance_type, FunctionLike)
+                                                         and instance_type.is_type_obj()):
+                # TODO: Check whether this is a valid type object here.
+                pass
+            elif not isinstance(instance_type, AnyType):
+                self.chk.fail(messages.UNSUPPORTED_ARGUMENT_2_FOR_SUPER, e)
+
     def analyze_super(self, e: SuperExpr, is_lvalue: bool) -> Type:
         """Type check a super expression."""
         if e.info and e.info.bases:
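
(Illustration only.) A few call shapes covered by the new check_super_arguments validation; the error texts are the ones introduced above.

    class A:
        def f(self) -> None: ...

    class B(A):
        def f(self) -> None:
            super().f()        # accepted
            super(B, self).f() # accepted
            super(B).f()       # error: "super" with a single argument not supported
            super(B, 1).f()    # error: Argument 2 for "super" not an instance of argument 1
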
@@ -2016,24 +2170,29 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             if len(e.info.mro) < 2:
                 self.chk.fail('Internal error: unexpected mro for {}: {}'.format(
                     e.info.name(), e.info.mro), e)
-                return AnyType()
+                return AnyType(TypeOfAny.from_error)
             for base in e.info.mro[1:]:
                 if e.name in base.names or base == e.info.mro[-1]:
                     if e.info.fallback_to_any and base == e.info.mro[-1]:
                         # There's an undefined base class, and we're
                         # at the end of the chain.  That's not an error.
-                        return AnyType()
+                        return AnyType(TypeOfAny.special_form)
                     if not self.chk.in_checked_function():
-                        return AnyType()
+                        return AnyType(TypeOfAny.unannotated)
                     if self.chk.scope.active_class() is not None:
                         self.chk.fail('super() outside of a method is not supported', e)
-                        return AnyType()
-                    args = self.chk.scope.top_function().arguments
-                    # An empty args with super() is an error; we need something in declared_self
+                        return AnyType(TypeOfAny.from_error)
+                    method = self.chk.scope.top_function()
+                    assert method is not None
+                    args = method.arguments
+                    # super() in a function with empty args is an error; we
+                    # need something in declared_self.
                     if not args:
-                        self.chk.fail('super() requires at least one positional argument', e)
-                        return AnyType()
-                    declared_self = args[0].variable.type
+                        self.chk.fail(
+                            'super() requires one or more positional arguments in '
+                            'enclosing function', e)
+                        return AnyType(TypeOfAny.from_error)
+                    declared_self = args[0].variable.type or fill_typevars(e.info)
                     return analyze_member_access(name=e.name, typ=fill_typevars(e.info), node=e,
                                                  is_lvalue=False, is_super=True, is_operator=False,
                                                  builtin_type=self.named_type,
@@ -2043,7 +2202,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             assert False, 'unreachable'
         else:
             # Invalid super. This has been reported by the semantic analyzer.
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
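
As a user-level illustration (not part of this patch, and only a sketch of the intended behaviour), the two new super() checks above are aimed at code like the following; the diagnostics quoted in the comments are the messages added in this hunk:

    class Base:
        def greet(self) -> str:
            return "base"

    class Child(Base):
        def greet(self) -> str:
            # OK: the enclosing method has 'self', so declared_self is available.
            return super().greet() + "/child"

        @staticmethod
        def shout() -> str:
            # Should be flagged: super() requires one or more positional
            # arguments in enclosing function (no 'self'/'cls' to bind to).
            return super().greet()

    class Unrelated:
        pass

    def wrong_pair() -> None:
        # Should be flagged: Argument 2 for "super" not an instance of argument 1.
        super(Child, Unrelated()).greet()
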
 
     def visit_slice_expr(self, e: SliceExpr) -> Type:
         expected = make_optional_type(self.named_type('builtins.int'))
@@ -2195,7 +2354,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
 
     def accept(self,
                node: Expression,
-               type_context: Type = None,
+               type_context: Optional[Type] = None,
                allow_none_return: bool = False,
                always_allow_any: bool = False,
                ) -> Type:
@@ -2226,7 +2385,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             self.msg.disallowed_any_type(typ, node)
 
         if not self.chk.in_checked_function():
-            return AnyType()
+            return AnyType(TypeOfAny.unannotated)
         else:
             return typ
 
@@ -2240,7 +2399,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         """Is a type valid as a *args argument?"""
         return (isinstance(typ, TupleType) or
                 is_subtype(typ, self.chk.named_generic_type('typing.Iterable',
-                                                            [AnyType()])) or
+                                                            [AnyType(TypeOfAny.special_form)])) or
                 isinstance(typ, AnyType))
 
     def is_valid_keyword_var_arg(self, typ: Type) -> bool:
@@ -2248,18 +2407,18 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         if self.chk.options.python_version[0] >= 3:
             return is_subtype(typ, self.chk.named_generic_type(
                 'typing.Mapping', [self.named_type('builtins.str'),
-                                   AnyType()]))
+                                   AnyType(TypeOfAny.special_form)]))
         else:
             return (
                 is_subtype(typ, self.chk.named_generic_type(
                     'typing.Mapping',
                     [self.named_type('builtins.str'),
-                     AnyType()]))
+                     AnyType(TypeOfAny.special_form)]))
                 or
                 is_subtype(typ, self.chk.named_generic_type(
                     'typing.Mapping',
                     [self.named_type('builtins.unicode'),
-                     AnyType()])))
+                     AnyType(TypeOfAny.special_form)])))
 
     def has_member(self, typ: Type, member: str) -> bool:
         """Does type have member with the given name?"""
@@ -2306,7 +2465,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             expected_type = self.chk.named_generic_type('typing.Awaitable', [expected_type])
         actual_type = self.accept(e.expr, expected_type)
         if isinstance(actual_type, AnyType):
-            return AnyType()
+            return AnyType(TypeOfAny.from_another_any, source_any=actual_type)
         return self.check_awaitable_expr(actual_type, e, messages.INCOMPATIBLE_TYPES_IN_AWAIT)
 
     def check_awaitable_expr(self, t: Type, ctx: Context, msg: str) -> Type:
@@ -2316,7 +2475,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         """
         if not self.chk.check_subtype(t, self.named_type('typing.Awaitable'), ctx,
                                       msg, 'actual type', 'expected type'):
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
         else:
             method = self.analyze_external_member_access('__await__', t, ctx)
             generator = self.check_call(method, [], [], ctx)[0]
@@ -2329,29 +2488,33 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         # thus decorated.  But it accepts a generator regardless of
         # how it's decorated.
         return_type = self.chk.return_types[-1]
-        subexpr_type = self.accept(e.expr, return_type)
-        iter_type = None  # type: Type
+        # TODO: What should the context for the sub-expression be?
+        # If the containing function has type Generator[X, Y, ...],
+        # the context should be Generator[X, Y, T], where T is the
+        # context of the 'yield from' itself (but it isn't known).
+        subexpr_type = self.accept(e.expr)
 
         # Check that the expr is an instance of Iterable and get the type of the iterator produced
         # by __iter__.
         if isinstance(subexpr_type, AnyType):
-            iter_type = AnyType()
+            iter_type = AnyType(TypeOfAny.from_another_any, source_any=subexpr_type)  # type: Type
         elif self.chk.type_is_iterable(subexpr_type):
             if is_async_def(subexpr_type) and not has_coroutine_decorator(return_type):
                 self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e)
             iter_method_type = self.analyze_external_member_access(
                 '__iter__',
                 subexpr_type,
-                AnyType())
+                AnyType(TypeOfAny.special_form))
 
+            any_type = AnyType(TypeOfAny.special_form)
             generic_generator_type = self.chk.named_generic_type('typing.Generator',
-                                                                 [AnyType(), AnyType(), AnyType()])
+                                                                 [any_type, any_type, any_type])
             iter_type, _ = self.check_call(iter_method_type, [], [],
                                            context=generic_generator_type)
         else:
             if not (is_async_def(subexpr_type) and has_coroutine_decorator(return_type)):
                 self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e)
-                iter_type = AnyType()
+                iter_type = AnyType(TypeOfAny.from_error)
             else:
                 iter_type = self.check_awaitable_expr(subexpr_type, e,
                                                       messages.INCOMPATIBLE_TYPES_IN_YIELD_FROM)
@@ -2373,7 +2536,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
             # Non-Generators don't return anything from `yield from` expressions.
             # However special-case Any (which might be produced by an error).
             if isinstance(actual_item_type, AnyType):
-                expr_type = AnyType()
+                expr_type = AnyType(TypeOfAny.from_another_any, source_any=actual_item_type)
             else:
                 expr_type = NoneTyp()
 
@@ -2385,10 +2548,10 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         return e.type
 
     def visit_type_var_expr(self, e: TypeVarExpr) -> Type:
-        return AnyType(special_form=True)
+        return AnyType(TypeOfAny.special_form)
 
     def visit_newtype_expr(self, e: NewTypeExpr) -> Type:
-        return AnyType(special_form=True)
+        return AnyType(TypeOfAny.special_form)
 
     def visit_namedtuple_expr(self, e: NamedTupleExpr) -> Type:
         tuple_type = e.info.tuple_type
@@ -2398,7 +2561,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                 self.msg.unimported_type_becomes_any("NamedTuple type", tuple_type, e)
             check_for_explicit_any(tuple_type, self.chk.options, self.chk.is_typeshed_stub,
                                    self.msg, context=e)
-        return AnyType(special_form=True)
+        return AnyType(TypeOfAny.special_form)
 
     def visit_enum_call_expr(self, e: EnumCallExpr) -> Type:
         for name, value in zip(e.items, e.values):
@@ -2413,10 +2576,10 @@ class ExpressionChecker(ExpressionVisitor[Type]):
                         # to have type Any in the typeshed stub.)
                         var.type = typ
                         var.is_inferred = True
-        return AnyType(special_form=True)
+        return AnyType(TypeOfAny.special_form)
 
     def visit_typeddict_expr(self, e: TypedDictExpr) -> Type:
-        return AnyType(special_form=True)
+        return AnyType(TypeOfAny.special_form)
 
     def visit__promote_expr(self, e: PromoteExpr) -> Type:
         return e.type
@@ -2433,7 +2596,7 @@ class ExpressionChecker(ExpressionVisitor[Type]):
         return self.named_type('builtins.bool')
 
     def narrow_type_from_binder(self, expr: Expression, known_type: Type) -> Type:
-        if expr.literal >= LITERAL_TYPE:
+        if literal(expr) >= LITERAL_TYPE:
             restriction = self.chk.binder.get(expr)
             if restriction:
                 ans = narrow_declared_type(known_type, restriction)
@@ -2451,7 +2614,7 @@ class HasAnyType(types.TypeQuery[bool]):
         super().__init__(any)
 
     def visit_any(self, t: AnyType) -> bool:
-        return not t.special_form  # special forms are not real Any types
+        return t.type_of_any != TypeOfAny.special_form  # special forms are not real Any types
 
 
 def has_coroutine_decorator(t: Type) -> bool:
@@ -2481,9 +2644,9 @@ def is_async_def(t: Type) -> bool:
 
 
 def map_actuals_to_formals(caller_kinds: List[int],
-                           caller_names: List[str],
+                           caller_names: Optional[Sequence[Optional[str]]],
                            callee_kinds: List[int],
-                           callee_names: List[str],
+                           callee_names: List[Optional[str]],
                            caller_arg_type: Callable[[int],
                                                      Type]) -> List[List[int]]:
     """Calculate mapping between actual (caller) args and formals.
@@ -2531,6 +2694,7 @@ def map_actuals_to_formals(caller_kinds: List[int],
                         break
                     j += 1
         elif kind in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT):
+            assert caller_names is not None, "Internal error: named kinds without names given"
             name = caller_names[i]
             if name in callee_names:
                 map[callee_names.index(name)].append(i)
@@ -2591,7 +2755,7 @@ class HasTypeVarQuery(types.TypeQuery[bool]):
         return True
 
 
-def has_erased_component(t: Type) -> bool:
+def has_erased_component(t: Optional[Type]) -> bool:
     return t is not None and t.accept(HasErasedComponentsQuery())
 
 
@@ -2677,6 +2841,8 @@ def overload_arg_similarity(actual: Type, formal: Type) -> int:
             # subtyping algorithm if type promotions are possible (e.g., int vs. float).
             if formal.type in actual.type.mro:
                 return 2
+            elif formal.type.is_protocol and is_subtype(actual, erasetype.erase_type(formal)):
+                return 2
             elif actual.type._promote and is_subtype(actual, formal):
                 return 1
             else:
@@ -2685,7 +2851,7 @@ def overload_arg_similarity(actual: Type, formal: Type) -> int:
             item = actual.item
             if formal.type.fullname() in {"builtins.object", "builtins.type"}:
                 return 2
-            elif isinstance(item, Instance):
+            elif isinstance(item, Instance) and item.type.metaclass_type:
                 # FIX: this does not handle e.g. Union of instances
                 return overload_arg_similarity(item.type.metaclass_type, formal)
             else:
@@ -2697,3 +2863,72 @@ def overload_arg_similarity(actual: Type, formal: Type) -> int:
         return 2
     # Fall back to a conservative equality check for the remaining kinds of type.
     return 2 if is_same_type(erasetype.erase_type(actual), erasetype.erase_type(formal)) else 0
+
+
+def any_arg_causes_overload_ambiguity(items: List[CallableType],
+                                      arg_types: List[Type],
+                                      arg_kinds: List[int],
+                                      arg_names: Optional[Sequence[Optional[str]]]) -> bool:
+    """May an Any actual argument cause ambiguous result type on call to overloaded function?
+
+    Note that this sometimes returns True even if there is no ambiguity, since a correct
+    implementation would be complex (and the call would be imprecisely typed due to Any
+    types anyway).
+
+    Args:
+        items: Overload items matching the actual arguments
+        arg_types: Actual argument types
+        arg_kinds: Actual argument kinds
+        arg_names: Actual argument names
+    """
+    actual_to_formal = [
+        map_formals_to_actuals(
+            arg_kinds, arg_names, item.arg_kinds, item.arg_names, lambda i: arg_types[i])
+        for item in items
+    ]
+
+    for arg_idx, arg_type in enumerate(arg_types):
+        if isinstance(arg_type, AnyType):
+            matching_formals_unfiltered = [(item_idx, lookup[arg_idx])
+                                           for item_idx, lookup in enumerate(actual_to_formal)
+                                           if lookup[arg_idx]]
+            matching_formals = []
+            for item_idx, formals in matching_formals_unfiltered:
+                if len(formals) > 1:
+                    # An actual maps to multiple formals -- give up as too
+                    # complex; just assume it overlaps.
+                    return True
+                matching_formals.append((item_idx, items[item_idx].arg_types[formals[0]]))
+            if (not all_same_types(t for _, t in matching_formals) and
+                    not all_same_types(items[idx].ret_type
+                                       for idx, _ in matching_formals)):
+                # Any maps to multiple different types, and the return types of these items differ.
+                return True
+    return False
+
+
+def all_same_types(types: Iterable[Type]) -> bool:
+    types = list(types)
+    if len(types) == 0:
+        return True
+    return all(is_same_type(t, types[0]) for t in types[1:])
+
+
+def map_formals_to_actuals(caller_kinds: List[int],
+                           caller_names: Optional[Sequence[Optional[str]]],
+                           callee_kinds: List[int],
+                           callee_names: List[Optional[str]],
+                           caller_arg_type: Callable[[int],
+                                                     Type]) -> List[List[int]]:
+    """Calculate the reverse mapping of map_actuals_to_formals."""
+    formal_to_actual = map_actuals_to_formals(caller_kinds,
+                                              caller_names,
+                                              callee_kinds,
+                                              callee_names,
+                                              caller_arg_type)
+    # Now reverse the mapping.
+    actual_to_formal = [[] for _ in caller_kinds]  # type: List[List[int]]
+    for formal, actuals in enumerate(formal_to_actual):
+        for actual in actuals:
+            actual_to_formal[actual].append(formal)
+    return actual_to_formal
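
The helper above is simply a transpose of the mapping computed by map_actuals_to_formals. A minimal, self-contained sketch of that reversal step (plain lists, no mypy internals):

    from typing import List

    def reverse_mapping(formal_to_actual: List[List[int]], num_actuals: int) -> List[List[int]]:
        # For each formal index, record it under every actual argument it consumes.
        actual_to_formal = [[] for _ in range(num_actuals)]  # type: List[List[int]]
        for formal, actuals in enumerate(formal_to_actual):
            for actual in actuals:
                actual_to_formal[actual].append(formal)
        return actual_to_formal

    # Two formals: formal 0 consumes actual 0, formal 1 consumes actuals 0 and 1.
    assert reverse_mapping([[0], [0, 1]], num_actuals=2) == [[0, 1], [1]]
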
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 112efcf..ea8aff8 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -4,7 +4,7 @@ from typing import cast, Callable, List, Optional, TypeVar
 
 from mypy.types import (
     Type, Instance, AnyType, TupleType, TypedDictType, CallableType, FunctionLike, TypeVarDef,
-    Overloaded, TypeVarType, UnionType, PartialType, UninhabitedType,
+    Overloaded, TypeVarType, UnionType, PartialType, UninhabitedType, TypeOfAny,
     DeletedType, NoneTyp, TypeType, function_type, get_type_vars,
 )
 from mypy.nodes import (
@@ -38,7 +38,7 @@ def analyze_member_access(name: str,
                           msg: MessageBuilder, *,
                           original_type: Type,
                           chk: 'mypy.checker.TypeChecker',
-                          override_info: TypeInfo = None) -> Type:
+                          override_info: Optional[TypeInfo] = None) -> Type:
     """Return the type of attribute `name` of typ.
 
     This is a general operation that supports various different variations:
@@ -53,12 +53,14 @@ def analyze_member_access(name: str,
     the fallback type, for example.
     original_type is always the type used in the initial call.
     """
+    # TODO: this and following functions share some logic with subtypes.find_member,
+    # consider refactoring.
     if isinstance(typ, Instance):
         if name == '__init__' and not is_super:
             # Accessing __init__ in statically typed code would compromise
             # type safety unless used via super().
             msg.fail(messages.CANNOT_ACCESS_INIT, node)
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
 
         # The base object has an instance type.
 
@@ -101,10 +103,10 @@ def analyze_member_access(name: str,
                                              original_type=original_type, chk=chk)
     elif isinstance(typ, AnyType):
         # The base object has dynamic type.
-        return AnyType()
+        return AnyType(TypeOfAny.from_another_any, source_any=typ)
     elif isinstance(typ, NoneTyp):
         if chk.should_suppress_optional_error([typ]):
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         # The only attribute NoneType has are those it inherits from object
         return analyze_member_access(name, builtin_type('builtins.object'), node, is_lvalue,
                                      is_super, is_operator, builtin_type, not_ready_callback, msg,
@@ -171,7 +173,7 @@ def analyze_member_access(name: str,
                                      original_type=original_type, chk=chk)
     elif isinstance(typ, DeletedType):
         msg.deleted_as_rvalue(typ, node)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
     elif isinstance(typ, TypeType):
         # Similar to FunctionLike + is_type_obj() above.
         item = None
@@ -202,7 +204,7 @@ def analyze_member_access(name: str,
                                      original_type=original_type, chk=chk)
 
     if chk.should_suppress_optional_error([typ]):
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
     return msg.has_no_attr(original_type, typ, name, node)
 
 
@@ -264,15 +266,15 @@ def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
                     return setattr_type.arg_types[-1]
 
     if itype.type.fallback_to_any:
-        return AnyType()
+        return AnyType(TypeOfAny.special_form)
 
     # Could not find the member.
     if is_super:
         msg.undefined_in_superclass(name, node)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
     else:
         if chk and chk.should_suppress_optional_error([itype]):
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         return msg.has_no_attr(original_type, itype, name, node)
 
 
@@ -325,7 +327,7 @@ def analyze_var(name: str, var: Var, itype: Instance, info: TypeInfo, node: Cont
         if not var.is_ready:
             not_ready_callback(var.name(), node)
         # Implicit 'Any' type.
-        result = AnyType()
+        result = AnyType(TypeOfAny.special_form)
     fullname = '{}.{}'.format(var.info.fullname(), name)
     hook = chk.plugin.get_attribute_hook(fullname)
     if hook:
@@ -354,7 +356,7 @@ def handle_partial_attribute_type(typ: PartialType, is_lvalue: bool, msg: Messag
         return typ
     else:
         msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
 
 def lookup_member_var_or_accessor(info: TypeInfo, name: str,
@@ -393,7 +395,7 @@ def check_method_type(functype: FunctionLike, itype: Instance, is_classmethod: b
                 if not subtypes.is_equivalent(clsarg.ret_type, itype):
                     msg.invalid_class_method_type(item, context)
             else:
-                if not subtypes.is_equivalent(clsarg, AnyType()):
+                if not subtypes.is_equivalent(clsarg, AnyType(TypeOfAny.special_form)):
                     msg.invalid_class_method_type(item, context)
 
 
@@ -409,7 +411,7 @@ def analyze_class_attribute_access(itype: Instance,
     node = itype.type.get(name)
     if not node:
         if itype.type.fallback_to_any:
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
         return None
 
     is_decorated = isinstance(node.node, Decorator)
@@ -435,12 +437,12 @@ def analyze_class_attribute_access(itype: Instance,
         return add_class_tvars(t, itype, is_classmethod, builtin_type, original_type)
     elif isinstance(node.node, Var):
         not_ready_callback(name, context)
-        return AnyType()
+        return AnyType(TypeOfAny.special_form)
 
     if isinstance(node.node, TypeVarExpr):
         msg.fail('Type variable "{}.{}" cannot be used as an expression'.format(
                  itype.type.name(), name), context)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
     if isinstance(node.node, TypeInfo):
         return type_object_type(node.node, builtin_type)
@@ -451,7 +453,7 @@ def analyze_class_attribute_access(itype: Instance,
 
     if is_decorated:
         # TODO: Return type of decorated function. This is quick hack to work around #998.
-        return AnyType()
+        return AnyType(TypeOfAny.special_form)
     else:
         return function_type(cast(FuncBase, node.node), builtin_type('builtins.function'))
 
@@ -501,7 +503,7 @@ def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) ->
     init_method = info.get_method('__init__')
     if not init_method:
         # Must be an invalid class definition.
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
     else:
         fallback = info.metaclass_type or builtin_type('builtins.type')
         if init_method.info.fullname() == 'builtins.object':
@@ -514,10 +516,11 @@ def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) ->
             # base class, we can't know for sure, so check for that.
             if info.fallback_to_any:
                 # Construct a universal callable as the prototype.
-                sig = CallableType(arg_types=[AnyType(), AnyType()],
+                any_type = AnyType(TypeOfAny.special_form)
+                sig = CallableType(arg_types=[any_type, any_type],
                                    arg_kinds=[ARG_STAR, ARG_STAR2],
                                    arg_names=["_args", "_kwds"],
-                                   ret_type=AnyType(),
+                                   ret_type=any_type,
                                    fallback=builtin_type('builtins.function'))
                 return class_callable(sig, info, fallback, None)
         # Construct callable type based on signature of __init__. Adjust
@@ -603,7 +606,7 @@ def map_type_from_supertype(typ: Type, sub_info: TypeInfo,
 F = TypeVar('F', bound=FunctionLike)
 
 
-def bind_self(method: F, original_type: Type = None, is_classmethod: bool = False) -> F:
+def bind_self(method: F, original_type: Optional[Type] = None, is_classmethod: bool = False) -> F:
     """Return a copy of `method`, with the type of its first parameter (usually
     self or cls) bound to original_type.
 
@@ -627,7 +630,7 @@ def bind_self(method: F, original_type: Type = None, is_classmethod: bool = Fals
 
     """
     if isinstance(method, Overloaded):
-        return cast(F, Overloaded([bind_self(c, method) for c in method.items()]))
+        return cast(F, Overloaded([bind_self(c, original_type) for c in method.items()]))
     assert isinstance(method, CallableType)
     func = method
     if not func.arg_types:
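
As a hedged user-level sketch of the __init__ rule enforced near the top of analyze_member_access (direct access is rejected unless it goes through super()):

    class Point:
        def __init__(self, x: int, y: int) -> None:
            self.x, self.y = x, y

    class Labelled(Point):
        def __init__(self, x: int, y: int, label: str) -> None:
            super().__init__(x, y)   # OK: __init__ accessed via super()
            self.label = label

    def reset(p: Point) -> None:
        p.__init__(0, 0)             # should be rejected: direct access to __init__
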
diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py
index 3dde6d7..3544b01 100644
--- a/mypy/checkstrformat.py
+++ b/mypy/checkstrformat.py
@@ -5,7 +5,7 @@ import re
 from typing import cast, List, Tuple, Dict, Callable, Union, Optional
 
 from mypy.types import (
-    Type, AnyType, TupleType, Instance, UnionType
+    Type, AnyType, TupleType, Instance, UnionType, TypeOfAny
 )
 from mypy.nodes import (
     StrExpr, BytesExpr, UnicodeExpr, TupleExpr, DictExpr, Context, Expression, StarExpr
@@ -71,7 +71,7 @@ class StringFormatterChecker:
         if isinstance(expr, BytesExpr) and (3, 0) <= self.chk.options.python_version < (3, 5):
             self.msg.fail('Bytes formatting is only supported in Python 3.5 and later',
                           replacements)
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
 
         if has_mapping_keys is None:
             pass  # Error was reported
@@ -185,8 +185,9 @@ class StringFormatterChecker:
                                        'placeholder with key \'%s\' has type' % specifier.key)
         else:
             rep_type = self.accept(replacements)
+            any_type = AnyType(TypeOfAny.special_form)
             dict_type = self.chk.named_generic_type('builtins.dict',
-                                            [AnyType(), AnyType()])
+                                                    [any_type, any_type])
             self.chk.check_subtype(rep_type, dict_type, replacements,
                                    messages.FORMAT_REQUIRES_MAPPING,
                                    'expression has type', 'expected type for mapping is')
@@ -309,9 +310,10 @@ class StringFormatterChecker:
             if self.chk.options.python_version < (3, 0):
                 self.msg.fail("Format character 'a' is only supported in Python 3", context)
                 return None
-            return AnyType()
+            # TODO: return a type object?
+            return AnyType(TypeOfAny.special_form)
         elif p in ['s', 'r']:
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
         elif p in ['d', 'i', 'o', 'u', 'x', 'X',
                    'e', 'E', 'f', 'F', 'g', 'G']:
             return UnionType([self.named_type('builtins.int'),
@@ -334,6 +336,6 @@ class StringFormatterChecker:
         """
         return self.chk.named_type(name)
 
-    def accept(self, expr: Expression, context: Type = None) -> Type:
+    def accept(self, expr: Expression, context: Optional[Type] = None) -> Type:
         """Type check a node. Alias for TypeChecker.accept."""
         return self.chk.expr_checker.accept(expr, context)
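
The mapping check above (a %-format string with named keys must be applied to something compatible with Dict[Any, Any]) targets user code like the following sketch; the second function shows the kind of call it should reject:

    record = {'name': 'mypy', 'version': '0.530'}
    ok = '%(name)s %(version)s' % record     # right operand is a mapping: accepted

    def bad() -> str:
        # Should be flagged: keyed placeholders require a mapping, not a tuple.
        return '%(name)s' % ('mypy',)
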
diff --git a/mypy/constraints.py b/mypy/constraints.py
index 97b0eea..4e7c2ec 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -1,12 +1,12 @@
 """Type inference constraints."""
 
-from typing import Iterable, List, Optional
+from typing import Iterable, List, Optional, Sequence
 
 from mypy import experiments
 from mypy.types import (
-    CallableType, Type, TypeVisitor, UnboundType, AnyType, NoneTyp, TypeVarType,
-    Instance, TupleType, TypedDictType, UnionType, Overloaded, ErasedType, PartialType,
-    DeletedType, UninhabitedType, TypeType, TypeVarId, TypeQuery, is_named_instance
+    CallableType, Type, TypeVisitor, UnboundType, AnyType, NoneTyp, TypeVarType, Instance,
+    TupleType, TypedDictType, UnionType, Overloaded, ErasedType, PartialType, DeletedType,
+    UninhabitedType, TypeType, TypeVarId, TypeQuery, is_named_instance, TypeOfAny
 )
 from mypy.maptype import map_instance_to_supertype
 from mypy import nodes
@@ -42,7 +42,7 @@ class Constraint:
 
 
 def infer_constraints_for_callable(
-        callee: CallableType, arg_types: List[Optional[Type]], arg_kinds: List[int],
+        callee: CallableType, arg_types: Sequence[Optional[Type]], arg_kinds: List[int],
         formal_to_actual: List[List[int]]) -> List[Constraint]:
     """Infer type variable constraints for a callable and actual arguments.
 
@@ -82,19 +82,19 @@ def get_actual_type(arg_type: Type, kind: int,
                 # TODO try to map type arguments to Iterable
                 return arg_type.args[0]
             else:
-                return AnyType()
+                return AnyType(TypeOfAny.from_error)
         elif isinstance(arg_type, TupleType):
             # Get the next tuple item of a tuple *arg.
             tuple_counter[0] += 1
             return arg_type.items[tuple_counter[0] - 1]
         else:
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
     elif kind == nodes.ARG_STAR2:
         if isinstance(arg_type, Instance) and (arg_type.type.fullname() == 'builtins.dict'):
             # Dict **arg. TODO more general (Mapping)
             return arg_type.args[1]
         else:
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
     else:
         # No translation for other kinds.
         return arg_type
@@ -305,7 +305,7 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
     # Non-leaf types
 
     def visit_instance(self, template: Instance) -> List[Constraint]:
-        actual = self.actual
+        original_actual = actual = self.actual
         res = []  # type: List[Constraint]
         if isinstance(actual, CallableType) and actual.fallback is not None:
             actual = actual.fallback
@@ -313,6 +313,8 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
             actual = actual.as_anonymous().fallback
         if isinstance(actual, Instance):
             instance = actual
+            # We always try nominal inference if possible, since
+            # it is much faster than the structural one.
             if (self.direction == SUBTYPE_OF and
                     template.type.has_base(instance.type.fullname())):
                 mapped = map_instance_to_supertype(template, instance.type)
@@ -336,9 +338,31 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
                     res.extend(infer_constraints(
                         template.args[j], mapped.args[j], neg_op(self.direction)))
                 return res
+            if (template.type.is_protocol and self.direction == SUPERTYPE_OF and
+                    # We avoid infinite recursion for structural subtypes by checking
+                    # whether this type already appeared in the inference chain.
+                    # This is a conservative way to break the inference cycles.
+                    # It never produces any "false" constraints but gives up soon
+                    # on purely structural inference cycles; see #3829.
+                    not any(is_same_type(template, t) for t in template.type.inferring) and
+                    mypy.subtypes.is_subtype(instance, erase_typevars(template))):
+                template.type.inferring.append(template)
+                self.infer_constraints_from_protocol_members(res, instance, template,
+                                                             original_actual, template)
+                template.type.inferring.pop()
+                return res
+            elif (instance.type.is_protocol and self.direction == SUBTYPE_OF and
+                  # We avoid infinite recursion for structural subtypes also here.
+                  not any(is_same_type(instance, i) for i in instance.type.inferring) and
+                  mypy.subtypes.is_subtype(erase_typevars(template), instance)):
+                instance.type.inferring.append(instance)
+                self.infer_constraints_from_protocol_members(res, instance, template,
+                                                             template, instance)
+                instance.type.inferring.pop()
+                return res
         if isinstance(actual, AnyType):
             # IDEA: Include both ways, i.e. add negation as well?
-            return self.infer_against_any(template.args)
+            return self.infer_against_any(template.args, actual)
         if (isinstance(actual, TupleType) and
             (is_named_instance(template, 'typing.Iterable') or
              is_named_instance(template, 'typing.Container') or
@@ -349,9 +373,36 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
                 cb = infer_constraints(template.args[0], item, SUPERTYPE_OF)
                 res.extend(cb)
             return res
+        elif (isinstance(actual, TupleType) and template.type.is_protocol and
+              self.direction == SUPERTYPE_OF):
+            if mypy.subtypes.is_subtype(actual.fallback, erase_typevars(template)):
+                res.extend(infer_constraints(template, actual.fallback, self.direction))
+                return res
+            return []
         else:
             return []
 
+    def infer_constraints_from_protocol_members(self, res: List[Constraint],
+                                                instance: Instance, template: Instance,
+                                                subtype: Type, protocol: Instance) -> None:
+        """Infer constraints for situations where either 'template' or 'instance' is a protocol.
+
+        The 'protocol' is the one of the two that is an instance of a protocol type; 'subtype'
+        is the type used to bind self during inference. Currently, we just infer constraints for
+        every protocol member type (both ways for settable members).
+        """
+        for member in protocol.type.protocol_members:
+            inst = mypy.subtypes.find_member(member, instance, subtype)
+            temp = mypy.subtypes.find_member(member, template, subtype)
+            assert inst is not None and temp is not None
+            # The above is safe since at this point we know that 'instance' is a subtype
+            # of (erased) 'template', and therefore it defines all protocol members.
+            res.extend(infer_constraints(temp, inst, self.direction))
+            if (mypy.subtypes.IS_SETTABLE in
+                    mypy.subtypes.get_member_flags(member, protocol.type)):
+                # Settable members are invariant, add opposite constraints
+                res.extend(infer_constraints(temp, inst, neg_op(self.direction)))
+
     def visit_callable_type(self, template: CallableType) -> List[Constraint]:
         if isinstance(self.actual, CallableType):
             cactual = self.actual
@@ -371,14 +422,22 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
             return res
         elif isinstance(self.actual, AnyType):
             # FIX what if generic
-            res = self.infer_against_any(template.arg_types)
-            res.extend(infer_constraints(template.ret_type, AnyType(),
-                                         self.direction))
+            res = self.infer_against_any(template.arg_types, self.actual)
+            any_type = AnyType(TypeOfAny.from_another_any, source_any=self.actual)
+            res.extend(infer_constraints(template.ret_type, any_type, self.direction))
             return res
         elif isinstance(self.actual, Overloaded):
             return self.infer_against_overloaded(self.actual, template)
         elif isinstance(self.actual, TypeType):
             return infer_constraints(template.ret_type, self.actual.item, self.direction)
+        elif isinstance(self.actual, Instance):
+            # Instances with __call__ method defined are considered structural
+            # subtypes of Callable with a compatible signature.
+            call = mypy.subtypes.find_member('__call__', self.actual, self.actual)
+            if call:
+                return infer_constraints(template, call, self.direction)
+            else:
+                return []
         else:
             return []
 
@@ -403,7 +462,7 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
                                              self.direction))
             return res
         elif isinstance(actual, AnyType):
-            return self.infer_against_any(template.items)
+            return self.infer_against_any(template.items, actual)
         else:
             return []
 
@@ -419,7 +478,7 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
                                              self.direction))
             return res
         elif isinstance(actual, AnyType):
-            return self.infer_against_any(template.items.values())
+            return self.infer_against_any(template.items.values(), actual)
         else:
             return []
 
@@ -427,10 +486,10 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
         assert False, ("Unexpected UnionType in ConstraintBuilderVisitor"
                        " (should have been handled in infer_constraints)")
 
-    def infer_against_any(self, types: Iterable[Type]) -> List[Constraint]:
+    def infer_against_any(self, types: Iterable[Type], any_type: AnyType) -> List[Constraint]:
         res = []  # type: List[Constraint]
         for t in types:
-            res.extend(infer_constraints(t, AnyType(), self.direction))
+            res.extend(infer_constraints(t, any_type, self.direction))
         return res
 
     def visit_overloaded(self, template: Overloaded) -> List[Constraint]:
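
To illustrate the new Instance branch in visit_callable_type (constraints can now flow through an instance's __call__ member when it is matched against a Callable template), here is a small hypothetical example; the inferred result is what the change is expected to enable:

    from typing import Callable, TypeVar

    T = TypeVar('T')

    def apply(f: Callable[[T], T], x: T) -> T:
        return f(x)

    class Doubler:
        def __call__(self, n: int) -> int:
            return n * 2

    # With the __call__ branch above, constraints for T can be drawn from
    # Doubler.__call__, so T should be inferred as int here.
    result = apply(Doubler(), 21)
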
diff --git a/mypy/defaults.py b/mypy/defaults.py
index b5398f9..254db92 100644
--- a/mypy/defaults.py
+++ b/mypy/defaults.py
@@ -1,4 +1,5 @@
 PYTHON2_VERSION = (2, 7)
 PYTHON3_VERSION = (3, 6)
+PYTHON3_VERSION_MIN = (3, 3)
 CACHE_DIR = '.mypy_cache'
 CONFIG_FILE = 'mypy.ini'
diff --git a/mypy/erasetype.py b/mypy/erasetype.py
index 910793b..a227280 100644
--- a/mypy/erasetype.py
+++ b/mypy/erasetype.py
@@ -1,9 +1,9 @@
 from typing import Optional, Container, Callable
 
 from mypy.types import (
-    Type, TypeVisitor, UnboundType, AnyType, NoneTyp, TypeVarId,
-    Instance, TypeVarType, CallableType, TupleType, TypedDictType, UnionType, Overloaded,
-    ErasedType, PartialType, DeletedType, TypeTranslator, TypeList, UninhabitedType, TypeType
+    Type, TypeVisitor, UnboundType, AnyType, NoneTyp, TypeVarId, Instance, TypeVarType,
+    CallableType, TupleType, TypedDictType, UnionType, Overloaded, ErasedType, PartialType,
+    DeletedType, TypeTranslator, TypeList, UninhabitedType, TypeType, TypeOfAny
 )
 from mypy import experiments
 
@@ -51,10 +51,10 @@ class EraseTypeVisitor(TypeVisitor[Type]):
         return t
 
     def visit_instance(self, t: Instance) -> Type:
-        return Instance(t.type, [AnyType()] * len(t.args), t.line)
+        return Instance(t.type, [AnyType(TypeOfAny.special_form)] * len(t.args), t.line)
 
     def visit_type_var(self, t: TypeVarType) -> Type:
-        return AnyType()
+        return AnyType(TypeOfAny.special_form)
 
     def visit_callable_type(self, t: CallableType) -> Type:
         # We must preserve the fallback type for overload resolution to work.
@@ -86,7 +86,7 @@ def erase_typevars(t: Type, ids_to_erase: Optional[Container[TypeVarId]] = None)
         if ids_to_erase is None:
             return True
         return id in ids_to_erase
-    return t.accept(TypeVarEraser(erase_id, AnyType()))
+    return t.accept(TypeVarEraser(erase_id, AnyType(TypeOfAny.special_form)))
 
 
 def replace_meta_vars(t: Type, target_type: Type) -> Type:
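
A toy model (deliberately not mypy's real Type classes) of what EraseTypeVisitor does in the two hunks above: type variables collapse to Any, and an instance keeps its class but has every type argument replaced by Any:

    from typing import List

    class ToyAny: ...

    class ToyTypeVar:
        def __init__(self, name: str) -> None:
            self.name = name

    class ToyInstance:
        def __init__(self, fullname: str, args: List[object]) -> None:
            self.fullname, self.args = fullname, args

    def erase(t: object) -> object:
        if isinstance(t, ToyTypeVar):
            return ToyAny()                                             # cf. visit_type_var
        if isinstance(t, ToyInstance):
            return ToyInstance(t.fullname, [ToyAny() for _ in t.args])  # cf. visit_instance
        return t

    # Conceptually: List[T] erases to List[Any].
    erased = erase(ToyInstance('builtins.list', [ToyTypeVar('T')]))
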
diff --git a/mypy/errors.py b/mypy/errors.py
index df1d7ee..4009e6a 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -11,6 +11,7 @@ from mypy.version import __version__ as mypy_version
 
 
 T = TypeVar('T')
+allowed_duplicates = ['@overload', 'Got:', 'Expected:']
 
 
 class ErrorInfo:
@@ -65,8 +66,8 @@ class ErrorInfo:
                  message: str,
                  blocker: bool,
                  only_once: bool,
-                 origin: Tuple[str, int] = None,
-                 target: str = None) -> None:
+                 origin: Optional[Tuple[str, int]] = None,
+                 target: Optional[str] = None) -> None:
         self.import_ctx = import_ctx
         self.file = file
         self.module = module
@@ -175,7 +176,9 @@ class Errors:
         file = os.path.normpath(file)
         return remove_path_prefix(file, self.ignore_prefix)
 
-    def set_file(self, file: str, module: Optional[str], ignored_lines: Set[int] = None) -> None:
+    def set_file(self, file: str,
+                 module: Optional[str],
+                 ignored_lines: Optional[Set[int]] = None) -> None:
         """Set the path and module id of the current file."""
         # The path will be simplified later, in render_messages. That way
         #  * 'file' is always a key that uniquely identifies a source file
@@ -251,9 +254,16 @@ class Errors:
         """Replace the entire import context with a new value."""
         self.import_ctx = ctx[:]
 
-    def report(self, line: int, column: int, message: str, blocker: bool = False,
-               severity: str = 'error', file: str = None, only_once: bool = False,
-               origin_line: int = None) -> None:
+    def report(self,
+               line: int,
+               column: int,
+               message: str,
+               blocker: bool = False,
+               severity: str = 'error',
+               file: Optional[str] = None,
+               only_once: bool = False,
+               origin_line: Optional[int] = None,
+               offset: int = 0) -> None:
         """Report message at the given line using the current error context.
 
         Args:
@@ -270,6 +280,8 @@ class Errors:
             type = None  # Omit type context if nested function
         if file is None:
             file = self.file
+        if offset:
+            message = " " * offset + message
         info = ErrorInfo(self.import_context(), file, self.current_module(), type,
                          self.function_or_member[-1], line, column, severity, message,
                          blocker, only_once,
@@ -471,6 +483,10 @@ class Errors:
             while (j >= 0 and errors[j][0] == errors[i][0] and
                     errors[j][1] == errors[i][1]):
                 if (errors[j][3] == errors[i][3] and
+                        # Allow duplicate notes in overload conflicts reporting
+                        not (errors[i][3] == 'note' and
+                             errors[i][4].strip() in allowed_duplicates
+                             or errors[i][4].strip().startswith('def ')) and
                         errors[j][4] == errors[i][4]):  # ignore column
                     dup = True
                     break
@@ -514,7 +530,7 @@ def remove_path_prefix(path: str, prefix: str) -> str:
         return path
 
 
-def report_internal_error(err: Exception, file: str, line: int,
+def report_internal_error(err: Exception, file: Optional[str], line: int,
                           errors: Errors, options: Options) -> None:
     """Report internal error and exit.
 
@@ -529,13 +545,16 @@ def report_internal_error(err: Exception, file: str, line: int,
         print("Failed to dump errors:", repr(e), file=sys.stderr)
 
     # Compute file:line prefix for official-looking error messages.
-    if line:
-        prefix = '{}:{}'.format(file, line)
+    if file:
+        if line:
+            prefix = '{}:{}: '.format(file, line)
+        else:
+            prefix = '{}: '.format(file)
     else:
-        prefix = file
+        prefix = ''
 
     # Print "INTERNAL ERROR" message.
-    print('{}: error: INTERNAL ERROR --'.format(prefix),
+    print('{}error: INTERNAL ERROR --'.format(prefix),
           'please report a bug at https://github.com/python/mypy/issues',
           'version: {}'.format(mypy_version),
           file=sys.stderr)
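
The duplicate-suppression tweak above keeps dropping consecutive identical messages but exempts the notes used when reporting overload conflicts. A simplified, assumed sketch of that exemption predicate:

    allowed_duplicates = ['@overload', 'Got:', 'Expected:']

    def is_allowed_duplicate(severity: str, message: str) -> bool:
        # Notes emitted while explaining overload conflicts ('@overload', 'Got:',
        # 'Expected:' and 'def ...' signature lines) may legitimately repeat.
        msg = message.strip()
        return severity == 'note' and (msg in allowed_duplicates or msg.startswith('def '))

    assert is_allowed_duplicate('note', '    @overload')
    assert is_allowed_duplicate('note', '    def f(x: int) -> int')
    assert not is_allowed_duplicate('error', 'Incompatible types')
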
diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py
index 8325f6d..0e1fcef 100644
--- a/mypy/exprtotype.py
+++ b/mypy/exprtotype.py
@@ -7,7 +7,7 @@ from mypy.nodes import (
 )
 from mypy.fastparse import parse_type_comment
 from mypy.types import (
-    Type, UnboundType, TypeList, EllipsisType, AnyType, Optional, CallableArgument,
+    Type, UnboundType, TypeList, EllipsisType, AnyType, Optional, CallableArgument, TypeOfAny
 )
 
 
@@ -77,7 +77,7 @@ def expr_to_unanalyzed_type(expr: Expression, _parent: Optional[Expression] = No
 
         # Go through the constructor args to get its name and type.
         name = None
-        default_type = AnyType(implicit=True)
+        default_type = AnyType(TypeOfAny.unannotated)
         typ = default_type  # type: Type
         for i, arg in enumerate(expr.args):
             if expr.arg_names[i] is not None:
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index 26a4abe..cb1eeba 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -1,7 +1,9 @@
 from functools import wraps
 import sys
 
-from typing import Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, cast, List, Set
+from typing import (
+    Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, cast, List, Set, overload
+)
 from mypy.sharedparse import (
     special_function_elide_names, argument_elide_name,
 )
@@ -23,8 +25,8 @@ from mypy.nodes import (
     check_arg_names,
 )
 from mypy.types import (
-    Type, CallableType, AnyType, UnboundType, TupleType, TypeList, EllipsisType,
-    CallableArgument,
+    Type, CallableType, AnyType, UnboundType, TupleType, TypeList, EllipsisType, CallableArgument,
+    TypeOfAny
 )
 from mypy import defaults
 from mypy import experiments
@@ -56,11 +58,17 @@ T = TypeVar('T', bound=Union[ast3.expr, ast3.stmt])
 U = TypeVar('U', bound=Node)
 V = TypeVar('V')
 
+# There is no way to create reasonable fallbacks at this stage;
+# they must be patched later.
+_dummy_fallback = None  # type: Any
+
 TYPE_COMMENT_SYNTAX_ERROR = 'syntax error in type comment'
 TYPE_COMMENT_AST_ERROR = 'invalid type comment or annotation'
 
 
-def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
+def parse(source: Union[str, bytes],
+          fnam: str,
+          errors: Optional[Errors] = None,
           options: Options = Options()) -> MypyFile:
 
     """Parse a source file, without doing any semantic analysis.
@@ -72,8 +80,8 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
     if errors is None:
         errors = Errors()
         raise_on_error = True
-    errors.set_file('<input>' if fnam is None else fnam, None)
-    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
+    errors.set_file(fnam, None)
+    is_stub_file = fnam.endswith('.pyi')
     try:
         if is_stub_file:
             feature_version = defaults.PYTHON3_VERSION[1]
@@ -121,7 +129,7 @@ def with_line(f: Callable[['ASTConverter', T], U]) -> Callable[['ASTConverter',
     return wrapper
 
 
-def find(f: Callable[[V], bool], seq: Sequence[V]) -> V:
+def find(f: Callable[[V], bool], seq: Sequence[V]) -> Optional[V]:
     for item in seq:
         if f(item):
             return item
@@ -137,7 +145,7 @@ def is_no_type_check_decorator(expr: ast3.expr) -> bool:
     return False
 
 
-class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
+class ASTConverter(ast3.NodeTransformer):
     def __init__(self,
                  options: Options,
                  is_stub: bool,
@@ -155,14 +163,16 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
     def generic_visit(self, node: ast3.AST) -> None:
         raise RuntimeError('AST node not implemented: ' + str(type(node)))
 
-    def visit_NoneType(self, n: Any) -> Optional[Node]:
-        return None
+    def visit(self, node: Optional[ast3.AST]) -> Any:  # same as in typed_ast stub
+        if node is None:
+            return None
+        return super().visit(node)
 
     def translate_expr_list(self, l: Sequence[ast3.AST]) -> List[Expression]:
         res = []  # type: List[Expression]
         for e in l:
             exp = self.visit(e)
-            assert exp is None or isinstance(exp, Expression)
+            isinstance(exp, Expression)
             res.append(exp)
         return res
 
@@ -170,7 +180,7 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
         res = []  # type: List[Statement]
         for e in l:
             stmt = self.visit(e)
-            assert stmt is None or isinstance(stmt, Statement)
+            isinstance(stmt, Statement)
             res.append(stmt)
         return res
 
@@ -217,13 +227,19 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
         else:
             return op_name
 
-    def as_block(self, stmts: List[ast3.stmt], lineno: int) -> Block:
+    def as_block(self, stmts: List[ast3.stmt], lineno: int) -> Optional[Block]:
         b = None
         if stmts:
             b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
             b.set_line(lineno)
         return b
 
+    def as_required_block(self, stmts: List[ast3.stmt], lineno: int) -> Block:
+        assert stmts  # must be non-empty
+        b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
+        b.set_line(lineno)
+        return b
+
     def fix_function_overloads(self, stmts: List[Statement]) -> List[Statement]:
         ret = []  # type: List[Statement]
         current_overload = []  # type: List[OverloadPart]
@@ -306,7 +322,7 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
         arg_names = [None if argument_elide_name(name) else name for name in arg_names]
         if special_function_elide_names(n.name):
             arg_names = [None] * len(arg_names)
-        arg_types = None  # type: List[Type]
+        arg_types = []  # type: List[Optional[Type]]
         if no_type_check:
             arg_types = [None] * len(args)
             return_type = None
@@ -320,7 +336,9 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
                     if n.returns:
                         # PEP 484 disallows both type annotations and type comments
                         self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset)
-                    arg_types = [a.type_annotation if a.type_annotation is not None else AnyType()
+                    arg_types = [a.type_annotation
+                                 if a.type_annotation is not None
+                                 else AnyType(TypeOfAny.unannotated)
                                  for a in args]
                 else:
                     # PEP 484 disallows both type annotations and type comments
@@ -328,18 +346,18 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
                         self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset)
                     translated_args = (TypeConverter(self.errors, line=n.lineno)
                                        .translate_expr_list(func_type_ast.argtypes))
-                    arg_types = [a if a is not None else AnyType()
+                    arg_types = [a if a is not None else AnyType(TypeOfAny.unannotated)
                                 for a in translated_args]
                 return_type = TypeConverter(self.errors,
                                             line=n.lineno).visit(func_type_ast.returns)
 
                 # add implicit self type
                 if self.in_class() and len(arg_types) < len(args):
-                    arg_types.insert(0, AnyType())
+                    arg_types.insert(0, AnyType(TypeOfAny.special_form))
             except SyntaxError:
                 self.fail(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset)
-                arg_types = [AnyType()] * len(args)
-                return_type = AnyType()
+                arg_types = [AnyType(TypeOfAny.from_error)] * len(args)
+                return_type = AnyType(TypeOfAny.from_error)
         else:
             arg_types = [a.type_annotation for a in args]
             return_type = TypeConverter(self.errors, line=n.returns.lineno
@@ -359,16 +377,16 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
                 self.fail('Type signature has too few arguments', n.lineno, 0)
             else:
                 func_type = CallableType([a if a is not None else
-                                          AnyType(implicit=True) for a in arg_types],
+                                          AnyType(TypeOfAny.unannotated) for a in arg_types],
                                          arg_kinds,
                                          arg_names,
                                          return_type if return_type is not None else
-                                         AnyType(implicit=True),
-                                         None)
+                                         AnyType(TypeOfAny.unannotated),
+                                         _dummy_fallback)
 
         func_def = FuncDef(n.name,
                        args,
-                       self.as_block(n.body, n.lineno),
+                       self.as_required_block(n.body, n.lineno),
                        func_type)
         if is_coroutine:
             # A coroutine is also a generator, mostly for internal reasons.
@@ -389,7 +407,7 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
         else:
             return func_def
 
-    def set_type_optional(self, type: Type, initializer: Expression) -> None:
+    def set_type_optional(self, type: Optional[Type], initializer: Optional[Expression]) -> None:
         if self.options.no_implicit_optional:
             return
         # Indicate that type should be wrapped in an Optional if arg is initialized to None.
@@ -461,20 +479,14 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
     @with_line
     def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef:
         self.class_nesting += 1
-        metaclass_arg = find(lambda x: x.arg == 'metaclass', n.keywords)
-        metaclass = None
-        if metaclass_arg:
-            metaclass = stringify_name(metaclass_arg.value)
-            if metaclass is None:
-                metaclass = '<error>'  # To be reported later
         keywords = [(kw.arg, self.visit(kw.value))
-                    for kw in n.keywords]
+                    for kw in n.keywords if kw.arg]
 
         cdef = ClassDef(n.name,
-                        self.as_block(n.body, n.lineno),
+                        self.as_required_block(n.body, n.lineno),
                         None,
                         self.translate_expr_list(n.bases),
-                        metaclass=metaclass,
+                        metaclass=dict(keywords).get('metaclass'),
                         keywords=keywords)
         cdef.decorators = self.translate_expr_list(n.decorator_list)
         self.class_nesting -= 1
@@ -510,10 +522,11 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
     @with_line
     def visit_AnnAssign(self, n: ast3.AnnAssign) -> AssignmentStmt:
         if n.value is None:  # always allow 'x: int'
-            rvalue = TempNode(AnyType())  # type: Expression
+            rvalue = TempNode(AnyType(TypeOfAny.special_form), no_rhs=True)  # type: Expression
         else:
             rvalue = self.visit(n.value)
         typ = TypeConverter(self.errors, line=n.lineno).visit(n.annotation)
+        assert typ is not None
         typ.column = n.annotation.col_offset
         return AssignmentStmt([self.visit(n.target)], rvalue, type=typ, new_syntax=True)
 
@@ -533,7 +546,7 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
             target_type = None
         return ForStmt(self.visit(n.target),
                        self.visit(n.iter),
-                       self.as_block(n.body, n.lineno),
+                       self.as_required_block(n.body, n.lineno),
                        self.as_block(n.orelse, n.lineno),
                        target_type)
 
@@ -546,7 +559,7 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
             target_type = None
         r = ForStmt(self.visit(n.target),
                     self.visit(n.iter),
-                    self.as_block(n.body, n.lineno),
+                    self.as_required_block(n.body, n.lineno),
                     self.as_block(n.orelse, n.lineno),
                     target_type)
         r.is_async = True
@@ -556,14 +569,14 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
     @with_line
     def visit_While(self, n: ast3.While) -> WhileStmt:
         return WhileStmt(self.visit(n.test),
-                         self.as_block(n.body, n.lineno),
+                         self.as_required_block(n.body, n.lineno),
                          self.as_block(n.orelse, n.lineno))
 
     # If(expr test, stmt* body, stmt* orelse)
     @with_line
     def visit_If(self, n: ast3.If) -> IfStmt:
         return IfStmt([self.visit(n.test)],
-                      [self.as_block(n.body, n.lineno)],
+                      [self.as_required_block(n.body, n.lineno)],
                       self.as_block(n.orelse, n.lineno))
 
     # With(withitem* items, stmt* body, string? type_comment)
@@ -575,7 +588,7 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
             target_type = None
         return WithStmt([self.visit(i.context_expr) for i in n.items],
                         [self.visit(i.optional_vars) for i in n.items],
-                        self.as_block(n.body, n.lineno),
+                        self.as_required_block(n.body, n.lineno),
                         target_type)
 
     # AsyncWith(withitem* items, stmt* body, string? type_comment)
@@ -587,7 +600,7 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
             target_type = None
         r = WithStmt([self.visit(i.context_expr) for i in n.items],
                      [self.visit(i.optional_vars) for i in n.items],
-                     self.as_block(n.body, n.lineno),
+                     self.as_required_block(n.body, n.lineno),
                      target_type)
         r.is_async = True
         return r
@@ -602,9 +615,9 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
     def visit_Try(self, n: ast3.Try) -> TryStmt:
         vs = [NameExpr(h.name) if h.name is not None else None for h in n.handlers]
         types = [self.visit(h.type) for h in n.handlers]
-        handlers = [self.as_block(h.body, h.lineno) for h in n.handlers]
+        handlers = [self.as_required_block(h.body, h.lineno) for h in n.handlers]
 
-        return TryStmt(self.as_block(n.body, n.lineno),
+        return TryStmt(self.as_required_block(n.body, n.lineno),
                        vs,
                        types,
                        handlers,
@@ -619,7 +632,7 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
     # Import(alias* names)
     @with_line
     def visit_Import(self, n: ast3.Import) -> Import:
-        names = []  # type: List[Tuple[str, str]]
+        names = []  # type: List[Tuple[str, Optional[str]]]
         for alias in n.names:
             name = self.translate_module_id(alias.name)
             asname = alias.asname
@@ -636,9 +649,10 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
     # ImportFrom(identifier? module, alias* names, int? level)
     @with_line
     def visit_ImportFrom(self, n: ast3.ImportFrom) -> ImportBase:
-        i = None  # type: ImportBase
+        assert n.level is not None
         if len(n.names) == 1 and n.names[0].name == '*':
-            i = ImportAll(n.module, n.level)
+            assert n.module is not None
+            i = ImportAll(n.module, n.level)  # type: ImportBase
         else:
             i = ImportFrom(self.translate_module_id(n.module) if n.module is not None else '',
                            n.level,
@@ -683,7 +697,6 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
     def visit_BoolOp(self, n: ast3.BoolOp) -> OpExpr:
         # mypy translates (1 and 2 and 3) as (1 and (2 and 3))
         assert len(n.values) >= 2
-        op = None
         if isinstance(n.op, ast3.And):
             op = 'and'
         elif isinstance(n.op, ast3.Or):
@@ -736,7 +749,7 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
         body.col_offset = n.col_offset
 
         return LambdaExpr(self.transform_args(n.args, n.lineno),
-                        self.as_block([body], n.lineno))
+                        self.as_required_block([body], n.lineno))
 
     # IfExp(expr test, expr body, expr orelse)
     @with_line
@@ -831,7 +844,8 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
         return CallExpr(self.visit(n.func),
                         arg_types,
                         arg_kinds,
-                        cast("List[str]", [None] * len(n.args)) + [k.arg for k in n.keywords])
+                        cast(List[Optional[str]], [None] * len(n.args)) +
+                        [k.arg for k in n.keywords])
 
     # Num(object n) -- a number as a PyObject.
     @with_line
@@ -871,7 +885,8 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
             join_method.set_line(empty_string)
             result_expression = CallExpr(join_method,
                                          [strs_to_join],
-                                         [ARG_POS])
+                                         [ARG_POS],
+                                         [None])
             return result_expression
 
         # FormattedValue(expr value)
@@ -888,7 +903,8 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
             format_method.set_line(format_string)
             result_expression = CallExpr(format_method,
                                          [exp],
-                                         [ARG_POS])
+                                         [ARG_POS],
+                                         [None])
             return result_expression
 
     # Bytes(bytes s)
@@ -914,7 +930,7 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
         if (isinstance(n.value, ast3.Call) and
                 isinstance(n.value.func, ast3.Name) and
                 n.value.func.id == 'super'):
-            return SuperExpr(n.attr)
+            return SuperExpr(n.attr, self.visit(n.value))
 
         return MemberExpr(self.visit(n.value), n.attr)
 
@@ -960,43 +976,57 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
         return self.visit(n.value)
 
 
-class TypeConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
-    def __init__(self, errors: Errors, line: int = -1) -> None:
+class TypeConverter(ast3.NodeTransformer):
+    def __init__(self, errors: Optional[Errors], line: int = -1) -> None:
         self.errors = errors
         self.line = line
         self.node_stack = []  # type: List[ast3.AST]
 
-    def visit(self, node: ast3.AST) -> Type:
+    def _visit_implementation(self, node: Optional[ast3.AST]) -> Optional[Type]:
         """Modified visit -- keep track of the stack of nodes"""
+        if node is None:
+            return None
         self.node_stack.append(node)
         try:
             return super().visit(node)
         finally:
             self.node_stack.pop()
 
-    def parent(self) -> ast3.AST:
+    if sys.version_info >= (3, 6):
+        @overload
+        def visit(self, node: ast3.expr) -> Type: ...
+
+        @overload  # noqa
+        def visit(self, node: Optional[ast3.AST]) -> Optional[Type]: ...
+
+        def visit(self, node: Optional[ast3.AST]) -> Optional[Type]:  # noqa
+            return self._visit_implementation(node)
+    else:
+        def visit(self, node: Optional[ast3.AST]) -> Any:
+            return self._visit_implementation(node)
+
+    def parent(self) -> Optional[ast3.AST]:
         """Return the AST node above the one we are processing"""
         if len(self.node_stack) < 2:
             return None
         return self.node_stack[-2]
 
     def fail(self, msg: str, line: int, column: int) -> None:
-        self.errors.report(line, column, msg)
+        if self.errors:
+            self.errors.report(line, column, msg)
 
     def visit_raw_str(self, s: str) -> Type:
         # An escape hatch that allows the AST walker in fastparse2 to
         # directly hook into the Python 3.5 type converter in some cases
         # without needing to create an intermediary `ast3.Str` object.
-        return parse_type_comment(s.strip(), self.line, self.errors) or AnyType()
+        return (parse_type_comment(s.strip(), self.line, self.errors) or
+                AnyType(TypeOfAny.from_error))
 
     def generic_visit(self, node: ast3.AST) -> Type:  # type: ignore
         self.fail(TYPE_COMMENT_AST_ERROR, self.line, getattr(node, 'col_offset', -1))
-        return AnyType()
-
-    def visit_NoneType(self, n: Any) -> Type:
-        return None
+        return AnyType(TypeOfAny.from_error)
 
-    def translate_expr_list(self, l: Sequence[ast3.AST]) -> List[Type]:
+    def translate_expr_list(self, l: Sequence[ast3.expr]) -> List[Type]:
         return [self.visit(e) for e in l]
 
     def visit_Call(self, e: ast3.Call) -> Type:
@@ -1008,11 +1038,13 @@ class TypeConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
         if not constructor:
             self.fail("Expected arg constructor name", e.lineno, e.col_offset)
         name = None  # type: Optional[str]
-        default_type = AnyType(implicit=True)
+        default_type = AnyType(TypeOfAny.special_form)
         typ = default_type  # type: Type
         for i, arg in enumerate(e.args):
             if i == 0:
-                typ = self.visit(arg)
+                converted = self.visit(arg)
+                assert converted is not None
+                typ = converted
             elif i == 1:
                 name = self._extract_argument_name(arg)
             else:
@@ -1029,17 +1061,19 @@ class TypeConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
                 if typ is not default_type:
                     self.fail('"{}" gets multiple values for keyword argument "type"'.format(
                         constructor), f.lineno, f.col_offset)
-                typ = self.visit(value)
+                converted = self.visit(value)
+                assert converted is not None
+                typ = converted
             else:
                 self.fail(
                     'Unexpected argument "{}" for argument constructor'.format(k.arg),
                     value.lineno, value.col_offset)
         return CallableArgument(typ, name, constructor, e.lineno, e.col_offset)
 
-    def translate_argument_list(self, l: Sequence[ast3.AST]) -> TypeList:
+    def translate_argument_list(self, l: Sequence[ast3.expr]) -> TypeList:
         return TypeList([self.visit(e) for e in l], line=self.line)
 
-    def _extract_argument_name(self, n: ast3.expr) -> str:
+    def _extract_argument_name(self, n: ast3.expr) -> Optional[str]:
         if isinstance(n, ast3.Str):
             return n.s.strip()
         elif isinstance(n, ast3.NameConstant) and str(n.value) == 'None':
@@ -1056,13 +1090,14 @@ class TypeConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
 
     # Str(string s)
     def visit_Str(self, n: ast3.Str) -> Type:
-        return parse_type_comment(n.s.strip(), self.line, self.errors) or AnyType()
+        return (parse_type_comment(n.s.strip(), self.line, self.errors) or
+                AnyType(TypeOfAny.from_error))
 
     # Subscript(expr value, slice slice, expr_context ctx)
     def visit_Subscript(self, n: ast3.Subscript) -> Type:
         if not isinstance(n.slice, ast3.Index):
             self.fail(TYPE_COMMENT_SYNTAX_ERROR, self.line, getattr(n, 'col_offset', -1))
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
 
         empty_tuple_index = False
         if isinstance(n.slice.value, ast3.Tuple):
@@ -1078,10 +1113,11 @@ class TypeConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
                                empty_tuple_index=empty_tuple_index)
         else:
             self.fail(TYPE_COMMENT_AST_ERROR, self.line, getattr(n, 'col_offset', -1))
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
 
     def visit_Tuple(self, n: ast3.Tuple) -> Type:
-        return TupleType(self.translate_expr_list(n.elts), None, implicit=True, line=self.line)
+        return TupleType(self.translate_expr_list(n.elts), _dummy_fallback,
+                         implicit=True, line=self.line)
 
     # Attribute(expr value, identifier attr, expr_context ctx)
     def visit_Attribute(self, n: ast3.Attribute) -> Type:
@@ -1091,7 +1127,7 @@ class TypeConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
             return UnboundType("{}.{}".format(before_dot.name, n.attr), line=self.line)
         else:
             self.fail(TYPE_COMMENT_AST_ERROR, self.line, getattr(n, 'col_offset', -1))
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
 
     # Ellipsis
     def visit_Ellipsis(self, n: ast3.Ellipsis) -> Type:
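
Note: the version-gated overloads on TypeConverter.visit above let callers get a
non-Optional Type back when they pass a plain expression, while still accepting and
propagating None. A minimal standalone sketch of the same pattern, with invented
names (Converter, convert, _convert_impl) and str/int standing in for the AST and
Type classes:

    import sys
    from typing import Any, Optional, overload


    class Converter:
        def _convert_impl(self, node: Optional[str]) -> Optional[int]:
            # None flows through unchanged; anything else gets converted.
            if node is None:
                return None
            return len(node)

        if sys.version_info >= (3, 6):
            @overload
            def convert(self, node: str) -> int: ...

            @overload  # noqa
            def convert(self, node: Optional[str]) -> Optional[int]: ...

            def convert(self, node: Optional[str]) -> Optional[int]:  # noqa
                return self._convert_impl(node)
        else:
            def convert(self, node: Optional[str]) -> Any:
                return self._convert_impl(node)


    print(Converter().convert("abc"))  # 3
    print(Converter().convert(None))   # None
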
diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py
index 0f1bd63..3156f9e 100644
--- a/mypy/fastparse2.py
+++ b/mypy/fastparse2.py
@@ -36,7 +36,7 @@ from mypy.nodes import (
     ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_STAR2, OverloadPart, check_arg_names,
 )
 from mypy.types import (
-    Type, CallableType, AnyType, UnboundType, EllipsisType
+    Type, CallableType, AnyType, UnboundType, EllipsisType, TypeOfAny
 )
 from mypy import experiments
 from mypy import messages
@@ -69,11 +69,17 @@ T = TypeVar('T', bound=Union[ast27.expr, ast27.stmt])
 U = TypeVar('U', bound=Node)
 V = TypeVar('V')
 
+# There is no way to create reasonable fallbacks at this stage,
+# they must be patched later.
+_dummy_fallback = None  # type: Any
+
 TYPE_COMMENT_SYNTAX_ERROR = 'syntax error in type comment'
 TYPE_COMMENT_AST_ERROR = 'invalid type comment'
 
 
-def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
+def parse(source: Union[str, bytes],
+          fnam: str,
+          errors: Optional[Errors] = None,
           options: Options = Options()) -> MypyFile:
     """Parse a source file, without doing any semantic analysis.
 
@@ -84,8 +90,8 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
     if errors is None:
         errors = Errors()
         raise_on_error = True
-    errors.set_file('<input>' if fnam is None else fnam, None)
-    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
+    errors.set_file(fnam, None)
+    is_stub_file = fnam.endswith('.pyi')
     try:
         assert options.python_version[0] < 3 and not is_stub_file
         ast = ast27.parse(source, fnam, 'exec')
@@ -115,7 +121,7 @@ def with_line(f: Callable[['ASTConverter', T], U]) -> Callable[['ASTConverter',
     return wrapper
 
 
-def find(f: Callable[[V], bool], seq: Sequence[V]) -> V:
+def find(f: Callable[[V], bool], seq: Sequence[V]) -> Optional[V]:
     for item in seq:
         if f(item):
             return item
@@ -149,8 +155,10 @@ class ASTConverter(ast27.NodeTransformer):
     def generic_visit(self, node: ast27.AST) -> None:
         raise RuntimeError('AST node not implemented: ' + str(type(node)))
 
-    def visit_NoneType(self, n: Any) -> Optional[Node]:
-        return None
+    def visit(self, node: Optional[ast27.AST]) -> Any:  # same as in typed_ast stub
+        if node is None:
+            return None
+        return super().visit(node)
 
     def translate_expr_list(self, l: Sequence[ast27.AST]) -> List[Expression]:
         res = []  # type: List[Expression]
@@ -212,13 +220,19 @@ class ASTConverter(ast27.NodeTransformer):
         else:
             return op_name
 
-    def as_block(self, stmts: List[ast27.stmt], lineno: int) -> Block:
+    def as_block(self, stmts: List[ast27.stmt], lineno: int) -> Optional[Block]:
         b = None
         if stmts:
             b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
             b.set_line(lineno)
         return b
 
+    def as_required_block(self, stmts: List[ast27.stmt], lineno: int) -> Block:
+        assert stmts  # must be non-empty
+        b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts)))
+        b.set_line(lineno)
+        return b
+
     def fix_function_overloads(self, stmts: List[Statement]) -> List[Statement]:
         ret = []  # type: List[Statement]
         current_overload = []  # type: List[OverloadPart]
@@ -289,7 +303,7 @@ class ASTConverter(ast27.NodeTransformer):
         if special_function_elide_names(n.name):
             arg_names = [None] * len(arg_names)
 
-        arg_types = None  # type: List[Type]
+        arg_types = []  # type: List[Optional[Type]]
         if (n.decorator_list and any(is_no_type_check_decorator(d) for d in n.decorator_list)):
             arg_types = [None] * len(args)
             return_type = None
@@ -300,23 +314,25 @@ class ASTConverter(ast27.NodeTransformer):
                 # for ellipsis arg
                 if (len(func_type_ast.argtypes) == 1 and
                         isinstance(func_type_ast.argtypes[0], ast3.Ellipsis)):
-                    arg_types = [a.type_annotation if a.type_annotation is not None else AnyType()
-                                for a in args]
+                    arg_types = [a.type_annotation
+                                 if a.type_annotation is not None
+                                 else AnyType(TypeOfAny.unannotated)
+                                 for a in args]
                 else:
                     # PEP 484 disallows both type annotations and type comments
                     if any(a.type_annotation is not None for a in args):
                         self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset)
-                    arg_types = [a if a is not None else AnyType() for
-                                a in converter.translate_expr_list(func_type_ast.argtypes)]
+                    arg_types = [a if a is not None else AnyType(TypeOfAny.unannotated) for
+                                 a in converter.translate_expr_list(func_type_ast.argtypes)]
                 return_type = converter.visit(func_type_ast.returns)
 
                 # add implicit self type
                 if self.in_class() and len(arg_types) < len(args):
-                    arg_types.insert(0, AnyType())
+                    arg_types.insert(0, AnyType(TypeOfAny.special_form))
             except SyntaxError:
                 self.fail(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset)
-                arg_types = [AnyType()] * len(args)
-                return_type = AnyType()
+                arg_types = [AnyType(TypeOfAny.from_error)] * len(args)
+                return_type = AnyType(TypeOfAny.from_error)
         else:
             arg_types = [a.type_annotation for a in args]
             return_type = converter.visit(None)
@@ -334,13 +350,14 @@ class ASTConverter(ast27.NodeTransformer):
             elif len(arg_types) < len(arg_kinds):
                 self.fail('Type signature has too few arguments', n.lineno, 0)
             else:
-                func_type = CallableType([a if a is not None else AnyType() for a in arg_types],
+                any_type = AnyType(TypeOfAny.unannotated)
+                func_type = CallableType([a if a is not None else any_type for a in arg_types],
                                         arg_kinds,
                                         arg_names,
-                                        return_type if return_type is not None else AnyType(),
-                                        None)
+                                        return_type if return_type is not None else any_type,
+                                        _dummy_fallback)
 
-        body = self.as_block(n.body, n.lineno)
+        body = self.as_required_block(n.body, n.lineno)
         if decompose_stmts:
             body.body = decompose_stmts + body.body
         func_def = FuncDef(n.name,
@@ -363,7 +380,7 @@ class ASTConverter(ast27.NodeTransformer):
         else:
             return func_def
 
-    def set_type_optional(self, type: Type, initializer: Expression) -> None:
+    def set_type_optional(self, type: Optional[Type], initializer: Optional[Expression]) -> None:
         if self.options.no_implicit_optional:
             return
         # Indicate that type should be wrapped in an Optional if arg is initialized to None.
@@ -375,9 +392,7 @@ class ASTConverter(ast27.NodeTransformer):
                        n: ast27.arguments,
                        line: int,
                        ) -> Tuple[List[Argument], List[Statement]]:
-        # TODO: remove the cast once https://github.com/python/typeshed/pull/522
-        # is accepted and synced
-        type_comments = cast(List[str], n.type_comments)  # type: ignore
+        type_comments = n.type_comments
         converter = TypeConverter(self.errors, line=line)
         decompose_stmts = []  # type: List[Statement]
 
@@ -458,7 +473,7 @@ class ASTConverter(ast27.NodeTransformer):
         self.class_nesting += 1
 
         cdef = ClassDef(n.name,
-                        self.as_block(n.body, n.lineno),
+                        self.as_required_block(n.body, n.lineno),
                         None,
                         self.translate_expr_list(n.bases),
                         metaclass=None)
@@ -508,7 +523,7 @@ class ASTConverter(ast27.NodeTransformer):
             target_type = None
         return ForStmt(self.visit(n.target),
                        self.visit(n.iter),
-                       self.as_block(n.body, n.lineno),
+                       self.as_required_block(n.body, n.lineno),
                        self.as_block(n.orelse, n.lineno),
                        target_type)
 
@@ -516,14 +531,14 @@ class ASTConverter(ast27.NodeTransformer):
     @with_line
     def visit_While(self, n: ast27.While) -> WhileStmt:
         return WhileStmt(self.visit(n.test),
-                         self.as_block(n.body, n.lineno),
+                         self.as_required_block(n.body, n.lineno),
                          self.as_block(n.orelse, n.lineno))
 
     # If(expr test, stmt* body, stmt* orelse)
     @with_line
     def visit_If(self, n: ast27.If) -> IfStmt:
         return IfStmt([self.visit(n.test)],
-                      [self.as_block(n.body, n.lineno)],
+                      [self.as_required_block(n.body, n.lineno)],
                       self.as_block(n.orelse, n.lineno))
 
     # With(withitem* items, stmt* body, string? type_comment)
@@ -535,7 +550,7 @@ class ASTConverter(ast27.NodeTransformer):
             target_type = None
         return WithStmt([self.visit(n.context_expr)],
                         [self.visit(n.optional_vars)],
-                        self.as_block(n.body, n.lineno),
+                        self.as_required_block(n.body, n.lineno),
                         target_type)
 
     @with_line
@@ -571,19 +586,20 @@ class ASTConverter(ast27.NodeTransformer):
                     orelse: List[ast27.stmt],
                     finalbody: List[ast27.stmt],
                     lineno: int) -> TryStmt:
-        def produce_name(item: ast27.ExceptHandler) -> Optional[NameExpr]:
+        vs = []  # type: List[Optional[NameExpr]]
+        for item in handlers:
             if item.name is None:
-                return None
+                vs.append(None)
             elif isinstance(item.name, ast27.Name):
-                return NameExpr(item.name.id)
+                vs.append(NameExpr(item.name.id))
             else:
-                raise RuntimeError("'{}' has non-Name name.".format(ast27.dump(item)))
-
-        vs = [produce_name(h) for h in handlers]
+                self.fail("Sorry, `except <expr>, <anything but a name>` is not supported",
+                          item.lineno, item.col_offset)
+                vs.append(None)
         types = [self.visit(h.type) for h in handlers]
-        handlers_ = [self.as_block(h.body, h.lineno) for h in handlers]
+        handlers_ = [self.as_required_block(h.body, h.lineno) for h in handlers]
 
-        return TryStmt(self.as_block(body, lineno),
+        return TryStmt(self.as_required_block(body, lineno),
                        vs,
                        types,
                        handlers_,
@@ -612,7 +628,7 @@ class ASTConverter(ast27.NodeTransformer):
     # Import(alias* names)
     @with_line
     def visit_Import(self, n: ast27.Import) -> Import:
-        names = []  # type: List[Tuple[str, str]]
+        names = []  # type: List[Tuple[str, Optional[str]]]
         for alias in n.names:
             name = self.translate_module_id(alias.name)
             asname = alias.asname
@@ -629,9 +645,10 @@ class ASTConverter(ast27.NodeTransformer):
     # ImportFrom(identifier? module, alias* names, int? level)
     @with_line
     def visit_ImportFrom(self, n: ast27.ImportFrom) -> ImportBase:
-        i = None  # type: ImportBase
+        assert n.level is not None
         if len(n.names) == 1 and n.names[0].name == '*':
-            i = ImportAll(n.module, n.level)
+            assert n.module is not None
+            i = ImportAll(n.module, n.level)  # type: ImportBase
         else:
             i = ImportFrom(self.translate_module_id(n.module) if n.module is not None else '',
                            n.level,
@@ -671,7 +688,6 @@ class ASTConverter(ast27.NodeTransformer):
     def visit_BoolOp(self, n: ast27.BoolOp) -> OpExpr:
         # mypy translates (1 and 2 and 3) as (1 and (2 and 3))
         assert len(n.values) >= 2
-        op = None
         if isinstance(n.op, ast27.And):
             op = 'and'
         elif isinstance(n.op, ast27.Or):
@@ -724,7 +740,7 @@ class ASTConverter(ast27.NodeTransformer):
         n_body = ast27.Return(n.body)
         n_body.lineno = n.lineno
         n_body.col_offset = n.col_offset
-        body = self.as_block([n_body], n.lineno)
+        body = self.as_required_block([n_body], n.lineno)
         if decompose_stmts:
             body.body = decompose_stmts + body.body
 
@@ -824,7 +840,7 @@ class ASTConverter(ast27.NodeTransformer):
         return CallExpr(self.visit(n.func),
                         self.translate_expr_list(arg_types),
                         arg_kinds,
-                        cast("List[str]", signature))
+                        signature)
 
     # Num(object n) -- a number as a PyObject.
     @with_line
@@ -835,9 +851,8 @@ class ASTConverter(ast27.NodeTransformer):
             value = -new.n
             is_inverse = True
 
-        expr = None  # type: Expression
         if isinstance(value, int):
-            expr = IntExpr(value)
+            expr = IntExpr(value)  # type: Expression
         elif isinstance(value, float):
             expr = FloatExpr(value)
         elif isinstance(value, complex):
@@ -878,7 +893,7 @@ class ASTConverter(ast27.NodeTransformer):
         if (isinstance(n.value, ast27.Call) and
                 isinstance(n.value.func, ast27.Name) and
                 n.value.func.id == 'super'):
-            return SuperExpr(n.attr)
+            return SuperExpr(n.attr, self.visit(n.value))
 
         return MemberExpr(self.visit(n.value), n.attr)
 
diff --git a/mypy/indirection.py b/mypy/indirection.py
index 2e69c5e..badbe38 100644
--- a/mypy/indirection.py
+++ b/mypy/indirection.py
@@ -101,3 +101,6 @@ class TypeIndirectionVisitor(SyntheticTypeVisitor[Set[str]]):
 
     def visit_type_type(self, t: types.TypeType) -> Set[str]:
         return self._visit(t.item)
+
+    def visit_forwardref_type(self, t: types.ForwardRef) -> Set[str]:
+        return self._visit(t.link)
diff --git a/mypy/infer.py b/mypy/infer.py
index 6820a2c..b7d0dca 100644
--- a/mypy/infer.py
+++ b/mypy/infer.py
@@ -1,6 +1,6 @@
 """Utilities for type argument inference."""
 
-from typing import List, Optional
+from typing import List, Optional, Sequence
 
 from mypy.constraints import infer_constraints, infer_constraints_for_callable
 from mypy.types import Type, TypeVarId, CallableType
@@ -9,7 +9,7 @@ from mypy.constraints import SUBTYPE_OF
 
 
 def infer_function_type_arguments(callee_type: CallableType,
-                                  arg_types: List[Optional[Type]],
+                                  arg_types: Sequence[Optional[Type]],
                                   arg_kinds: List[int],
                                   formal_to_actual: List[List[int]],
                                   strict: bool = True) -> List[Optional[Type]]:
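
Note: the switch from List[Optional[Type]] to Sequence[Optional[Type]] in
infer_function_type_arguments is presumably about variance: List is invariant, so a
caller holding a List[Type] cannot pass it where List[Optional[Type]] is expected,
whereas the read-only Sequence is covariant and accepts it. A small self-contained
illustration of that typing rule, with int standing in for Type:

    from typing import List, Optional, Sequence


    def takes_list(xs: List[Optional[int]]) -> int:
        return len(xs)


    def takes_seq(xs: Sequence[Optional[int]]) -> int:
        return len(xs)


    concrete = [1, 2, 3]  # type: List[int]
    takes_seq(concrete)   # accepted: Sequence is read-only, hence covariant
    takes_list(concrete)  # rejected by mypy (List is invariant), though it runs fine
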
diff --git a/mypy/join.py b/mypy/join.py
index 132017e..e306a3f 100644
--- a/mypy/join.py
+++ b/mypy/join.py
@@ -4,13 +4,15 @@ from collections import OrderedDict
 from typing import cast, List, Optional
 
 from mypy.types import (
-    Type, AnyType, NoneTyp, TypeVisitor, Instance, UnboundType,
-    TypeVarType, CallableType, TupleType, TypedDictType, ErasedType, TypeList,
-    UnionType, FunctionLike, Overloaded, PartialType, DeletedType,
-    UninhabitedType, TypeType, true_or_false
+    Type, AnyType, NoneTyp, TypeVisitor, Instance, UnboundType, TypeVarType, CallableType,
+    TupleType, TypedDictType, ErasedType, TypeList, UnionType, FunctionLike, Overloaded,
+    PartialType, DeletedType, UninhabitedType, TypeType, true_or_false, TypeOfAny
 )
 from mypy.maptype import map_instance_to_supertype
-from mypy.subtypes import is_subtype, is_equivalent, is_subtype_ignoring_tvars, is_proper_subtype
+from mypy.subtypes import (
+    is_subtype, is_equivalent, is_subtype_ignoring_tvars, is_proper_subtype,
+    is_protocol_implementation
+)
 
 from mypy import experiments
 
@@ -99,7 +101,7 @@ class TypeJoinVisitor(TypeVisitor[Type]):
         self.s = s
 
     def visit_unbound_type(self, t: UnboundType) -> Type:
-        return AnyType()
+        return AnyType(TypeOfAny.special_form)
 
     def visit_union_type(self, t: UnionType) -> Type:
         if is_subtype(self.s, t):
@@ -115,7 +117,7 @@ class TypeJoinVisitor(TypeVisitor[Type]):
             if isinstance(self.s, (NoneTyp, UninhabitedType)):
                 return t
             elif isinstance(self.s, UnboundType):
-                return AnyType()
+                return AnyType(TypeOfAny.special_form)
             else:
                 return UnionType.make_simplified_union([self.s, t])
         else:
@@ -138,7 +140,18 @@ class TypeJoinVisitor(TypeVisitor[Type]):
 
     def visit_instance(self, t: Instance) -> Type:
         if isinstance(self.s, Instance):
-            return join_instances(t, self.s)
+            nominal = join_instances(t, self.s)
+            structural = None  # type: Optional[Instance]
+            if t.type.is_protocol and is_protocol_implementation(self.s, t):
+                structural = t
+            elif self.s.type.is_protocol and is_protocol_implementation(t, self.s):
+                structural = self.s
+            # Structural join is preferred in the case where we have found both
+            # structural and nominal and they have the same MRO length (see two comments

+            # in join_instances_via_supertype). Otherwise, just return the nominal join.
+            if not structural or is_better(nominal, structural):
+                return nominal
+            return structural
         elif isinstance(self.s, FunctionLike):
             return join_types(t, self.s.fallback)
         elif isinstance(self.s, TypeType):
@@ -238,7 +251,7 @@ class TypeJoinVisitor(TypeVisitor[Type]):
             required_keys = set(items.keys()) & t.required_keys & self.s.required_keys
             return TypedDictType(items, required_keys, fallback)
         elif isinstance(self.s, Instance):
-            return join_instances(self.s, t.fallback)
+            return join_types(self.s, t.fallback)
         else:
             return self.default(self.s)
 
@@ -262,7 +275,7 @@ class TypeJoinVisitor(TypeVisitor[Type]):
         if isinstance(typ, Instance):
             return object_from_instance(typ)
         elif isinstance(typ, UnboundType):
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
         elif isinstance(typ, TupleType):
             return self.default(typ.fallback)
         elif isinstance(typ, TypedDictType):
@@ -272,7 +285,7 @@ class TypeJoinVisitor(TypeVisitor[Type]):
         elif isinstance(typ, TypeVarType):
             return self.default(typ.upper_bound)
         else:
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
 
 
 def join_instances(t: Instance, s: Instance) -> Type:
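
Note: the structural branch added to visit_instance above means that joining a
protocol with one of its structural implementations can keep the protocol instead of
degrading to a nominal common ancestor. A user-level sketch of the kind of code this
affects; Closeable and Resource are invented names, and Protocol is assumed to come
from typing_extensions, where it lived at the time of this release:

    from typing_extensions import Protocol


    class Closeable(Protocol):
        def close(self) -> None: ...


    class Resource:  # no nominal relationship to Closeable
        def close(self) -> None:
            print("resource closed")


    def pick(flag: bool, x: Closeable, y: Resource) -> None:
        # mypy combines the two branch types here; with the structural join above
        # the combined type can remain Closeable rather than falling back to object.
        z = x if flag else y
        z.close()


    pick(False, Resource(), Resource())
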
diff --git a/mypy/literals.py b/mypy/literals.py
new file mode 100644
index 0000000..39cdb21
--- /dev/null
+++ b/mypy/literals.py
@@ -0,0 +1,233 @@
+from typing import Optional, Union, Any, Tuple, Iterable
+
+from mypy.nodes import (
+    Expression, ComparisonExpr, OpExpr, MemberExpr, UnaryExpr, StarExpr, IndexExpr, LITERAL_YES,
+    LITERAL_NO, NameExpr, LITERAL_TYPE, IntExpr, FloatExpr, ComplexExpr, StrExpr, BytesExpr,
+    UnicodeExpr, ListExpr, TupleExpr, SetExpr, DictExpr, CallExpr, SliceExpr, CastExpr,
+    ConditionalExpr, EllipsisExpr, YieldFromExpr, YieldExpr, RevealTypeExpr, SuperExpr,
+    TypeApplication, LambdaExpr, ListComprehension, SetComprehension, DictionaryComprehension,
+    GeneratorExpr, BackquoteExpr, TypeVarExpr, TypeAliasExpr, NamedTupleExpr, EnumCallExpr,
+    TypedDictExpr, NewTypeExpr, PromoteExpr, AwaitExpr, TempNode,
+)
+from mypy.visitor import ExpressionVisitor
+
+# [Note Literals and literal_hash]
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#
+# Mypy uses the term "literal" to refer to any expression built out of
+# the following:
+#
+# * Plain literal expressions, like `1` (integer, float, string, etc.)
+#
+# * Compound literal expressions, like `(lit1, lit2)` (list, dict,
+#   set, or tuple)
+#
+# * Operator expressions, like `lit1 + lit2`
+#
+# * Variable references, like `x`
+#
+# * Member references, like `lit.m`
+#
+# * Index expressions, like `lit[0]`
+#
+# A typical "literal" looks like `x[(i,j+1)].m`.
+#
+# An expression that is a literal has a `literal_hash`, with the
+# following properties.
+#
+# * `literal_hash` is a Key: a tuple containing basic data types and
+#   possibly other Keys. So it can be used as a key in a dictionary
+#   that will be compared by value (as opposed to the Node itself,
+#   which is compared by identity).
+#
+# * Two expressions have equal `literal_hash`es if and only if they
+#   are syntactically equal expressions. (NB: Actually, we also
+#   identify as equal expressions like `3` and `3.0`; is this a good
+#   idea?)
+#
+# * The elements of `literal_hash` that are tuples are exactly the
+#   subexpressions of the original expression (e.g. the base and index
+#   of an index expression, or the operands of an operator expression).
+
+
+def literal(e: Expression) -> int:
+    if isinstance(e, ComparisonExpr):
+        return min(literal(o) for o in e.operands)
+
+    elif isinstance(e, OpExpr):
+        return min(literal(e.left), literal(e.right))
+
+    elif isinstance(e, (MemberExpr, UnaryExpr, StarExpr)):
+        return literal(e.expr)
+
+    elif isinstance(e, IndexExpr):
+        if literal(e.index) == LITERAL_YES:
+            return literal(e.base)
+        else:
+            return LITERAL_NO
+
+    elif isinstance(e, NameExpr):
+        return LITERAL_TYPE
+
+    if isinstance(e, (IntExpr, FloatExpr, ComplexExpr, StrExpr, BytesExpr, UnicodeExpr)):
+        return LITERAL_YES
+
+    if literal_hash(e):
+        return LITERAL_YES
+
+    return LITERAL_NO
+
+
+Key = Tuple[Any, ...]
+
+
+def subkeys(key: Key) -> Iterable[Key]:
+    return [elt for elt in key if isinstance(elt, tuple)]
+
+
+def literal_hash(e: Expression) -> Optional[Key]:
+    return e.accept(_hasher)
+
+
+class _Hasher(ExpressionVisitor[Optional[Key]]):
+    def visit_int_expr(self, e: IntExpr) -> Key:
+        return ('Literal', e.value)
+
+    def visit_str_expr(self, e: StrExpr) -> Key:
+        return ('Literal', e.value)
+
+    def visit_bytes_expr(self, e: BytesExpr) -> Key:
+        return ('Literal', e.value)
+
+    def visit_unicode_expr(self, e: UnicodeExpr) -> Key:
+        return ('Literal', e.value)
+
+    def visit_float_expr(self, e: FloatExpr) -> Key:
+        return ('Literal', e.value)
+
+    def visit_complex_expr(self, e: ComplexExpr) -> Key:
+        return ('Literal', e.value)
+
+    def visit_star_expr(self, e: StarExpr) -> Key:
+        return ('Star', literal_hash(e.expr))
+
+    def visit_name_expr(self, e: NameExpr) -> Key:
+        return ('Var', e.name)
+
+    def visit_member_expr(self, e: MemberExpr) -> Key:
+        return ('Member', literal_hash(e.expr), e.name)
+
+    def visit_op_expr(self, e: OpExpr) -> Key:
+        return ('Binary', e.op, literal_hash(e.left), literal_hash(e.right))
+
+    def visit_comparison_expr(self, e: ComparisonExpr) -> Key:
+        rest = tuple(e.operators)  # type: Any
+        rest += tuple(literal_hash(o) for o in e.operands)
+        return ('Comparison',) + rest
+
+    def visit_unary_expr(self, e: UnaryExpr) -> Key:
+        return ('Unary', e.op, literal_hash(e.expr))
+
+    def seq_expr(self, e: Union[ListExpr, TupleExpr, SetExpr], name: str) -> Optional[Key]:
+        if all(literal(x) == LITERAL_YES for x in e.items):
+            rest = tuple(literal_hash(x) for x in e.items)  # type: Any
+            return (name,) + rest
+        return None
+
+    def visit_list_expr(self, e: ListExpr) -> Optional[Key]:
+        return self.seq_expr(e, 'List')
+
+    def visit_dict_expr(self, e: DictExpr) -> Optional[Key]:
+        if all(a and literal(a) == literal(b) == LITERAL_YES for a, b in e.items):
+            rest = tuple((literal_hash(a), literal_hash(b)) for a, b in e.items)  # type: Any
+            return ('Dict',) + rest
+        return None
+
+    def visit_tuple_expr(self, e: TupleExpr) -> Optional[Key]:
+        return self.seq_expr(e, 'Tuple')
+
+    def visit_set_expr(self, e: SetExpr) -> Optional[Key]:
+        return self.seq_expr(e, 'Set')
+
+    def visit_index_expr(self, e: IndexExpr) -> Optional[Key]:
+        if literal(e.index) == LITERAL_YES:
+            return ('Index', literal_hash(e.base), literal_hash(e.index))
+        return None
+
+    def visit_call_expr(self, e: CallExpr) -> None:
+        return None
+
+    def visit_slice_expr(self, e: SliceExpr) -> None:
+        return None
+
+    def visit_cast_expr(self, e: CastExpr) -> None:
+        return None
+
+    def visit_conditional_expr(self, e: ConditionalExpr) -> None:
+        return None
+
+    def visit_ellipsis(self, e: EllipsisExpr) -> None:
+        return None
+
+    def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
+        return None
+
+    def visit_yield_expr(self, e: YieldExpr) -> None:
+        return None
+
+    def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None:
+        return None
+
+    def visit_super_expr(self, e: SuperExpr) -> None:
+        return None
+
+    def visit_type_application(self, e: TypeApplication) -> None:
+        return None
+
+    def visit_lambda_expr(self, e: LambdaExpr) -> None:
+        return None
+
+    def visit_list_comprehension(self, e: ListComprehension) -> None:
+        return None
+
+    def visit_set_comprehension(self, e: SetComprehension) -> None:
+        return None
+
+    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None:
+        return None
+
+    def visit_generator_expr(self, e: GeneratorExpr) -> None:
+        return None
+
+    def visit_backquote_expr(self, e: BackquoteExpr) -> None:
+        return None
+
+    def visit_type_var_expr(self, e: TypeVarExpr) -> None:
+        return None
+
+    def visit_type_alias_expr(self, e: TypeAliasExpr) -> None:
+        return None
+
+    def visit_namedtuple_expr(self, e: NamedTupleExpr) -> None:
+        return None
+
+    def visit_enum_call_expr(self, e: EnumCallExpr) -> None:
+        return None
+
+    def visit_typeddict_expr(self, e: TypedDictExpr) -> None:
+        return None
+
+    def visit_newtype_expr(self, e: NewTypeExpr) -> None:
+        return None
+
+    def visit__promote_expr(self, e: PromoteExpr) -> None:
+        return None
+
+    def visit_await_expr(self, e: AwaitExpr) -> None:
+        return None
+
+    def visit_temp_node(self, e: TempNode) -> None:
+        return None
+
+
+_hasher = _Hasher()
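
Note: the "[Note Literals and literal_hash]" comment above describes literal_hash
keys as value-comparable nested tuples that mirror the expression structure. A small
standalone sketch of the Key the hasher produces for the expression `x.y[0]`, written
out by hand rather than via mypy's AST classes:

    from typing import Any, Tuple

    Key = Tuple[Any, ...]

    # For `x.y[0]` the visitors above compose as follows:
    #   NameExpr 'x'     -> ('Var', 'x')
    #   MemberExpr '.y'  -> ('Member', <key of x>, 'y')
    #   IntExpr 0        -> ('Literal', 0)
    #   IndexExpr '[0]'  -> ('Index', <key of x.y>, <key of 0>)
    key_x = ('Var', 'x')                      # type: Key
    key_x_y = ('Member', key_x, 'y')          # type: Key
    key_zero = ('Literal', 0)                 # type: Key
    key_x_y_0 = ('Index', key_x_y, key_zero)  # type: Key

    # Keys compare by value, so syntactically equal expressions get equal keys,
    # which is what allows them to be used as dictionary keys (as the note explains).
    assert key_x_y_0 == ('Index', ('Member', ('Var', 'x'), 'y'), ('Literal', 0))
    print(key_x_y_0)
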
diff --git a/mypy/main.py b/mypy/main.py
index 4e59f68..b194dbe 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -28,7 +28,7 @@ class InvalidPackageName(Exception):
     """Exception indicating that a package name was invalid."""
 
 
-def main(script_path: Optional[str], args: List[str] = None) -> None:
+def main(script_path: Optional[str], args: Optional[List[str]] = None) -> None:
     """Main entry point to the type checker.
 
     Args:
@@ -38,7 +38,7 @@ def main(script_path: Optional[str], args: List[str] = None) -> None:
     """
     t0 = time.time()
     if script_path:
-        bin_dir = find_bin_directory(script_path)
+        bin_dir = find_bin_directory(script_path)  # type: Optional[str]
     else:
         bin_dir = None
     sys.setrecursionlimit(2 ** 14)
@@ -53,6 +53,11 @@ def main(script_path: Optional[str], args: List[str] = None) -> None:
         a = e.messages
         if not e.use_stdout:
             serious = True
+    if options.warn_unused_configs and options.unused_configs:
+        print("Warning: unused section(s) in %s: %s" %
+              (options.config_file,
+               ", ".join("[mypy-%s]" % glob for glob in options.unused_configs.values())),
+              file=sys.stderr)
     if options.junit_xml:
         t1 = time.time()
         util.write_junit_xml(t1 - t0, serious, a, options.junit_xml)
@@ -90,8 +95,9 @@ def readlinkabs(link: str) -> str:
     return os.path.join(os.path.dirname(link), path)
 
 
-def type_check_only(sources: List[BuildSource], bin_dir: str, options: Options) -> BuildResult:
-    # Type-check the program and dependencies and translate to Python.
+def type_check_only(sources: List[BuildSource], bin_dir: Optional[str],
+                    options: Options) -> BuildResult:
+    # Type-check the program and dependencies.
     return build.build(sources=sources,
                        bin_dir=bin_dir,
                        options=options)
@@ -152,9 +158,10 @@ def parse_version(v: str) -> Tuple[int, int]:
             raise argparse.ArgumentTypeError(
                 "Python 2.{} is not supported (must be 2.7)".format(minor))
     elif major == 3:
-        if minor <= 2:
+        if minor < defaults.PYTHON3_VERSION_MIN[1]:
             raise argparse.ArgumentTypeError(
-                "Python 3.{} is not supported (must be 3.3 or higher)".format(minor))
+                "Python 3.{0} is not supported (must be {1}.{2} or higher)".format(minor,
+                                                                    *defaults.PYTHON3_VERSION_MIN))
     else:
         raise argparse.ArgumentTypeError(
             "Python major version '{}' out of range (must be 2 or 3)".format(major))
@@ -163,7 +170,7 @@ def parse_version(v: str) -> Tuple[int, int]:
 
 # Make the help output a little less jarring.
 class AugmentedHelpFormatter(argparse.HelpFormatter):
-    def __init__(self, prog: Optional[str]) -> None:
+    def __init__(self, prog: str) -> None:
         super().__init__(prog=prog, max_help_position=28)
 
 
@@ -204,9 +211,9 @@ def process_options(args: List[str],
 
     def add_invertible_flag(flag: str,
                             *,
-                            inverse: str = None,
+                            inverse: Optional[str] = None,
                             default: bool,
-                            dest: str = None,
+                            dest: Optional[str] = None,
                             help: str,
                             strict_flag: bool = False
                             ) -> None:
@@ -226,6 +233,7 @@ def process_options(args: List[str],
                                   dest=dest,
                                   help=argparse.SUPPRESS)
         if strict_flag:
+            assert dest is not None
             strict_flag_names.append(flag)
             strict_flag_assignments.append((dest, not default))
 
@@ -258,6 +266,8 @@ def process_options(args: List[str],
     add_invertible_flag('--disallow-untyped-defs', default=False, strict_flag=True,
                         help="disallow defining functions without type annotations"
                         " or with incomplete type annotations")
+    add_invertible_flag('--disallow-incomplete-defs', default=False, strict_flag=True,
+                        help="disallow defining functions with incomplete type annotations")
     add_invertible_flag('--check-untyped-defs', default=False, strict_flag=True,
                         help="type check the interior of functions without type annotations")
     add_invertible_flag('--disallow-subclassing-any', default=False, strict_flag=True,
@@ -265,6 +275,8 @@ def process_options(args: List[str],
     add_invertible_flag('--warn-incomplete-stub', default=False,
                         help="warn if missing type annotation in typeshed, only relevant with"
                         " --check-untyped-defs enabled")
+    add_invertible_flag('--disallow-untyped-decorators', default=False, strict_flag=True,
+                        help="disallow decorating typed functions with untyped decorators")
     add_invertible_flag('--warn-redundant-casts', default=False, strict_flag=True,
                         help="warn about casting an expression to its inferred type")
     add_invertible_flag('--no-warn-no-return', dest='warn_no_return', default=True,
@@ -274,6 +286,8 @@ def process_options(args: List[str],
                              " from non-Any typed functions")
     add_invertible_flag('--warn-unused-ignores', default=False, strict_flag=True,
                         help="warn about unneeded '# type: ignore' comments")
+    add_invertible_flag('--warn-unused-configs', default=False, strict_flag=True,
+                        help="warn about unused '[mypy-<pattern>]' config sections")
     add_invertible_flag('--show-error-context', default=False,
                         dest='show_error_context',
                         help='Precede errors with "note:" messages explaining context')
@@ -454,6 +468,7 @@ def process_options(args: List[str],
         experiments.STRICT_OPTIONAL = True
     if special_opts.find_occurrences:
         experiments.find_occurrences = special_opts.find_occurrences.split('.')
+        assert experiments.find_occurrences is not None
         if len(experiments.find_occurrences) < 2:
             parser.error("Can only find occurrences of class members.")
         if len(experiments.find_occurrences) != 2:
@@ -634,9 +649,8 @@ def parse_config_file(options: Options, filename: Optional[str]) -> None:
     If filename is None, fall back to default config file and then
     to setup.cfg.
     """
-    config_files = None  # type: Tuple[str, ...]
     if filename is not None:
-        config_files = (filename,)
+        config_files = (filename,)  # type: Tuple[str, ...]
     else:
         config_files = (defaults.CONFIG_FILE,) + SHARED_CONFIG_FILES
 
@@ -651,6 +665,7 @@ def parse_config_file(options: Options, filename: Optional[str]) -> None:
             print("%s: %s" % (config_file, err), file=sys.stderr)
         else:
             file_read = config_file
+            options.config_file = file_read
             break
     else:
         return
@@ -687,6 +702,7 @@ def parse_config_file(options: Options, filename: Optional[str]) -> None:
                     glob = glob.replace(os.altsep, '.')
                 pattern = re.compile(fnmatch.translate(glob))
                 options.per_module_options[pattern] = updates
+                options.unused_configs[pattern] = glob
 
 
 def parse_section(prefix: str, template: Options,
@@ -698,6 +714,7 @@ def parse_section(prefix: str, template: Options,
     results = {}  # type: Dict[str, object]
     report_dirs = {}  # type: Dict[str, str]
     for key in section:
+        orig_key = key
         key = key.replace('-', '_')
         if key in config_types:
             ct = config_types[key]
@@ -707,12 +724,12 @@ def parse_section(prefix: str, template: Options,
                 if key.endswith('_report'):
                     report_type = key[:-7].replace('_', '-')
                     if report_type in reporter_classes:
-                        report_dirs[report_type] = section.get(key)
+                        report_dirs[report_type] = section[orig_key]
                     else:
-                        print("%s: Unrecognized report type: %s" % (prefix, key),
+                        print("%s: Unrecognized report type: %s" % (prefix, orig_key),
                               file=sys.stderr)
                     continue
-                print("%s: Unrecognized option: %s = %s" % (prefix, key, section[key]),
+                print("%s: Unrecognized option: %s = %s" % (prefix, key, section[orig_key]),
                       file=sys.stderr)
                 continue
             ct = type(dv)
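
Note: parse_config_file now records every [mypy-<glob>] section in
options.unused_configs so that the new --warn-unused-configs flag can report sections
that never applied to any module. A minimal standalone sketch of how a section name
becomes a compiled per-module pattern; register_section and its arguments are invented
for illustration, the option values are placeholders, and the clearing of matched
entries happens elsewhere in mypy and is not shown in this diff:

    import fnmatch
    import re
    from typing import Dict, Pattern


    def register_section(name: str,
                         updates: Dict[str, object],
                         per_module_options: Dict[Pattern[str], Dict[str, object]],
                         unused_configs: Dict[Pattern[str], str]) -> None:
        # '[mypy-somepkg.*]' -> glob 'somepkg.*' -> regex matched against module names.
        assert name.startswith('mypy-')
        glob = name[len('mypy-'):]
        pattern = re.compile(fnmatch.translate(glob))
        per_module_options[pattern] = updates
        # Remembered so the leftovers can be printed by --warn-unused-configs;
        # entries that do match a module are presumably dropped elsewhere.
        unused_configs[pattern] = glob


    per_module = {}  # type: Dict[Pattern[str], Dict[str, object]]
    unused = {}      # type: Dict[Pattern[str], str]
    register_section('mypy-somepkg.*', {'ignore_errors': True}, per_module, unused)
    print(list(unused.values()))  # ['somepkg.*']
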
diff --git a/mypy/maptype.py b/mypy/maptype.py
index cdffd74..ed681c8 100644
--- a/mypy/maptype.py
+++ b/mypy/maptype.py
@@ -2,7 +2,7 @@ from typing import Dict, List
 
 from mypy.expandtype import expand_type
 from mypy.nodes import TypeInfo
-from mypy.types import Type, TypeVarId, Instance, AnyType
+from mypy.types import Type, TypeVarId, Instance, AnyType, TypeOfAny
 
 
 def map_instance_to_supertype(instance: Instance,
@@ -40,7 +40,8 @@ def map_instance_to_supertypes(instance: Instance,
         return result
     else:
         # Nothing. Presumably due to an error. Construct a dummy using Any.
-        return [Instance(supertype, [AnyType()] * len(supertype.type_vars))]
+        any_type = AnyType(TypeOfAny.from_error)
+        return [Instance(supertype, [any_type] * len(supertype.type_vars))]
 
 
 def class_derivation_paths(typ: TypeInfo,
@@ -86,7 +87,8 @@ def map_instance_to_direct_supertypes(instance: Instance,
     else:
         # Relationship with the supertype not specified explicitly. Use dynamic
         # type arguments implicitly.
-        return [Instance(supertype, [AnyType()] * len(supertype.type_vars))]
+        any_type = AnyType(TypeOfAny.unannotated)
+        return [Instance(supertype, [any_type] * len(supertype.type_vars))]
 
 
 def instance_to_type_environment(instance: Instance) -> Dict[TypeVarId, Type]:
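
Note: a pattern that runs through this whole commit is that AnyType() now takes a
TypeOfAny argument recording why the Any exists; only unannotated, from_error and
special_form appear in this diff, and the real enumeration in mypy.types may have more
members. A toy sketch of the idea (TypeOfAnySketch and AnyTypeSketch are invented
names, not mypy code):

    class TypeOfAnySketch:
        """Toy stand-in for mypy.types.TypeOfAny: a tag saying why an Any exists."""
        unannotated = 'unannotated'    # no annotation was written
        from_error = 'from_error'      # produced while recovering from an error
        special_form = 'special_form'  # internal placeholder (implicit self, dummy rvalue, ...)


    class AnyTypeSketch:
        def __init__(self, type_of_any: str) -> None:
            # Forcing every construction site to say which kind of Any it means is
            # what makes the different sources of Any distinguishable later on.
            self.type_of_any = type_of_any


    dummy = AnyTypeSketch(TypeOfAnySketch.from_error)
    print(dummy.type_of_any)  # from_error
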
diff --git a/mypy/meet.py b/mypy/meet.py
index a3b59c2..3e883b5 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -3,11 +3,11 @@ from typing import List, Optional, cast, Tuple
 
 from mypy.join import is_similar_callables, combine_similar_callables, join_type_list
 from mypy.types import (
-    Type, AnyType, TypeVisitor, UnboundType, NoneTyp, TypeVarType,
-    Instance, CallableType, TupleType, TypedDictType, ErasedType, TypeList, UnionType, PartialType,
-    DeletedType, UninhabitedType, TypeType
+    Type, AnyType, TypeVisitor, UnboundType, NoneTyp, TypeVarType, Instance, CallableType,
+    TupleType, TypedDictType, ErasedType, TypeList, UnionType, PartialType, DeletedType,
+    UninhabitedType, TypeType, TypeOfAny
 )
-from mypy.subtypes import is_equivalent, is_subtype
+from mypy.subtypes import is_equivalent, is_subtype, is_protocol_implementation
 
 from mypy import experiments
 
@@ -44,6 +44,8 @@ def narrow_declared_type(declared: Type, narrowed: Type) -> Type:
         return narrowed
     elif isinstance(declared, (Instance, TupleType)):
         return meet_types(declared, narrowed)
+    elif isinstance(declared, TypeType) and isinstance(narrowed, TypeType):
+        return TypeType.make_normalized(narrow_declared_type(declared.item, narrowed.item))
     return narrowed
 
 
@@ -52,7 +54,8 @@ def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool
 
     Note that this effectively checks against erased types, since type
     variables are erased at runtime and the overlapping check is based
-    on runtime behavior.
+    on runtime behavior. The exception is protocol types: this is not safe,
+    but it is convenient and is opt-in behavior.
 
     If use_promotions is True, also consider type promotions (int and
     float would only be overlapping if it's True).
@@ -100,7 +103,13 @@ def is_overlapping_types(t: Type, s: Type, use_promotions: bool = False) -> bool
                     return True
                 if s.type._promote and is_overlapping_types(s.type._promote, t):
                     return True
-            return t.type in s.type.mro or s.type in t.type.mro
+            if t.type in s.type.mro or s.type in t.type.mro:
+                return True
+            if t.type.is_protocol and is_protocol_implementation(s, t):
+                return True
+            if s.type.is_protocol and is_protocol_implementation(t, s):
+                return True
+            return False
     if isinstance(t, UnionType):
         return any(is_overlapping_types(item, s)
                    for item in t.relevant_items())
@@ -135,13 +144,13 @@ class TypeMeetVisitor(TypeVisitor[Type]):
     def visit_unbound_type(self, t: UnboundType) -> Type:
         if isinstance(self.s, NoneTyp):
             if experiments.STRICT_OPTIONAL:
-                return AnyType()
+                return AnyType(TypeOfAny.special_form)
             else:
                 return self.s
         elif isinstance(self.s, UninhabitedType):
             return self.s
         else:
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
 
     def visit_any(self, t: AnyType) -> Type:
         return self.s
@@ -296,7 +305,7 @@ class TypeMeetVisitor(TypeVisitor[Type]):
 
     def default(self, typ: Type) -> Type:
         if isinstance(typ, UnboundType):
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
         else:
             if experiments.STRICT_OPTIONAL:
                 return UninhabitedType()
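
Note: the extra checks in is_overlapping_types mean that two nominally unrelated
classes now count as overlapping when one of them is a protocol the other implements,
as the updated docstring warns. A toy restatement of the Instance/Instance case;
instances_overlap and its callback parameters are invented, and promotions and the
other branches are omitted:

    from typing import Callable, Type


    def instances_overlap(t: Type[object], s: Type[object],
                          is_protocol: Callable[[Type[object]], bool],
                          implements: Callable[[Type[object], Type[object]], bool]) -> bool:
        # Nominal overlap: one class appears in the other's MRO.
        if t in s.__mro__ or s in t.__mro__:
            return True
        # New structural overlap: a protocol overlaps with its implementations.
        if is_protocol(t) and implements(s, t):
            return True
        if is_protocol(s) and implements(t, s):
            return True
        return False


    class Closeable:   # stands in for a protocol class
        def close(self) -> None: ...


    class Resource:    # nominally unrelated, but has a compatible close()
        def close(self) -> None: ...


    print(instances_overlap(Resource, Closeable,
                            is_protocol=lambda c: c is Closeable,
                            implements=lambda impl, proto: hasattr(impl, 'close')))
    # True only because of the structural branch added in this commit.
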
diff --git a/mypy/messages.py b/mypy/messages.py
index 6375f4d..bdde2e1 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -1,24 +1,31 @@
 """Facilities and constants for generating error messages during type checking.
 
-The type checker itself does not deal with message string literals to
-improve code clarity and to simplify localization (in the future)."""
+Don't add any non-trivial message construction logic to the type
+checker, as it can compromise clarity and make messages less
+consistent. Add such logic to this module instead. Literal messages used
+in multiple places should also be defined as constants in this module so
+they won't get out of sync.
+
+Historically we tried to avoid all message string literals in the type
+checker but we are moving away from this convention.
+"""
 
 import re
 import difflib
 
-from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple, Optional
+from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple, Set, Optional, Union
 
 from mypy.erasetype import erase_type
 from mypy.errors import Errors
 from mypy.types import (
     Type, CallableType, Instance, TypeVarType, TupleType, TypedDictType,
     UnionType, NoneTyp, AnyType, Overloaded, FunctionLike, DeletedType, TypeType,
-    UninhabitedType
+    UninhabitedType, TypeOfAny, ForwardRef, UnboundType
 )
 from mypy.nodes import (
     TypeInfo, Context, MypyFile, op_methods, FuncDef, reverse_type_aliases,
     ARG_POS, ARG_OPT, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2,
-    ReturnStmt, NameExpr, Var
+    ReturnStmt, NameExpr, Var, CONTRAVARIANT, COVARIANT
 )
 
 
@@ -29,7 +36,6 @@ NO_RETURN_VALUE_EXPECTED = 'No return value expected'
 MISSING_RETURN_STATEMENT = 'Missing return statement'
 INVALID_IMPLICIT_RETURN = 'Implicit return in function which does not return'
 INCOMPATIBLE_RETURN_VALUE_TYPE = 'Incompatible return value type'
-RETURN_ANY = 'Returning Any from function with declared return type "{}"'
 RETURN_VALUE_EXPECTED = 'Return value expected'
 NO_RETURN_EXPECTED = 'Return statement in function which does not return'
 INVALID_EXCEPTION = 'Exception must be derived from BaseException'
@@ -45,12 +51,12 @@ YIELD_VALUE_EXPECTED = 'Yield value expected'
 INCOMPATIBLE_TYPES = 'Incompatible types'
 INCOMPATIBLE_TYPES_IN_ASSIGNMENT = 'Incompatible types in assignment'
 INCOMPATIBLE_REDEFINITION = 'Incompatible redefinition'
-INCOMPATIBLE_TYPES_IN_AWAIT = 'Incompatible types in await'
-INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER = 'Incompatible types in "async with" for __aenter__'
-INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT = 'Incompatible types in "async with" for __aexit__'
+INCOMPATIBLE_TYPES_IN_AWAIT = 'Incompatible types in "await"'
+INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER = 'Incompatible types in "async with" for "__aenter__"'
+INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT = 'Incompatible types in "async with" for "__aexit__"'
 INCOMPATIBLE_TYPES_IN_ASYNC_FOR = 'Incompatible types in "async for"'
 
-INCOMPATIBLE_TYPES_IN_YIELD = 'Incompatible types in yield'
+INCOMPATIBLE_TYPES_IN_YIELD = 'Incompatible types in "yield"'
 INCOMPATIBLE_TYPES_IN_YIELD_FROM = 'Incompatible types in "yield from"'
 INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION = 'Incompatible types in string interpolation'
 MUST_HAVE_NONE_RETURN_TYPE = 'The return type of "{}" must be None'
@@ -92,6 +98,8 @@ CANNOT_ISINSTANCE_TYPEDDICT = 'Cannot use isinstance() with a TypedDict type'
 CANNOT_ISINSTANCE_NEWTYPE = 'Cannot use isinstance() with a NewType type'
 BARE_GENERIC = 'Missing type parameters for generic type'
 IMPLICIT_GENERIC_ANY_BUILTIN = 'Implicit generic "Any". Use \'{}\' and specify generic parameters'
+INCOMPATIBLE_TYPEVAR_VALUE = 'Value of type variable "{}" of {} cannot be {}'
+UNSUPPORTED_ARGUMENT_2_FOR_SUPER = 'Unsupported argument 2 for "super"'
 
 ARG_CONSTRUCTOR_NAMES = {
     ARG_POS: "Arg",
@@ -157,94 +165,60 @@ class MessageBuilder:
     def is_errors(self) -> bool:
         return self.errors.is_errors()
 
-    def report(self, msg: str, context: Context, severity: str,
-               file: str = None, origin: Context = None) -> None:
+    def report(self, msg: str, context: Optional[Context], severity: str,
+               file: Optional[str] = None, origin: Optional[Context] = None,
+               offset: int = 0) -> None:
         """Report an error or note (unless disabled)."""
         if self.disable_count <= 0:
             self.errors.report(context.get_line() if context else -1,
                                context.get_column() if context else -1,
-                               msg.strip(), severity=severity, file=file,
+                               msg.strip(), severity=severity, file=file, offset=offset,
                                origin_line=origin.get_line() if origin else None)
 
-    def fail(self, msg: str, context: Context, file: str = None,
-             origin: Context = None) -> None:
+    def fail(self, msg: str, context: Optional[Context], file: Optional[str] = None,
+             origin: Optional[Context] = None) -> None:
         """Report an error message (unless disabled)."""
         self.report(msg, context, 'error', file=file, origin=origin)
 
-    def note(self, msg: str, context: Context, file: str = None,
-             origin: Context = None) -> None:
+    def note(self, msg: str, context: Context, file: Optional[str] = None,
+             origin: Optional[Context] = None, offset: int = 0) -> None:
         """Report a note (unless disabled)."""
-        self.report(msg, context, 'note', file=file, origin=origin)
+        self.report(msg, context, 'note', file=file, origin=origin, offset=offset)
 
-    def warn(self, msg: str, context: Context, file: str = None,
-             origin: Context = None) -> None:
+    def warn(self, msg: str, context: Context, file: Optional[str] = None,
+             origin: Optional[Context] = None) -> None:
         """Report a warning message (unless disabled)."""
         self.report(msg, context, 'warning', file=file, origin=origin)
 
-    def format(self, typ: Type, verbosity: int = 0) -> str:
-        """Convert a type to a relatively short string that is suitable for error messages.
+    def quote_type_string(self, type_string: str) -> str:
+        """Quotes a type representation for use in messages."""
+        no_quote_regex = r'^<(tuple|union): \d+ items>$'
+        if (type_string in ['Module', 'overloaded function', '<nothing>', '<deleted>']
+                or re.match(no_quote_regex, type_string) is not None or type_string.endswith('?')):
+            # Messages are easier to read if these aren't quoted.  We use a
+            # regex to match strings with variable contents.
+            return type_string
+        return '"{}"'.format(type_string)
 
-        Mostly behave like format_simple below, but never return an empty string.
+    def format(self, typ: Type, verbosity: int = 0) -> str:
         """
-        s = self.format_simple(typ, verbosity)
-        if s != '':
-            # If format_simple returns a non-trivial result, use that.
-            return s
-        elif isinstance(typ, FunctionLike):
-            func = typ
-            if func.is_type_obj():
-                # The type of a type object type can be derived from the
-                # return type (this always works).
-                return self.format(TypeType.make_normalized(erase_type(func.items()[0].ret_type)),
-                                   verbosity)
-            elif isinstance(func, CallableType):
-                return_type = strip_quotes(self.format(func.ret_type))
-                if func.is_ellipsis_args:
-                    return 'Callable[..., {}]'.format(return_type)
-                arg_strings = []
-                for arg_name, arg_type, arg_kind in zip(
-                        func.arg_names, func.arg_types, func.arg_kinds):
-                    if (arg_kind == ARG_POS and arg_name is None
-                            or verbosity == 0 and arg_kind in (ARG_POS, ARG_OPT)):
-
-                        arg_strings.append(
-                            strip_quotes(
-                                self.format(
-                                    arg_type,
-                                    verbosity = max(verbosity - 1, 0))))
-                    else:
-                        constructor = ARG_CONSTRUCTOR_NAMES[arg_kind]
-                        if arg_kind in (ARG_STAR, ARG_STAR2) or arg_name is None:
-                            arg_strings.append("{}({})".format(
-                                constructor,
-                                strip_quotes(self.format(arg_type))))
-                        else:
-                            arg_strings.append("{}({}, {})".format(
-                                constructor,
-                                strip_quotes(self.format(arg_type)),
-                                repr(arg_name)))
-
-                return 'Callable[[{}], {}]'.format(", ".join(arg_strings), return_type)
-            else:
-                # Use a simple representation for function types; proper
-                # function types may result in long and difficult-to-read
-                # error messages.
-                return 'overloaded function'
-        else:
-            # Default case; we simply have to return something meaningful here.
-            return 'object'
+        Convert a type to a relatively short string suitable for error messages.
 
-    def format_simple(self, typ: Type, verbosity: int = 0) -> str:
-        """Convert simple types to string that is suitable for error messages.
+        This method returns a string appropriate for unmodified use in error
+        messages; this means that it will be quoted in most cases.  If
+        modification of the formatted string is required, callers should use
+        .format_bare.
+        """
+        return self.quote_type_string(self.format_bare(typ, verbosity))
 
-        Return "" for complex types. Try to keep the length of the result
-        relatively short to avoid overly long error messages.
+    def format_bare(self, typ: Type, verbosity: int = 0) -> str:
+        """
+        Convert a type to a relatively short string suitable for error messages.
 
-        Examples:
-          builtins.int -> 'int'
-          Any type -> 'Any'
-          None -> None
-          callable type -> "" (empty string)
+        This method will return an unquoted string.  If a caller doesn't need to
+        perform post-processing on the string output, .format should be used
+        instead.  (The caller may want to use .quote_type_string after
+        processing has happened, to maintain consistent quoting in messages.)
         """
         if isinstance(typ, Instance):
             itype = typ
@@ -258,26 +232,22 @@ class MessageBuilder:
             else:
                 base_str = itype.type.name()
             if itype.args == []:
-                # No type arguments. Place the type name in quotes to avoid
-                # potential for confusion: otherwise, the type name could be
-                # interpreted as a normal word.
-                return '"{}"'.format(base_str)
+                # No type arguments, just return the type name
+                return base_str
             elif itype.type.fullname() == 'builtins.tuple':
-                item_type_str = strip_quotes(self.format(itype.args[0]))
+                item_type_str = self.format_bare(itype.args[0])
                 return 'Tuple[{}, ...]'.format(item_type_str)
             elif itype.type.fullname() in reverse_type_aliases:
                 alias = reverse_type_aliases[itype.type.fullname()]
                 alias = alias.split('.')[-1]
-                items = [strip_quotes(self.format(arg)) for arg in itype.args]
+                items = [self.format_bare(arg) for arg in itype.args]
                 return '{}[{}]'.format(alias, ', '.join(items))
             else:
-                # There are type arguments. Convert the arguments to strings
-                # (using format() instead of format_simple() to avoid empty
-                # strings). If the result is too long, replace arguments
-                # with [...].
+                # There are type arguments. Convert the arguments to strings.
+                # If the result is too long, replace arguments with [...].
                 a = []  # type: List[str]
                 for arg in itype.args:
-                    a.append(strip_quotes(self.format(arg)))
+                    a.append(self.format_bare(arg))
                 s = ', '.join(a)
                 if len((base_str + s)) < 150:
                     return '{}[{}]'.format(base_str, s)
@@ -285,30 +255,30 @@ class MessageBuilder:
                     return '{}[...]'.format(base_str)
         elif isinstance(typ, TypeVarType):
             # This is similar to non-generic instance types.
-            return '"{}"'.format(typ.name)
+            return typ.name
         elif isinstance(typ, TupleType):
             # Prefer the name of the fallback class (if not tuple), as it's more informative.
             if typ.fallback.type.fullname() != 'builtins.tuple':
-                return self.format_simple(typ.fallback)
+                return self.format_bare(typ.fallback)
             items = []
             for t in typ.items:
-                items.append(strip_quotes(self.format(t)))
-            s = '"Tuple[{}]"'.format(', '.join(items))
+                items.append(self.format_bare(t))
+            s = 'Tuple[{}]'.format(', '.join(items))
             if len(s) < 400:
                 return s
             else:
-                return 'tuple(length {})'.format(len(items))
+                return '<tuple: {} items>'.format(len(items))
         elif isinstance(typ, TypedDictType):
             # If the TypedDictType is named, return the name
             if not typ.is_anonymous():
-                return self.format_simple(typ.fallback)
+                return self.format_bare(typ.fallback)
             items = []
             for (item_name, item_type) in typ.items.items():
                 modifier = '' if item_name in typ.required_keys else '?'
                 items.append('{!r}{}: {}'.format(item_name,
                                                  modifier,
-                                                 strip_quotes(self.format(item_type))))
-            s = '"TypedDict({{{}}})"'.format(', '.join(items))
+                                                 self.format_bare(item_type)))
+            s = 'TypedDict({{{}}})'.format(', '.join(items))
             return s
         elif isinstance(typ, UnionType):
             # Only print Unions as Optionals if the Optional wouldn't have to contain another Union
@@ -316,20 +286,20 @@ class MessageBuilder:
                                  sum(isinstance(t, NoneTyp) for t in typ.items) == 1)
             if print_as_optional:
                 rest = [t for t in typ.items if not isinstance(t, NoneTyp)]
-                return '"Optional[{}]"'.format(strip_quotes(self.format(rest[0])))
+                return 'Optional[{}]'.format(self.format_bare(rest[0]))
             else:
                 items = []
                 for t in typ.items:
-                    items.append(strip_quotes(self.format(t)))
-                s = '"Union[{}]"'.format(', '.join(items))
+                    items.append(self.format_bare(t))
+                s = 'Union[{}]'.format(', '.join(items))
                 if len(s) < 400:
                     return s
                 else:
-                    return 'union type ({} items)'.format(len(items))
+                    return '<union: {} items>'.format(len(items))
         elif isinstance(typ, NoneTyp):
             return 'None'
         elif isinstance(typ, AnyType):
-            return '"Any"'
+            return 'Any'
         elif isinstance(typ, DeletedType):
             return '<deleted>'
         elif isinstance(typ, UninhabitedType):
@@ -338,25 +308,76 @@ class MessageBuilder:
             else:
                 return '<nothing>'
         elif isinstance(typ, TypeType):
-            return 'Type[{}]'.format(
-                strip_quotes(self.format_simple(typ.item, verbosity)))
+            return 'Type[{}]'.format(self.format_bare(typ.item, verbosity))
+        elif isinstance(typ, ForwardRef):  # may appear in semanal.py
+            return self.format_bare(typ.link, verbosity)
+        elif isinstance(typ, FunctionLike):
+            func = typ
+            if func.is_type_obj():
+                # The type of a type object type can be derived from the
+                # return type (this always works).
+                return self.format_bare(
+                    TypeType.make_normalized(
+                        erase_type(func.items()[0].ret_type)),
+                    verbosity)
+            elif isinstance(func, CallableType):
+                return_type = self.format_bare(func.ret_type)
+                if func.is_ellipsis_args:
+                    return 'Callable[..., {}]'.format(return_type)
+                arg_strings = []
+                for arg_name, arg_type, arg_kind in zip(
+                        func.arg_names, func.arg_types, func.arg_kinds):
+                    if (arg_kind == ARG_POS and arg_name is None
+                            or verbosity == 0 and arg_kind in (ARG_POS, ARG_OPT)):
+
+                        arg_strings.append(
+                            self.format_bare(
+                                arg_type,
+                                verbosity = max(verbosity - 1, 0)))
+                    else:
+                        constructor = ARG_CONSTRUCTOR_NAMES[arg_kind]
+                        if arg_kind in (ARG_STAR, ARG_STAR2) or arg_name is None:
+                            arg_strings.append("{}({})".format(
+                                constructor,
+                                self.format_bare(arg_type)))
+                        else:
+                            arg_strings.append("{}({}, {})".format(
+                                constructor,
+                                self.format_bare(arg_type),
+                                repr(arg_name)))
+
+                return 'Callable[[{}], {}]'.format(", ".join(arg_strings), return_type)
+            else:
+                # Use a simple representation for function types; proper
+                # function types may result in long and difficult-to-read
+                # error messages.
+                return 'overloaded function'
+        elif isinstance(typ, UnboundType):
+            return str(typ)
         elif typ is None:
             raise RuntimeError('Type is None')
         else:
-            # No simple representation for this type that would convey very
-            # useful information. No need to mention the type explicitly in a
-            # message.
-            return ''
+            # Default case; we simply have to return something meaningful here.
+            return 'object'
 
-    def format_distinctly(self, type1: Type, type2: Type) -> Tuple[str, str]:
+    def format_distinctly(self, type1: Type, type2: Type, bare: bool = False) -> Tuple[str, str]:
         """Jointly format a pair of types to distinct strings.
 
         Increase the verbosity of the type strings until they become distinct.
+
+        By default, the returned strings are created using .format() and will be
+        quoted accordingly. If ``bare`` is True, the returned strings will not
+        be quoted; callers who need to do post-processing of the strings before
+        quoting them (such as prepending * or **) should use this.
         """
+        if bare:
+            format_method = self.format_bare
+        else:
+            format_method = self.format
         verbosity = 0
         for verbosity in range(3):
-            str1 = self.format(type1, verbosity=verbosity)
-            str2 = self.format(type2, verbosity=verbosity)
+            str1 = format_method(type1, verbosity=verbosity)
+            str2 = format_method(type2, verbosity=verbosity)
             if str1 != str2:
                 return (str1, str2)
         return (str1, str2)
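
The disambiguation loop above can be pictured with a small standalone sketch
(the render callable here is hypothetical and merely stands in for
self.format / self.format_bare):

    def distinct(render, t1, t2):
        # Ask for progressively more verbose renderings until they differ.
        s1 = s2 = ''
        for verbosity in range(3):
            s1, s2 = render(t1, verbosity), render(t2, verbosity)
            if s1 != s2:
                break
        return s1, s2

    # e.g. two classes that both display as "A" at verbosity 0 only become
    # distinct once the renderer includes the module prefix at higher verbosity.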
@@ -447,7 +468,7 @@ class MessageBuilder:
                     typ_format = '"None"'
                 self.fail('Item {} of {} has no attribute "{}"'.format(
                     typ_format, self.format(original_type), member), context)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
     def unsupported_operand_types(self, op: str, left_type: Any,
                                   right_type: Any, context: Context) -> None:
@@ -485,12 +506,12 @@ class MessageBuilder:
 
     def not_callable(self, typ: Type, context: Context) -> Type:
         self.fail('{} not callable'.format(self.format(typ)), context)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
     def untyped_function_call(self, callee: CallableType, context: Context) -> Type:
         name = callee.name if callee.name is not None else '(unknown)'
         self.fail('Call to untyped function {} in typed context'.format(name), context)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
     def incompatible_argument(self, n: int, m: int, callee: CallableType, arg_type: Type,
                               arg_kind: int, context: Context) -> None:
@@ -540,45 +561,83 @@ class MessageBuilder:
             target = 'to {} '.format(name)
 
         msg = ''
+        notes = []  # type: List[str]
         if callee.name == '<list>':
             name = callee.name[1:-1]
             n -= 1
-            msg = '{} item {} has incompatible type {}'.format(
-                name.title(), n, self.format(arg_type))
+            actual_type_str, expected_type_str = self.format_distinctly(arg_type,
+                                                                        callee.arg_types[0])
+            msg = '{} item {} has incompatible type {}; expected {}'.format(
+                name.title(), n, actual_type_str, expected_type_str)
         elif callee.name == '<dict>':
             name = callee.name[1:-1]
             n -= 1
             key_type, value_type = cast(TupleType, arg_type).items
-            msg = '{} entry {} has incompatible type {}: {}'.format(
-                name.title(), n, self.format(key_type), self.format(value_type))
+            expected_key_type, expected_value_type = cast(TupleType, callee.arg_types[0]).items
+
+            # Don't increase verbosity unless there is a need to do so.
+            from mypy.subtypes import is_subtype
+            if is_subtype(key_type, expected_key_type):
+                key_type_str = self.format(key_type)
+                expected_key_type_str = self.format(expected_key_type)
+            else:
+                key_type_str, expected_key_type_str = self.format_distinctly(
+                    key_type, expected_key_type)
+            if is_subtype(value_type, expected_value_type):
+                value_type_str = self.format(value_type)
+                expected_value_type_str = self.format(expected_value_type)
+            else:
+                value_type_str, expected_value_type_str = self.format_distinctly(
+                    value_type, expected_value_type)
+
+            msg = '{} entry {} has incompatible type {}: {}; expected {}: {}'.format(
+                name.title(), n, key_type_str, value_type_str,
+                expected_key_type_str, expected_value_type_str)
         elif callee.name == '<list-comprehension>':
-            msg = 'List comprehension has incompatible type List[{}]'.format(
-                strip_quotes(self.format(arg_type)))
+            actual_type_str, expected_type_str = map(strip_quotes,
+                                                     self.format_distinctly(arg_type,
+                                                                            callee.arg_types[0]))
+            msg = 'List comprehension has incompatible type List[{}]; expected List[{}]'.format(
+                actual_type_str, expected_type_str)
         elif callee.name == '<set-comprehension>':
-            msg = 'Set comprehension has incompatible type Set[{}]'.format(
-                strip_quotes(self.format(arg_type)))
+            actual_type_str, expected_type_str = map(strip_quotes,
+                                                     self.format_distinctly(arg_type,
+                                                                            callee.arg_types[0]))
+            msg = 'Set comprehension has incompatible type Set[{}]; expected Set[{}]'.format(
+                actual_type_str, expected_type_str)
         elif callee.name == '<dictionary-comprehension>':
+            actual_type_str, expected_type_str = self.format_distinctly(arg_type,
+                                                                        callee.arg_types[n - 1])
             msg = ('{} expression in dictionary comprehension has incompatible type {}; '
                    'expected type {}').format(
                 'Key' if n == 1 else 'Value',
-                self.format(arg_type),
-                self.format(callee.arg_types[n - 1]))
+                actual_type_str,
+                expected_type_str)
         elif callee.name == '<generator>':
-            msg = 'Generator has incompatible item type {}'.format(
-                self.format(arg_type))
+            actual_type_str, expected_type_str = self.format_distinctly(arg_type,
+                                                                        callee.arg_types[0])
+            msg = 'Generator has incompatible item type {}; expected {}'.format(
+                actual_type_str, expected_type_str)
         else:
             try:
                 expected_type = callee.arg_types[m - 1]
             except IndexError:  # Varargs callees
                 expected_type = callee.arg_types[-1]
-            arg_type_str, expected_type_str = self.format_distinctly(arg_type, expected_type)
+            arg_type_str, expected_type_str = self.format_distinctly(
+                arg_type, expected_type, bare=True)
             if arg_kind == ARG_STAR:
                 arg_type_str = '*' + arg_type_str
             elif arg_kind == ARG_STAR2:
                 arg_type_str = '**' + arg_type_str
             msg = 'Argument {} {}has incompatible type {}; expected {}'.format(
-                n, target, arg_type_str, expected_type_str)
+                n, target, self.quote_type_string(arg_type_str),
+                self.quote_type_string(expected_type_str))
+            if isinstance(arg_type, Instance) and isinstance(expected_type, Instance):
+                notes = append_invariance_notes(notes, arg_type, expected_type)
         self.fail(msg, context)
+        if notes:
+            for note_msg in notes:
+                self.note(note_msg, context)
 
     def invalid_index_type(self, index_type: Type, expected_type: Type, base_str: str,
                            context: Context) -> None:
@@ -586,7 +645,7 @@ class MessageBuilder:
             self.format(index_type), base_str, self.format(expected_type)), context)
 
     def too_few_arguments(self, callee: CallableType, context: Context,
-                          argument_names: List[str]) -> None:
+                          argument_names: Optional[Sequence[Optional[str]]]) -> None:
         if (argument_names is not None and not all(k is None for k in argument_names)
                 and len(argument_names) >= 1):
             diff = [k for k in callee.arg_names if k not in argument_names]
@@ -640,7 +699,7 @@ class MessageBuilder:
                   format(capitalize(callable_name(callee)),
                          callee.arg_names[index]), context)
 
-    def does_not_return_value(self, callee_type: Type, context: Context) -> None:
+    def does_not_return_value(self, callee_type: Optional[Type], context: Context) -> None:
         """Report an error about use of an unusable type."""
         name = None  # type: Optional[str]
         if isinstance(callee_type, FunctionLike):
@@ -765,6 +824,15 @@ class MessageBuilder:
     def undefined_in_superclass(self, member: str, context: Context) -> None:
         self.fail('"{}" undefined in superclass'.format(member), context)
 
+    def first_argument_for_super_must_be_type(self, actual: Type, context: Context) -> None:
+        if isinstance(actual, Instance):
+            # Don't include type of instance, because it can look confusingly like a type
+            # object.
+            type_str = 'a non-type instance'
+        else:
+            type_str = self.format(actual)
+        self.fail('Argument 1 for "super" must be a type object; got {}'.format(type_str), context)
+
     def too_few_string_formatting_arguments(self, context: Context) -> None:
         self.fail('Not enough arguments for format string', context)
 
@@ -829,10 +897,13 @@ class MessageBuilder:
         self.fail('Property "{}" defined in "{}" is read-only'.format(
             name, type.name()), context)
 
-    def incompatible_typevar_value(self, callee: CallableType, index: int,
-                                   type: Type, context: Context) -> None:
-        self.fail('Type argument {} of {} has incompatible value {}'.format(
-            index, callable_name(callee), self.format(type)), context)
+    def incompatible_typevar_value(self,
+                                   callee: CallableType,
+                                   typ: Type,
+                                   typevar_name: str,
+                                   context: Context) -> None:
+        self.fail(INCOMPATIBLE_TYPEVAR_VALUE.format(typevar_name, callable_name(callee),
+                                                    self.format(typ)), context)
 
     def overloaded_signatures_overlap(self, index1: int, index2: int,
                                       context: Context) -> None:
@@ -869,7 +940,7 @@ class MessageBuilder:
     def yield_from_invalid_operand_type(self, expr: Type, context: Context) -> Type:
         text = self.format(expr) if self.format(expr) != 'object' else expr
         self.fail('"yield from" can\'t be applied to {}'.format(text), context)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
     def invalid_signature(self, func_type: Type, context: Context) -> None:
         self.fail('Invalid signature "{}"'.format(func_type), context)
@@ -954,6 +1025,11 @@ class MessageBuilder:
             message = 'Expression type contains "Any" (has type {})'.format(self.format(typ))
         self.fail(message, context)
 
+    def incorrectly_returning_any(self, typ: Type, context: Context) -> None:
+        message = 'Returning Any from function declared to return {}'.format(
+            self.format(typ))
+        self.warn(message, context)
+
     def untyped_decorated_function(self, typ: Type, context: Context) -> None:
         if isinstance(typ, AnyType):
             self.fail("Function is untyped after decorator transformation", context)
@@ -961,6 +1037,264 @@ class MessageBuilder:
             self.fail('Type of decorated function contains type "Any" ({})'.format(
                 self.format(typ)), context)
 
+    def typed_function_untyped_decorator(self, func_name: str, context: Context) -> None:
+        self.fail('Untyped decorator makes function "{}" untyped'.format(func_name), context)
+
+    def bad_proto_variance(self, actual: int, tvar_name: str, expected: int,
+                           context: Context) -> None:
+        msg = capitalize("{} type variable '{}' used in protocol where"
+                         " {} one is expected".format(variance_string(actual),
+                                                      tvar_name,
+                                                      variance_string(expected)))
+        self.fail(msg, context)
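
For context, a hypothetical protocol that would be expected to trigger this
message (class and type variable names invented; Protocol is assumed to come
from typing_extensions, as was usual at the time):

    from typing import TypeVar
    from typing_extensions import Protocol

    T = TypeVar('T')                 # invariant by default

    class Box(Protocol[T]):
        def content(self) -> T: ...  # T only appears in return position
    # expected: Invariant type variable 'T' used in protocol where covariant
    # one is expected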
+
+    def concrete_only_assign(self, typ: Type, context: Context) -> None:
+        self.fail("Can only assign concrete classes to a variable of type {}"
+                  .format(self.format(typ)), context)
+
+    def concrete_only_call(self, typ: Type, context: Context) -> None:
+        self.fail("Only concrete class can be given where {} is expected"
+                  .format(self.format(typ)), context)
+
+    def report_non_method_protocol(self, tp: TypeInfo, members: List[str],
+                                   context: Context) -> None:
+        self.fail("Only protocols that don't have non-method members can be"
+                  " used with issubclass()", context)
+        if len(members) < 3:
+            attrs = ', '.join(members)
+            self.note('Protocol "{}" has non-method member(s): {}'
+                      .format(tp.name(), attrs), context)
+
+    def note_call(self, subtype: Type, call: Type, context: Context) -> None:
+        self.note('"{}.__call__" has type {}'.format(self.format_bare(subtype),
+                                                     self.format(call, verbosity=1)), context)
+
+    def report_protocol_problems(self, subtype: Union[Instance, TupleType, TypedDictType],
+                                 supertype: Instance, context: Context) -> None:
+        """Report possible protocol conflicts between 'subtype' and 'supertype'.
+
+        This includes missing members, incompatible types, and incompatible
+        attribute flags, such as settable vs read-only or class variable vs
+        instance variable.
+        """
+        from mypy.subtypes import is_subtype, IS_SETTABLE, IS_CLASSVAR, IS_CLASS_OR_STATIC
+        OFFSET = 4  # Four spaces, so that notes will look like this:
+        # note: 'Cls' is missing following 'Proto' members:
+        # note:     method, attr
+        MAX_ITEMS = 2  # Maximum number of conflicts, missing members, and overloads shown
+        # List of special situations where we don't want to report additional problems
+        exclusions = {TypedDictType: ['typing.Mapping'],
+                      TupleType: ['typing.Iterable', 'typing.Sequence'],
+                      Instance: []}  # type: Dict[type, List[str]]
+        if supertype.type.fullname() in exclusions[type(subtype)]:
+            return
+        if any(isinstance(tp, UninhabitedType) for tp in supertype.args):
+            # We don't want to add notes for failed inference (e.g. Iterable[<nothing>]);
+            # that would only confuse the user even more.
+            return
+
+        if isinstance(subtype, (TupleType, TypedDictType)):
+            if not isinstance(subtype.fallback, Instance):
+                return
+            subtype = subtype.fallback
+
+        # Report missing members
+        missing = get_missing_protocol_members(subtype, supertype)
+        if (missing and len(missing) < len(supertype.type.protocol_members) and
+                len(missing) <= MAX_ITEMS):
+            self.note("'{}' is missing following '{}' protocol member{}:"
+                      .format(subtype.type.name(), supertype.type.name(), plural_s(missing)),
+                      context)
+            self.note(', '.join(missing), context, offset=OFFSET)
+        elif len(missing) > MAX_ITEMS or len(missing) == len(supertype.type.protocol_members):
+            # This is an obviously wrong type: too many missing members
+            return
+
+        # Report member type conflicts
+        conflict_types = get_conflict_protocol_types(subtype, supertype)
+        if conflict_types and (not is_subtype(subtype, erase_type(supertype)) or
+                               not subtype.type.defn.type_vars or
+                               not supertype.type.defn.type_vars):
+            self.note('Following member(s) of {} have '
+                      'conflicts:'.format(self.format(subtype)), context)
+            for name, got, exp in conflict_types[:MAX_ITEMS]:
+                if (not isinstance(exp, (CallableType, Overloaded)) or
+                        not isinstance(got, (CallableType, Overloaded))):
+                    self.note('{}: expected {}, got {}'.format(name,
+                                                               *self.format_distinctly(exp, got)),
+                              context, offset=OFFSET)
+                else:
+                    self.note('Expected:', context, offset=OFFSET)
+                    if isinstance(exp, CallableType):
+                        self.note(self.pretty_callable(exp), context, offset=2 * OFFSET)
+                    else:
+                        assert isinstance(exp, Overloaded)
+                        self.pretty_overload(exp, context, OFFSET, MAX_ITEMS)
+                    self.note('Got:', context, offset=OFFSET)
+                    if isinstance(got, CallableType):
+                        self.note(self.pretty_callable(got), context, offset=2 * OFFSET)
+                    else:
+                        assert isinstance(got, Overloaded)
+                        self.pretty_overload(got, context, OFFSET, MAX_ITEMS)
+            self.print_more(conflict_types, context, OFFSET, MAX_ITEMS)
+
+        # Report flag conflicts (i.e. settable vs read-only etc.)
+        conflict_flags = get_bad_protocol_flags(subtype, supertype)
+        for name, subflags, superflags in conflict_flags[:MAX_ITEMS]:
+            if IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags:
+                self.note('Protocol member {}.{} expected instance variable,'
+                          ' got class variable'.format(supertype.type.name(), name), context)
+            if IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags:
+                self.note('Protocol member {}.{} expected class variable,'
+                          ' got instance variable'.format(supertype.type.name(), name), context)
+            if IS_SETTABLE in superflags and IS_SETTABLE not in subflags:
+                self.note('Protocol member {}.{} expected settable variable,'
+                          ' got read-only attribute'.format(supertype.type.name(), name), context)
+            if IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags:
+                self.note('Protocol member {}.{} expected class or static method'
+                          .format(supertype.type.name(), name), context)
+        self.print_more(conflict_flags, context, OFFSET, MAX_ITEMS)
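
A hypothetical mismatch that exercises this reporting path (all names invented
for illustration; the exact notes come from the code above):

    from typing_extensions import Protocol   # assumed available, as above

    class Renderer(Protocol):
        def render(self) -> str: ...
        def resize(self, width: int, height: int) -> None: ...

    class Widget:
        def render(self) -> int: ...          # wrong return type
        # 'resize' is not defined at all

    w: Renderer = Widget()   # the notes would list 'resize' as a missing member
                             # and show expected/actual signatures for 'render'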
+
+    def pretty_overload(self, tp: Overloaded, context: Context,
+                        offset: int, max_items: int) -> None:
+        for item in tp.items()[:max_items]:
+            self.note('@overload', context, offset=2 * offset)
+            self.note(self.pretty_callable(item), context, offset=2 * offset)
+        if len(tp.items()) > max_items:
+            self.note('<{} more overload(s) not shown>'.format(len(tp.items()) - max_items),
+                      context, offset=2 * offset)
+
+    def print_more(self, conflicts: Sequence[Any], context: Context,
+                   offset: int, max_items: int) -> None:
+        if len(conflicts) > max_items:
+            self.note('<{} more conflict(s) not shown>'
+                      .format(len(conflicts) - max_items),
+                      context, offset=offset)
+
+    def pretty_callable(self, tp: CallableType) -> str:
+        """Return a nice easily-readable representation of a callable type.
+        For example:
+            def [T <: int] f(self, x: int, y: T) -> None
+        """
+        s = ''
+        asterisk = False
+        for i in range(len(tp.arg_types)):
+            if s:
+                s += ', '
+            if tp.arg_kinds[i] in (ARG_NAMED, ARG_NAMED_OPT) and not asterisk:
+                s += '*, '
+                asterisk = True
+            if tp.arg_kinds[i] == ARG_STAR:
+                s += '*'
+                asterisk = True
+            if tp.arg_kinds[i] == ARG_STAR2:
+                s += '**'
+            name = tp.arg_names[i]
+            if name:
+                s += name + ': '
+            s += self.format_bare(tp.arg_types[i])
+            if tp.arg_kinds[i] in (ARG_OPT, ARG_NAMED_OPT):
+                s += ' = ...'
+
+        # If we got a "special arg" (i.e: self, cls, etc...), prepend it to the arg list
+        if tp.definition is not None and tp.definition.name() is not None:
+            definition_args = getattr(tp.definition, 'arg_names')
+            if definition_args and tp.arg_names != definition_args \
+                    and len(definition_args) > 0:
+                if s:
+                    s = ', ' + s
+                s = definition_args[0] + s
+            s = '{}({})'.format(tp.definition.name(), s)
+        else:
+            s = '({})'.format(s)
+
+        s += ' -> ' + self.format_bare(tp.ret_type)
+        if tp.variables:
+            tvars = []
+            for tvar in tp.variables:
+                if (tvar.upper_bound and isinstance(tvar.upper_bound, Instance) and
+                        tvar.upper_bound.type.fullname() != 'builtins.object'):
+                    tvars.append('{} <: {}'.format(tvar.name,
+                                                   self.format_bare(tvar.upper_bound)))
+                elif tvar.values:
+                    tvars.append('{} in ({})'
+                                 .format(tvar.name, ', '.join([self.format_bare(tp)
+                                                               for tp in tvar.values])))
+                else:
+                    tvars.append(tvar.name)
+            s = '[{}] {}'.format(', '.join(tvars), s)
+        return 'def {}'.format(s)
+
+
+def variance_string(variance: int) -> str:
+    if variance == COVARIANT:
+        return 'covariant'
+    elif variance == CONTRAVARIANT:
+        return 'contravariant'
+    else:
+        return 'invariant'
+
+
+def get_missing_protocol_members(left: Instance, right: Instance) -> List[str]:
+    """Find all protocol members of 'right' that are not implemented
+    (i.e. completely missing) in 'left'.
+    """
+    from mypy.subtypes import find_member
+    assert right.type.is_protocol
+    missing = []  # type: List[str]
+    for member in right.type.protocol_members:
+        if not find_member(member, left, left):
+            missing.append(member)
+    return missing
+
+
+def get_conflict_protocol_types(left: Instance, right: Instance) -> List[Tuple[str, Type, Type]]:
+    """Find members that are defined in 'left' but have incompatible types.
+    Return them as a list of ('member', 'got', 'expected').
+    """
+    from mypy.subtypes import find_member, is_subtype, get_member_flags, IS_SETTABLE
+    assert right.type.is_protocol
+    conflicts = []  # type: List[Tuple[str, Type, Type]]
+    for member in right.type.protocol_members:
+        if member in ('__init__', '__new__'):
+            continue
+        supertype = find_member(member, right, left)
+        assert supertype is not None
+        subtype = find_member(member, left, left)
+        if not subtype:
+            continue
+        is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True)
+        if IS_SETTABLE in get_member_flags(member, right.type):
+            is_compat = is_compat and is_subtype(supertype, subtype)
+        if not is_compat:
+            conflicts.append((member, subtype, supertype))
+    return conflicts
+
+
+def get_bad_protocol_flags(left: Instance, right: Instance
+                           ) -> List[Tuple[str, Set[int], Set[int]]]:
+    """Return all incompatible attribute flags for members that are present in both
+    'left' and 'right'.
+    """
+    from mypy.subtypes import (find_member, get_member_flags,
+                               IS_SETTABLE, IS_CLASSVAR, IS_CLASS_OR_STATIC)
+    assert right.type.is_protocol
+    all_flags = []  # type: List[Tuple[str, Set[int], Set[int]]]
+    for member in right.type.protocol_members:
+        if find_member(member, left, left):
+            item = (member,
+                    get_member_flags(member, left.type),
+                    get_member_flags(member, right.type))
+            all_flags.append(item)
+    bad_flags = []
+    for name, subflags, superflags in all_flags:
+        if (IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags or
+                IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags or
+                IS_SETTABLE in superflags and IS_SETTABLE not in subflags or
+                IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags):
+            bad_flags.append((name, subflags, superflags))
+    return bad_flags
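
An editorial example of the kind of flag conflict these helpers detect (names
are hypothetical): the protocol declares a settable attribute, but the
implementation only offers a read-only property.

    from typing_extensions import Protocol   # assumed available, as above

    class HasName(Protocol):
        name: str                 # settable instance attribute (IS_SETTABLE)

    class Frozen:
        @property
        def name(self) -> str:    # read-only property, so IS_SETTABLE is absent
            return 'frozen'

    x: HasName = Frozen()   # expected note: Protocol member HasName.name expected
                            # settable variable, got read-only attribute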
+
 
 def capitalize(s: str) -> str:
     """Capitalize the first character of a string."""
@@ -994,22 +1328,23 @@ def plural_s(s: Sequence[Any]) -> str:
 
 
 def format_string_list(s: Iterable[str]) -> str:
-    l = list(s)
-    assert len(l) > 0
-    if len(l) == 1:
-        return l[0]
-    elif len(l) <= 5:
-        return '%s and %s' % (', '.join(l[:-1]), l[-1])
+    lst = list(s)
+    assert len(lst) > 0
+    if len(lst) == 1:
+        return lst[0]
+    elif len(lst) <= 5:
+        return '%s and %s' % (', '.join(lst[:-1]), lst[-1])
     else:
-        return '%s, ... and %s (%i methods suppressed)' % (', '.join(l[:2]), l[-1], len(l) - 3)
+        return '%s, ... and %s (%i methods suppressed)' % (
+            ', '.join(lst[:2]), lst[-1], len(lst) - 3)
 
 
 def format_item_name_list(s: Iterable[str]) -> str:
-    l = list(s)
-    if len(l) <= 5:
-        return '(' + ', '.join(["'%s'" % name for name in l]) + ')'
+    lst = list(s)
+    if len(lst) <= 5:
+        return '(' + ', '.join(["'%s'" % name for name in lst]) + ')'
     else:
-        return '(' + ', '.join(["'%s'" % name for name in l[:5]]) + ', ...)'
+        return '(' + ', '.join(["'%s'" % name for name in lst[:5]]) + ', ...)'
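
Illustrative expectations for the two list formatters above (the short-list
cases follow directly from the code; longer lists are truncated with '...'):

    assert format_string_list(['f']) == 'f'
    assert format_string_list(['f', 'g', 'h']) == 'f, g and h'
    assert format_item_name_list(['x', 'y']) == "('x', 'y')"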
 
 
 def callable_name(type: CallableType) -> str:
@@ -1060,6 +1395,33 @@ def pretty_or(args: List[str]) -> str:
     return ", ".join(quoted[:-1]) + ", or " + quoted[-1]
 
 
+def append_invariance_notes(notes: List[str], arg_type: Instance,
+                            expected_type: Instance) -> List[str]:
+    """Explain that the type is invariant and give notes for how to solve the issue."""
+    from mypy.subtypes import is_subtype
+    from mypy.sametypes import is_same_type
+    invariant_type = ''
+    covariant_suggestion = ''
+    if (arg_type.type.fullname() == 'builtins.list' and
+            expected_type.type.fullname() == 'builtins.list' and
+            is_subtype(arg_type.args[0], expected_type.args[0])):
+        invariant_type = 'List'
+        covariant_suggestion = 'Consider using "Sequence" instead, which is covariant'
+    elif (arg_type.type.fullname() == 'builtins.dict' and
+          expected_type.type.fullname() == 'builtins.dict' and
+          is_same_type(arg_type.args[0], expected_type.args[0]) and
+          is_subtype(arg_type.args[1], expected_type.args[1])):
+        invariant_type = 'Dict'
+        covariant_suggestion = ('Consider using "Mapping" instead, '
+                                'which is covariant in the value type')
+    if invariant_type and covariant_suggestion:
+        notes.append(
+            '"{}" is invariant -- see '.format(invariant_type) +
+            'http://mypy.readthedocs.io/en/latest/common_issues.html#variance')
+        notes.append(covariant_suggestion)
+    return notes
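
The classic situation these notes are aimed at (hypothetical user code):
passing a List[int] where a List[object] is declared. The call is rejected
because List is invariant, even though int is a subtype of object.

    from typing import List

    def describe(items: List[object]) -> None: ...

    nums = [1, 2, 3]      # inferred as List[int]
    describe(nums)        # error: incompatible argument type, plus the two
                          # notes built above (invariance + "use Sequence")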
+
+
 def make_inferred_type_note(context: Context, subtype: Type,
                             supertype: Type, supertype_str: str) -> str:
     """Explain that the user may have forgotten to type a variable.
diff --git a/mypy/myunit/__init__.py b/mypy/myunit/__init__.py
index bf47b1e..92ba802 100644
--- a/mypy/myunit/__init__.py
+++ b/mypy/myunit/__init__.py
@@ -6,7 +6,7 @@ import tempfile
 import time
 import traceback
 
-from typing import List, Tuple, Any, Callable, Union, cast
+from typing import List, Tuple, Any, Callable, Union, cast, Optional
 from types import TracebackType
 
 
@@ -19,7 +19,7 @@ times = []  # type: List[Tuple[float, str]]
 
 class AssertionFailure(Exception):
     """Exception used to signal failed test cases."""
-    def __init__(self, s: str = None) -> None:
+    def __init__(self, s: Optional[str] = None) -> None:
         if s:
             super().__init__(s)
         else:
@@ -31,12 +31,12 @@ class SkipTestCaseException(Exception):
     pass
 
 
-def assert_true(b: bool, msg: str = None) -> None:
+def assert_true(b: bool, msg: Optional[str] = None) -> None:
     if not b:
         raise AssertionFailure(msg)
 
 
-def assert_false(b: bool, msg: str = None) -> None:
+def assert_false(b: bool, msg: Optional[str] = None) -> None:
     if b:
         raise AssertionFailure(msg)
 
@@ -72,7 +72,7 @@ def assert_raises(typ: type, *rest: Any) -> None:
     TODO use overloads for better type checking
     """
     # Parse arguments.
-    msg = None  # type: str
+    msg = None  # type: Optional[str]
     if isinstance(rest[0], str) or rest[0] is None:
         msg = rest[0]
         rest = rest[1:]
@@ -106,13 +106,13 @@ def fail() -> None:
 
 
 class TestCase:
-    def __init__(self, name: str, suite: 'Suite' = None,
-                 func: Callable[[], None] = None) -> None:
+    def __init__(self, name: str, suite: 'Optional[Suite]' = None,
+                 func: Optional[Callable[[], None]] = None) -> None:
         self.func = func
         self.name = name
         self.suite = suite
-        self.old_cwd = None  # type: str
-        self.tmpdir = None  # type: tempfile.TemporaryDirectory
+        self.old_cwd = None  # type: Optional[str]
+        self.tmpdir = None  # type: Optional[tempfile.TemporaryDirectory[str]]
 
     def run(self) -> None:
         if self.func:
@@ -129,6 +129,8 @@ class TestCase:
     def tear_down(self) -> None:
         if self.suite:
             self.suite.tear_down()
+        assert self.old_cwd is not None and self.tmpdir is not None, \
+            "test was not properly set up"
         os.chdir(self.old_cwd)
         try:
             self.tmpdir.cleanup()
@@ -198,7 +200,7 @@ class ListSuite(Suite):
         super().__init__()
 
 
-def main(args: List[str] = None) -> None:
+def main(args: Optional[List[str]] = None) -> None:
     global patterns, is_verbose, is_quiet
     if not args:
         args = sys.argv[1:]
@@ -265,10 +267,9 @@ def run_test_recursive(test: Any, num_total: int, num_fail: int, num_skip: int,
             if is_skip: num_skip += 1
             num_total += 1
     else:
-        suite = None  # type: Suite
         suite_prefix = ''
         if isinstance(test, list) or isinstance(test, tuple):
-            suite = test[1]
+            suite = test[1]  # type: Suite
             suite_prefix = test[0]
         else:
             suite = test
@@ -307,6 +308,7 @@ def run_single_test(name: str, test: Any) -> Tuple[bool, bool]:
                 sys.stderr.write(' (skipped)\n')
             return False, True
         else:
+            assert exc_type is not None and exc_value is not None
             handle_failure(name, exc_type, exc_value, exc_traceback)
             return True, False
     elif is_verbose:
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 796e809..12bfa0d 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -3,14 +3,13 @@
 import os
 from abc import abstractmethod
 from collections import OrderedDict
-
 from typing import (
     Any, TypeVar, List, Tuple, cast, Set, Dict, Union, Optional, Callable,
 )
 
 import mypy.strconv
-from mypy.visitor import NodeVisitor, StatementVisitor, ExpressionVisitor
 from mypy.util import short_type
+from mypy.visitor import NodeVisitor, StatementVisitor, ExpressionVisitor
 
 
 class Context:
@@ -23,7 +22,7 @@ class Context:
         self.line = line
         self.column = column
 
-    def set_line(self, target: Union['Context', int], column: int = None) -> None:
+    def set_line(self, target: Union['Context', int], column: Optional[int] = None) -> None:
         """If target is a node, pull line (and column) information
         into this node. If column is specified, this will override any column
         information coming from a node.
@@ -130,10 +129,6 @@ nongen_builtins.update(reverse_type_aliases)
 nongen_builtins.update(reverse_collection_aliases)
 
 
-# See [Note Literals and literal_hash] below
-Key = tuple
-
-
 class Node(Context):
     """Common base class for all non-type parse tree nodes."""
 
@@ -155,9 +150,6 @@ class Statement(Node):
 
 class Expression(Node):
     """An expression node."""
-    literal = LITERAL_NO
-    literal_hash = None  # type: Optional[Key]
-
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         raise RuntimeError('Not implemented')
 
@@ -168,44 +160,6 @@ class Expression(Node):
 Lvalue = Expression
 
 
-# [Note Literals and literal_hash]
-# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-#
-# Mypy uses the term "literal" to refer to any expression built out of
-# the following:
-#
-# * Plain literal expressions, like `1` (integer, float, string, etc.)
-#
-# * Compound literal expressions, like `(lit1, lit2)` (list, dict,
-#   set, or tuple)
-#
-# * Operator expressions, like `lit1 + lit2`
-#
-# * Variable references, like `x`
-#
-# * Member references, like `lit.m`
-#
-# * Index expressions, like `lit[0]`
-#
-# A typical "literal" looks like `x[(i,j+1)].m`.
-#
-# An expression that is a literal has a `literal_hash`, with the
-# following properties.
-#
-# * `literal_hash` is a Key: a tuple containing basic data types and
-#   possibly other Keys. So it can be used as a key in a dictionary
-#   that will be compared by value (as opposed to the Node itself,
-#   which is compared by identity).
-#
-# * Two expressions have equal `literal_hash`es if and only if they
-#   are syntactically equal expressions. (NB: Actually, we also
-#   identify as equal expressions like `3` and `3.0`; is this a good
-#   idea?)
-#
-# * The elements of `literal_hash` that are tuples are exactly the
-#   subexpressions of the original expression (e.g. the base and index
-#   of an index expression, or the operands of an operator expression).
-
 class SymbolNode(Node):
     # Nodes that can be stored in a symbol table.
 
@@ -254,7 +208,7 @@ class MypyFile(SymbolNode):
                  defs: List[Statement],
                  imports: List['ImportBase'],
                  is_bom: bool = False,
-                 ignored_lines: Set[int] = None) -> None:
+                 ignored_lines: Optional[Set[int]] = None) -> None:
         self.defs = defs
         self.line = 1  # Dummy line number
         self.imports = imports
@@ -445,7 +399,7 @@ class Argument(Node):
 
     def __init__(self, variable: 'Var', type_annotation: 'Optional[mypy.types.Type]',
             initializer: Optional[Expression], kind: int,
-            initialization_statement: Optional['AssignmentStmt'] = None) -> None:
+            initialization_statement: 'Optional[AssignmentStmt]' = None) -> None:
         self.variable = variable
 
         self.type_annotation = type_annotation
@@ -457,7 +411,7 @@ class Argument(Node):
 
         self.kind = kind
 
-    def _initialization_statement(self) -> Optional['AssignmentStmt']:
+    def _initialization_statement(self) -> 'Optional[AssignmentStmt]':
         """Convert the initializer into an assignment statement.
         """
         if not self.initializer:
@@ -468,7 +422,7 @@ class Argument(Node):
         assign = AssignmentStmt([lvalue], rvalue)
         return assign
 
-    def set_line(self, target: Union[Context, int], column: int = None) -> None:
+    def set_line(self, target: Union[Context, int], column: Optional[int] = None) -> None:
         super().set_line(target, column)
 
         if self.initializer:
@@ -507,7 +461,7 @@ class FuncItem(FuncBase):
     ]
 
     def __init__(self, arguments: List[Argument], body: 'Block',
-                 typ: 'mypy.types.FunctionLike' = None) -> None:
+                 typ: 'Optional[mypy.types.FunctionLike]' = None) -> None:
         self.arguments = arguments
         self.arg_names = [arg.variable.name() for arg in self.arguments]
         self.arg_kinds = [arg.kind for arg in self.arguments]
@@ -525,7 +479,7 @@ class FuncItem(FuncBase):
     def max_fixed_argc(self) -> int:
         return self.max_pos
 
-    def set_line(self, target: Union[Context, int], column: int = None) -> None:
+    def set_line(self, target: Union[Context, int], column: Optional[int] = None) -> None:
         super().set_line(target, column)
         for arg in self.arguments:
             arg.set_line(self.line, self.column)
@@ -554,7 +508,7 @@ class FuncDef(FuncItem, SymbolNode, Statement):
                  name: str,              # Function name
                  arguments: List[Argument],
                  body: 'Block',
-                 typ: 'mypy.types.FunctionLike' = None) -> None:
+                 typ: 'Optional[mypy.types.FunctionLike]' = None) -> None:
         super().__init__(arguments, body, typ)
         self._name = name
 
@@ -611,7 +565,7 @@ class Decorator(SymbolNode, Statement):
     """
 
     func = None  # type: FuncDef                # Decorated function
-    decorators = None  # type: List[Expression] # Decorators, at least one  # XXX Not true
+    decorators = None  # type: List[Expression] # Decorators (may be empty)
     var = None  # type: Var                     # Represents the decorated function obj
     is_overload = False
 
@@ -628,6 +582,10 @@ class Decorator(SymbolNode, Statement):
     def fullname(self) -> str:
         return self.func.fullname()
 
+    @property
+    def info(self) -> 'TypeInfo':
+        return self.func.info
+
     def accept(self, visitor: StatementVisitor[T]) -> T:
         return visitor.visit_decorator(self)
 
@@ -669,6 +627,7 @@ class Var(SymbolNode):
     is_property = False
     is_settable_property = False
     is_classvar = False
+    is_abstract_var = False
     # Set to true when this variable refers to a module we were unable to
     # parse for some reason (eg a silenced module)
     is_suppressed_import = False
@@ -676,10 +635,10 @@ class Var(SymbolNode):
     FLAGS = [
         'is_self', 'is_ready', 'is_initialized_in_class', 'is_staticmethod',
         'is_classmethod', 'is_property', 'is_settable_property', 'is_suppressed_import',
-        'is_classvar'
+        'is_classvar', 'is_abstract_var'
     ]
 
-    def __init__(self, name: str, type: 'mypy.types.Type' = None) -> None:
+    def __init__(self, name: str, type: 'Optional[mypy.types.Type]' = None) -> None:
         self._name = name
         self.type = type
         if self.type is None:
@@ -729,7 +688,7 @@ class ClassDef(Statement):
     # Base class expressions (not semantically analyzed -- can be arbitrary expressions)
     base_type_exprs = None  # type: List[Expression]
     info = None  # type: TypeInfo  # Related TypeInfo
-    metaclass = ''  # type: Optional[str]
+    metaclass = None  # type: Optional[Expression]
     decorators = None  # type: List[Expression]
     keywords = None  # type: OrderedDict[str, Expression]
     analyzed = None  # type: Optional[Expression]
@@ -738,10 +697,10 @@ class ClassDef(Statement):
     def __init__(self,
                  name: str,
                  defs: 'Block',
-                 type_vars: List['mypy.types.TypeVarDef'] = None,
-                 base_type_exprs: List[Expression] = None,
-                 metaclass: str = None,
-                 keywords: List[Tuple[str, Expression]] = None) -> None:
+                 type_vars: Optional[List['mypy.types.TypeVarDef']] = None,
+                 base_type_exprs: Optional[List[Expression]] = None,
+                 metaclass: Optional[Expression] = None,
+                 keywords: Optional[List[Tuple[str, Expression]]] = None) -> None:
         self.name = name
         self.defs = defs
         self.type_vars = type_vars or []
@@ -757,12 +716,12 @@ class ClassDef(Statement):
         return self.info.is_generic()
 
     def serialize(self) -> JsonDict:
-        # Not serialized: defs, base_type_exprs, decorators, analyzed (for named tuples etc.)
+        # Not serialized: defs, base_type_exprs, metaclass, decorators,
+        # analyzed (for named tuples etc.)
         return {'.class': 'ClassDef',
                 'name': self.name,
                 'fullname': self.fullname,
                 'type_vars': [v.serialize() for v in self.type_vars],
-                'metaclass': self.metaclass,
                 }
 
     @classmethod
@@ -771,7 +730,6 @@ class ClassDef(Statement):
         res = ClassDef(data['name'],
                        Block([]),
                        [mypy.types.TypeVarDef.deserialize(v) for v in data['type_vars']],
-                       metaclass=data['metaclass'],
                        )
         res.fullname = data['fullname']
         return res
@@ -848,7 +806,7 @@ class AssignmentStmt(Statement):
     new_syntax = False  # type: bool
 
     def __init__(self, lvalues: List[Lvalue], rvalue: Expression,
-                 type: 'mypy.types.Type' = None, new_syntax: bool = False) -> None:
+                 type: 'Optional[mypy.types.Type]' = None, new_syntax: bool = False) -> None:
         self.lvalues = lvalues
         self.rvalue = rvalue
         self.type = type
@@ -900,8 +858,12 @@ class ForStmt(Statement):
     else_body = None  # type: Optional[Block]
     is_async = False  # True if `async for ...` (PEP 492, Python 3.5)
 
-    def __init__(self, index: Lvalue, expr: Expression, body: Block,
-                 else_body: Optional[Block], index_type: 'mypy.types.Type' = None) -> None:
+    def __init__(self,
+                 index: Lvalue,
+                 expr: Expression,
+                 body: Block,
+                 else_body: Optional[Block],
+                 index_type: 'Optional[mypy.types.Type]' = None) -> None:
         self.index = index
         self.index_type = index_type
         self.expr = expr
@@ -926,7 +888,7 @@ class AssertStmt(Statement):
     expr = None  # type: Expression
     msg = None  # type: Optional[Expression]
 
-    def __init__(self, expr: Expression, msg: Expression = None) -> None:
+    def __init__(self, expr: Expression, msg: Optional[Expression] = None) -> None:
         self.expr = expr
         self.msg = msg
 
@@ -996,7 +958,7 @@ class TryStmt(Statement):
     else_body = None  # type: Optional[Block]
     finally_body = None  # type: Optional[Block]
 
-    def __init__(self, body: Block, vars: List[Optional['NameExpr']],
+    def __init__(self, body: Block, vars: List['Optional[NameExpr]'],
                  types: List[Optional[Expression]],
                  handlers: List[Block], else_body: Optional[Block],
                  finally_body: Optional[Block]) -> None:
@@ -1020,7 +982,7 @@ class WithStmt(Statement):
     is_async = False  # True if `async with ...` (PEP 492, Python 3.5)
 
     def __init__(self, expr: List[Expression], target: List[Optional[Lvalue]],
-                 body: Block, target_type: 'mypy.types.Type' = None) -> None:
+                 body: Block, target_type: 'Optional[mypy.types.Type]' = None) -> None:
         self.expr = expr
         self.target = target
         self.target_type = target_type
@@ -1038,7 +1000,10 @@ class PrintStmt(Statement):
     # The file-like target object (given using >>).
     target = None  # type: Optional[Expression]
 
-    def __init__(self, args: List[Expression], newline: bool, target: Expression = None) -> None:
+    def __init__(self,
+                 args: List[Expression],
+                 newline: bool,
+                 target: Optional[Expression] = None) -> None:
         self.args = args
         self.newline = newline
         self.target = target
@@ -1072,11 +1037,9 @@ class IntExpr(Expression):
     """Integer literal"""
 
     value = 0
-    literal = LITERAL_YES
 
     def __init__(self, value: int) -> None:
         self.value = value
-        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_int_expr(self)
@@ -1097,11 +1060,9 @@ class StrExpr(Expression):
     """String literal"""
 
     value = ''
-    literal = LITERAL_YES
 
     def __init__(self, value: str) -> None:
         self.value = value
-        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_str_expr(self)
@@ -1111,11 +1072,9 @@ class BytesExpr(Expression):
     """Bytes literal"""
 
     value = ''  # TODO use bytes
-    literal = LITERAL_YES
 
     def __init__(self, value: str) -> None:
         self.value = value
-        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_bytes_expr(self)
@@ -1125,11 +1084,9 @@ class UnicodeExpr(Expression):
     """Unicode literal (Python 2.x)"""
 
     value = ''  # TODO use bytes
-    literal = LITERAL_YES
 
     def __init__(self, value: str) -> None:
         self.value = value
-        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_unicode_expr(self)
@@ -1139,11 +1096,9 @@ class FloatExpr(Expression):
     """Float literal"""
 
     value = 0.0
-    literal = LITERAL_YES
 
     def __init__(self, value: float) -> None:
         self.value = value
-        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_float_expr(self)
@@ -1153,11 +1108,9 @@ class ComplexExpr(Expression):
     """Complex literal"""
 
     value = 0.0j
-    literal = LITERAL_YES
 
     def __init__(self, value: complex) -> None:
         self.value = value
-        self.literal_hash = ('Literal', value)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_complex_expr(self)
@@ -1177,8 +1130,6 @@ class StarExpr(Expression):
 
     def __init__(self, expr: Expression) -> None:
         self.expr = expr
-        self.literal = self.expr.literal
-        self.literal_hash = ('Star', expr.literal_hash,)
 
         # Whether this starred expression is used in a tuple/list and as lvalue
         self.valid = False
@@ -1190,9 +1141,9 @@ class StarExpr(Expression):
 class RefExpr(Expression):
     """Abstract base class for name-like constructs"""
 
-    kind = None  # type: int      # LDEF/GDEF/MDEF/... (None if not available)
+    kind = None  # type: Optional[int]      # LDEF/GDEF/MDEF/... (None if not available)
     node = None  # type: Optional[SymbolNode]  # Var, FuncDef or TypeInfo that describes this
-    fullname = None  # type: str  # Fully qualified name (or name if not global)
+    fullname = None  # type: Optional[str]  # Fully qualified name (or name if not global)
 
     # Does this define a new name with inferred type?
     #
@@ -1209,11 +1160,8 @@ class NameExpr(RefExpr):
 
     name = None  # type: str      # Name referred to (may be qualified)
 
-    literal = LITERAL_TYPE
-
     def __init__(self, name: str) -> None:
         self.name = name
-        self.literal_hash = ('Var', name,)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_name_expr(self)
@@ -1227,7 +1175,6 @@ class NameExpr(RefExpr):
                 'fullname': self.fullname,
                 'is_def': self.is_def,
                 'name': self.name,
-                'literal': self.literal,
                 }
 
     @classmethod
@@ -1238,7 +1185,6 @@ class NameExpr(RefExpr):
         ret.node = None if data['node'] is None else SymbolNode.deserialize(data['node'])
         ret.fullname = data['fullname']
         ret.is_def = data['is_def']
-        ret.literal = data['literal']
         return ret
 
 
@@ -1247,14 +1193,13 @@ class MemberExpr(RefExpr):
 
     expr = None  # type: Expression
     name = None  # type: str
-    # The variable node related to a definition.
-    def_var = None  # type: Var
+    # The variable node related to a definition through 'self.x = <initializer>'.
+    # The nodes of other kinds of member expressions are resolved during type checking.
+    def_var = None  # type: Optional[Var]
 
     def __init__(self, expr: Expression, name: str) -> None:
         self.expr = expr
         self.name = name
-        self.literal = self.expr.literal
-        self.literal_hash = ('Member', expr.literal_hash, name)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_member_expr(self)
@@ -1292,8 +1237,12 @@ class CallExpr(Expression):
     # cast(...) this is a CastExpr.
     analyzed = None  # type: Optional[Expression]
 
-    def __init__(self, callee: Expression, args: List[Expression], arg_kinds: List[int],
-                 arg_names: List[Optional[str]] = None, analyzed: Expression = None) -> None:
+    def __init__(self,
+                 callee: Expression,
+                 args: List[Expression],
+                 arg_kinds: List[int],
+                 arg_names: List[Optional[str]],
+                 analyzed: Optional[Expression] = None) -> None:
         if not arg_names:
             arg_names = [None] * len(args)
 
@@ -1345,10 +1294,6 @@ class IndexExpr(Expression):
         self.base = base
         self.index = index
         self.analyzed = None
-        if self.index.literal == LITERAL_YES:
-            self.literal = self.base.literal
-            self.literal_hash = ('Index', base.literal_hash,
-                                 index.literal_hash)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_index_expr(self)
@@ -1365,8 +1310,6 @@ class UnaryExpr(Expression):
     def __init__(self, op: str, expr: Expression) -> None:
         self.op = op
         self.expr = expr
-        self.literal = self.expr.literal
-        self.literal_hash = ('Unary', op, expr.literal_hash)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_unary_expr(self)
@@ -1452,8 +1395,6 @@ class OpExpr(Expression):
         self.op = op
         self.left = left
         self.right = right
-        self.literal = min(self.left.literal, self.right.literal)
-        self.literal_hash = ('Binary', op, left.literal_hash, right.literal_hash)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_op_expr(self)
@@ -1471,9 +1412,6 @@ class ComparisonExpr(Expression):
         self.operators = operators
         self.operands = operands
         self.method_types = []
-        self.literal = min(o.literal for o in self.operands)
-        self.literal_hash = ((cast(Any, 'Comparison'),) + tuple(operators) +
-                             tuple(o.literal_hash for o in operands))
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_comparison_expr(self)
@@ -1531,9 +1469,11 @@ class SuperExpr(Expression):
 
     name = ''
     info = None  # type: TypeInfo  # Type that contains this super expression
+    call = None  # type: CallExpr  # The expression super(...)
 
-    def __init__(self, name: str) -> None:
+    def __init__(self, name: str, call: CallExpr) -> None:
         self.name = name
+        self.call = call
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_super_expr(self)
@@ -1563,9 +1503,6 @@ class ListExpr(Expression):
 
     def __init__(self, items: List[Expression]) -> None:
         self.items = items
-        if all(x.literal == LITERAL_YES for x in items):
-            self.literal = LITERAL_YES
-            self.literal_hash = (cast(Any, 'List'),) + tuple(x.literal_hash for x in items)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_list_expr(self)
@@ -1578,13 +1515,6 @@ class DictExpr(Expression):
 
     def __init__(self, items: List[Tuple[Expression, Expression]]) -> None:
         self.items = items
-        # key is None for **item, e.g. {'a': 1, **x} has
-        # keys ['a', None] and values [1, x].
-        if all(x[0] and x[0].literal == LITERAL_YES and x[1].literal == LITERAL_YES
-               for x in items):
-            self.literal = LITERAL_YES
-            self.literal_hash = (cast(Any, 'Dict'),) + tuple(
-                (x[0].literal_hash, x[1].literal_hash) for x in items)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_dict_expr(self)
@@ -1597,9 +1527,6 @@ class TupleExpr(Expression):
 
     def __init__(self, items: List[Expression]) -> None:
         self.items = items
-        if all(x.literal == LITERAL_YES for x in items):
-            self.literal = LITERAL_YES
-            self.literal_hash = (cast(Any, 'Tuple'),) + tuple(x.literal_hash for x in items)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_tuple_expr(self)
@@ -1612,9 +1539,6 @@ class SetExpr(Expression):
 
     def __init__(self, items: List[Expression]) -> None:
         self.items = items
-        if all(x.literal == LITERAL_YES for x in items):
-            self.literal = LITERAL_YES
-            self.literal_hash = (cast(Any, 'Set'),) + tuple(x.literal_hash for x in items)
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_set_expr(self)
@@ -1807,13 +1731,13 @@ class TypeAliasExpr(Expression):
     type = None  # type: mypy.types.Type
     # Simple fallback type for aliases that are invalid in runtime expressions
     # (for example Union, Tuple, Callable).
-    fallback = None  # type: Optional[mypy.types.Type]
+    fallback = None  # type: mypy.types.Type
     # This type alias is subscripted in a runtime expression like Alias[int](42)
     # (not in a type context like type annotation or base class).
     in_runtime = False  # type: bool
 
     def __init__(self, type: 'mypy.types.Type', tvars: List[str],
-                 fallback: 'mypy.types.Type' = None, in_runtime: bool = False) -> None:
+                 fallback: 'mypy.types.Type', in_runtime: bool = False) -> None:
         self.type = type
         self.fallback = fallback
         self.in_runtime = in_runtime
@@ -1920,9 +1844,13 @@ class TempNode(Expression):
     """
 
     type = None  # type: mypy.types.Type
+    # Is this TempNode used to indicate absence of a right hand side in an annotated assignment?
+    # (e.g. for 'x: int' the rvalue is TempNode(AnyType(TypeOfAny.special_form), no_rhs=True))
+    no_rhs = False  # type: bool
 
-    def __init__(self, typ: 'mypy.types.Type') -> None:
+    def __init__(self, typ: 'mypy.types.Type', no_rhs: bool = False) -> None:
         self.type = typ
+        self.no_rhs = no_rhs
 
     def __repr__(self) -> str:
         return 'TempNode(%s)' % str(self.type)
@@ -1960,7 +1888,53 @@ class TypeInfo(SymbolNode):
     subtypes = None  # type: Set[TypeInfo] # Direct subclasses encountered so far
     names = None  # type: SymbolTable      # Names defined directly in this type
     is_abstract = False                    # Does the class have any abstract attributes?
+    is_protocol = False                    # Is this a protocol class?
+    runtime_protocol = False               # Does this protocol support isinstance checks?
     abstract_attributes = None  # type: List[str]
+    # Protocol members are names of all attributes/methods defined in a protocol
+    # and in all its supertypes (except for 'object').
+    protocol_members = None  # type: List[str]
+
+    # The attributes 'assuming' and 'assuming_proper' represent structural subtype matrices.
+    #
+    # In languages with structural subtyping, one can keep a global subtype matrix like this:
+    #   . A B C .
+    #   A 1 0 0
+    #   B 1 1 1
+    #   C 1 0 1
+    #   .
+    # where 1 indicates that the type in the corresponding row is a subtype of the type
+    # in the corresponding column. This matrix typically starts filled with all 1's and
+    # a typechecker tries to "disprove" every subtyping relation using atomic (or nominal) types.
+    # However, we don't want to keep this huge global state. Instead, we keep the subtype
+    # information as a list of pairs (subtype, supertype) shared by all 'Instance's
+    # with a given supertype's TypeInfo. When we enter a subtype check we push a pair onto
+    # this list, thus assuming that we started with 1 in the corresponding matrix element.
+    # This algorithm lets us handle recursive and mutually recursive protocols and other
+    # kinds of complex situations.
+    #
+    # If concurrent/parallel type checking is added in the future,
+    # there should be one matrix per thread/process to avoid false negatives
+    # during the type checking phase.
+    assuming = None  # type: List[Tuple[mypy.types.Instance, mypy.types.Instance]]
+    assuming_proper = None  # type: List[Tuple[mypy.types.Instance, mypy.types.Instance]]
+    # Ditto for temporary 'inferring' stack of recursive constraint inference.
+    # It contains Instance's of protocol types that appeared as an argument to
+    # constraints.infer_constraints(). We need 'inferring' to avoid infinite recursion for
+    # recursive and mutually recursive protocols.
+    #
+    # We make 'assuming' and 'inferring' attributes here instead of passing them as kwargs,
+    # since that would require passing them through many dozens of calls. In particular,
+    # there is a dependency infer_constraint -> is_subtype -> is_callable_subtype ->
+    # -> infer_constraints.
+    inferring = None  # type: List[mypy.types.Instance]
+    # '_cache' and '_cache_proper' are subtype caches, implemented as sets of pairs
+    # of (subtype, supertype), where supertypes are instances of given TypeInfo.
+    # We need the caches, since subtype checks for structural types are very slow.
+    _cache = None  # type: Set[Tuple[mypy.types.Type, mypy.types.Type]]
+    _cache_proper = None  # type: Set[Tuple[mypy.types.Type, mypy.types.Type]]
+    # 'inferring' and 'assuming' can't also be made sets, since we need to use
+    # is_same_type to correctly treat unions.
+
     # Classes inheriting from Enum shadow their true members with a __getattr__, so we
     # have to treat them as a special case.
     is_enum = False
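
The 'assuming' stack described in the comment above is easiest to see on a tiny standalone
model. The following sketch is only an illustration (none of these names are mypy APIs):
before descending into the members of a protocol we optimistically record the
(subtype, supertype) pair, which is what makes checks of recursive and mutually recursive
protocols terminate. The real implementation additionally memoizes successful checks in the
per-TypeInfo _cache / _cache_proper sets via record_subtype_cache_entry().

    from typing import Dict, List, Tuple

    # Toy structural model: each "protocol" maps member names to the protocol
    # type (here just a string name) that the member must satisfy.
    assuming = []  # type: List[Tuple[str, str]]

    def is_structural_subtype(left: str, right: str,
                              members: Dict[str, Dict[str, str]]) -> bool:
        # If this pair is already being checked further up the stack, assume it
        # holds (the optimistic "1" in the matrix); this breaks the recursion.
        if (left, right) in assuming:
            return True
        assuming.append((left, right))
        try:
            for name, required in members[right].items():
                if name not in members[left]:
                    return False
                if not is_structural_subtype(members[left][name], required, members):
                    return False
            return True
        finally:
            assuming.pop()

    # Mutually recursive protocols: A's 'partner' must be a B and vice versa.
    members = {'A': {'partner': 'B'}, 'B': {'partner': 'A'}}
    print(is_structural_subtype('A', 'B', members))  # True, and it terminates
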
@@ -2002,9 +1976,15 @@ class TypeInfo(SymbolNode):
     # Is this a newtype type?
     is_newtype = False
 
+    # If during analysis of the ClassDef associated with this TypeInfo a synthetic
+    # type (NamedTuple or TypedDict) was generated, store the corresponding
+    # TypeInfo here. (This attribute does not need to be serialized, it is only
+    # needed during the semantic passes.)
+    replaced = None  # type: TypeInfo
+
     FLAGS = [
         'is_abstract', 'is_enum', 'fallback_to_any', 'is_named_tuple',
-        'is_newtype'
+        'is_newtype', 'is_protocol', 'runtime_protocol'
     ]
 
     def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> None:
@@ -2020,6 +2000,11 @@ class TypeInfo(SymbolNode):
         self._fullname = defn.fullname
         self.is_abstract = False
         self.abstract_attributes = []
+        self.assuming = []
+        self.assuming_proper = []
+        self.inferring = []
+        self._cache = set()
+        self._cache_proper = set()
         self.add_type_vars()
 
     def add_type_vars(self) -> None:
@@ -2038,19 +2023,37 @@ class TypeInfo(SymbolNode):
         """Is the type generic (i.e. does it have type variables)?"""
         return len(self.type_vars) > 0
 
-    def get(self, name: str) -> Optional['SymbolTableNode']:
+    def get(self, name: str) -> 'Optional[SymbolTableNode]':
+        if self.mro is None:  # Might be because of a previous error.
+            return None
+
         for cls in self.mro:
             n = cls.names.get(name)
             if n:
                 return n
         return None
 
-    def get_containing_type_info(self, name: str) -> Optional['TypeInfo']:
+    def get_containing_type_info(self, name: str) -> 'Optional[TypeInfo]':
         for cls in self.mro:
             if name in cls.names:
                 return cls
         return None
 
+    def record_subtype_cache_entry(self, left: 'mypy.types.Instance',
+                                   right: 'mypy.types.Instance',
+                                   proper_subtype: bool = False) -> None:
+        if proper_subtype:
+            self._cache_proper.add((left, right))
+        else:
+            self._cache.add((left, right))
+
+    def is_cached_subtype_check(self, left: 'mypy.types.Instance',
+                                right: 'mypy.types.Instance',
+                                proper_subtype: bool = False) -> bool:
+        if not proper_subtype:
+            return (left, right) in self._cache
+        return (left, right) in self._cache_proper
+
     def __getitem__(self, name: str) -> 'SymbolTableNode':
         n = self.get(name)
         if n:
@@ -2150,8 +2153,8 @@ class TypeInfo(SymbolNode):
         return self.dump()
 
     def dump(self,
-             str_conv: 'mypy.strconv.StrConv' = None,
-             type_str_conv: 'mypy.types.TypeStrVisitor' = None) -> str:
+             str_conv: 'Optional[mypy.strconv.StrConv]' = None,
+             type_str_conv: 'Optional[mypy.types.TypeStrVisitor]' = None) -> str:
         """Return a string dump of the contents of the TypeInfo."""
         if not str_conv:
             str_conv = mypy.strconv.StrConv()
@@ -2191,6 +2194,7 @@ class TypeInfo(SymbolNode):
                 'names': self.names.serialize(self.fullname()),
                 'defn': self.defn.serialize(),
                 'abstract_attributes': self.abstract_attributes,
+                'protocol_members': self.protocol_members,
                 'type_vars': self.type_vars,
                 'bases': [b.serialize() for b in self.bases],
                 '_promote': None if self._promote is None else self._promote.serialize(),
@@ -2214,6 +2218,7 @@ class TypeInfo(SymbolNode):
         ti._fullname = data['fullname']
         # TODO: Is there a reason to reconstruct ti.subtypes?
         ti.abstract_attributes = data['abstract_attributes']
+        ti.protocol_members = data['protocol_members']
         ti.type_vars = data['type_vars']
         ti.bases = [mypy.types.Instance.deserialize(b) for b in data['bases']]
         ti._promote = (None if data['_promote'] is None
@@ -2274,6 +2279,8 @@ class SymbolTableNode:
     # If False, this name won't be imported via 'from <module> import *'.
     # This has no effect on names within classes.
     module_public = True
+    # If True, the name will never be exported (needed for stub files)
+    module_hidden = False
     # For deserialized MODULE_REF nodes, the referenced module name;
     # for other nodes, optionally the name of the referenced object.
     cross_ref = None  # type: Optional[str]
@@ -2282,15 +2289,21 @@ class SymbolTableNode:
     # Was this defined by assignment to self attribute?
     implicit = False  # type: bool
 
-    def __init__(self, kind: int, node: Optional[SymbolNode], mod_id: str = None,
-                 typ: 'mypy.types.Type' = None,
-                 module_public: bool = True, normalized: bool = False,
+    def __init__(self,
+                 kind: int,
+                 node: Optional[SymbolNode],
+                 mod_id: Optional[str] = None,
+                 typ: 'Optional[mypy.types.Type]' = None,
+                 module_public: bool = True,
+                 normalized: bool = False,
                  alias_tvars: Optional[List[str]] = None,
-                 implicit: bool = False) -> None:
+                 implicit: bool = False,
+                 module_hidden: bool = False) -> None:
         self.kind = kind
         self.node = node
         self.type_override = typ
         self.mod_id = mod_id
+        self.module_hidden = module_hidden
         self.module_public = module_public
         self.normalized = normalized
         self.alias_tvars = alias_tvars
@@ -2336,6 +2349,8 @@ class SymbolTableNode:
         data = {'.class': 'SymbolTableNode',
                 'kind': node_kinds[self.kind],
                 }  # type: JsonDict
+        if self.module_hidden:
+            data['module_hidden'] = True
         if not self.module_public:
             data['module_public'] = False
         if self.normalized:
@@ -2377,6 +2392,8 @@ class SymbolTableNode:
             stnode = SymbolTableNode(kind, node, typ=typ)
             if 'alias_tvars' in data:
                 stnode.alias_tvars = data['alias_tvars']
+        if 'module_hidden' in data:
+            stnode.module_hidden = data['module_hidden']
         if 'module_public' in data:
             stnode.module_public = data['module_public']
         if 'normalized' in data:
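
For context on the new 'module_hidden' flag: it marks names that must never be re-exported
from a module, which matters mostly for stub files, where only explicit re-exports are
visible to importers. A rough illustration (module names are made up, and the exact
behaviour of the plain import is an assumption, not something stated in this diff):

    # pkg/mod.pyi  (a stub file)
    import os                              # assumption: becomes module_hidden in a stub
    from ._impl import helper as helper    # explicit 'as' re-export stays public

    # client.py
    from pkg.mod import helper             # OK
    from pkg.mod import os                 # rejected: 'os' is hidden in the stub
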
diff --git a/mypy/options.py b/mypy/options.py
index 7d0fe09..c6ee84c 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -1,8 +1,9 @@
+from collections import OrderedDict
 import fnmatch
 import pprint
 import sys
 
-from typing import Mapping, Optional, Tuple, List, Pattern, Dict
+from typing import Dict, List, Mapping, MutableMapping, Optional, Pattern, Set, Tuple
 
 from mypy import defaults
 
@@ -20,6 +21,7 @@ class Options:
         "ignore_missing_imports",
         "follow_imports",
         "disallow_any",
+        "disallow_subclassing_any",
         "disallow_untyped_calls",
         "disallow_untyped_defs",
         "check_untyped_defs",
@@ -32,6 +34,7 @@ class Options:
         "strict_boolean",
         "no_implicit_optional",
         "strict_optional",
+        "disallow_untyped_decorators",
     }
 
     OPTIONS_AFFECTING_CACHE = ((PER_MODULE_OPTIONS | {"quick_and_dirty", "platform"})
@@ -56,9 +59,15 @@ class Options:
         # Disallow defining untyped (or incompletely typed) functions
         self.disallow_untyped_defs = False
 
+        # Disallow defining incompletely typed functions
+        self.disallow_incomplete_defs = False
+
         # Type check unannotated functions
         self.check_untyped_defs = False
 
+        # Disallow decorating typed functions with untyped decorators
+        self.disallow_untyped_decorators = False
+
         # Disallow subclassing values of type 'Any'
         self.disallow_subclassing_any = False
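
The two new strictness options above presumably surface as the flags
--disallow-incomplete-defs and --disallow-untyped-decorators (assumed spellings, following
the existing naming scheme). A small, hypothetical illustration of the code they flag:

    def scale(x: int, factor):            # partially annotated: rejected under
        return x * factor                 # disallow_incomplete_defs

    def plain_decorator(func):            # the decorator itself has no annotations
        return func

    @plain_decorator                      # typed function decorated by an untyped
    def double(x: int) -> int:            # decorator: rejected under
        return x * 2                      # disallow_untyped_decorators
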
 
@@ -78,6 +87,9 @@ class Options:
         # Warn about unused '# type: ignore' comments
         self.warn_unused_ignores = False
 
+        # Warn about unused '[mypy-<pattern>]' config sections
+        self.warn_unused_configs = False
+
         # Files in which to ignore all non-fatal errors
         self.ignore_errors = False
 
@@ -120,7 +132,10 @@ class Options:
         self.plugins = []  # type: List[str]
 
         # Per-module options (raw)
-        self.per_module_options = {}  # type: Dict[Pattern[str], Dict[str, object]]
+        pm_opts = OrderedDict()  # type: OrderedDict[Pattern[str], Dict[str, object]]
+        self.per_module_options = pm_opts
+        # Map pattern back to glob
+        self.unused_configs = OrderedDict()  # type: OrderedDict[Pattern[str], str]
 
         # -- development options --
         self.verbosity = 0  # More verbose messages (for troubleshooting)
@@ -154,6 +169,8 @@ class Options:
         updates = {}
         for pattern in self.per_module_options:
             if self.module_matches_pattern(module, pattern):
+                if pattern in self.unused_configs:
+                    del self.unused_configs[pattern]
                 updates.update(self.per_module_options[pattern])
         if not updates:
             return self
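
A loose sketch of the bookkeeping behind warn_unused_configs, under simplified assumptions:
here per-module sections are matched with fnmatch-style globs, whereas the real code keys
per_module_options by pre-compiled regex patterns and uses unused_configs to map each
pattern back to its original glob for reporting.

    import fnmatch
    from collections import OrderedDict
    from typing import Dict

    per_module_options = OrderedDict([
        ('thirdparty.*', {'ignore_missing_imports': True}),
        ('proj.core', {'disallow_untyped_defs': True}),
    ])
    unused_configs = OrderedDict((pat, pat) for pat in per_module_options)

    def options_for_module(module: str) -> Dict[str, object]:
        updates = {}  # type: Dict[str, object]
        for pattern in per_module_options:
            if fnmatch.fnmatch(module, pattern):
                unused_configs.pop(pattern, None)   # section matched: it is "used"
                updates.update(per_module_options[pattern])
        return updates

    options_for_module('proj.core')
    print(list(unused_configs))   # ['thirdparty.*'] -> would be reported as unused
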
diff --git a/mypy/parse.py b/mypy/parse.py
index 2e02269..3583550 100644
--- a/mypy/parse.py
+++ b/mypy/parse.py
@@ -16,7 +16,7 @@ def parse(source: Union[str, bytes],
 
     The python_version (major, minor) option determines the Python syntax variant.
     """
-    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
+    is_stub_file = fnam.endswith('.pyi')
     if options.python_version[0] >= 3 or is_stub_file:
         import mypy.fastparse
         return mypy.fastparse.parse(source,
diff --git a/mypy/plugin.py b/mypy/plugin.py
index b362cbf..27917a6 100644
--- a/mypy/plugin.py
+++ b/mypy/plugin.py
@@ -7,7 +7,7 @@ from typing import Callable, List, Tuple, Optional, NamedTuple, TypeVar
 from mypy.nodes import Expression, StrExpr, IntExpr, UnaryExpr, Context, DictExpr
 from mypy.types import (
     Type, Instance, CallableType, TypedDictType, UnionType, NoneTyp, FunctionLike, TypeVarType,
-    AnyType, TypeList, UnboundType
+    AnyType, TypeList, UnboundType, TypeOfAny
 )
 from mypy.messages import MessageBuilder
 from mypy.options import Options
@@ -311,7 +311,7 @@ def typed_dict_get_callback(ctx: MethodContext) -> Type:
                         return UnionType.make_simplified_union([value_type, ctx.arg_types[1][0]])
             else:
                 ctx.api.msg.typeddict_key_not_found(ctx.type, key, ctx.context)
-                return AnyType()
+                return AnyType(TypeOfAny.from_error)
     return ctx.default_return_type
 
 
diff --git a/mypy/report.py b/mypy/report.py
index a221a25..daf791b 100644
--- a/mypy/report.py
+++ b/mypy/report.py
@@ -1,10 +1,12 @@
 """Classes for producing HTML reports about imprecision."""
 
 from abc import ABCMeta, abstractmethod
+import collections
 import json
 import os
 import shutil
 import tokenize
+import typing
 from operator import attrgetter
 from urllib.request import pathname2url
 from typing import Any, Callable, Dict, List, Optional, Tuple, cast
@@ -13,11 +15,13 @@ import time
 
 import sys
 
+import itertools
+
 from mypy.nodes import MypyFile, Expression, FuncDef
 from mypy import stats
 from mypy.options import Options
 from mypy.traverser import TraverserVisitor
-from mypy.types import Type
+from mypy.types import Type, TypeOfAny
 from mypy.version import __version__
 
 try:
@@ -26,6 +30,14 @@ try:
 except ImportError:
     LXML_INSTALLED = False
 
+type_of_any_name_map = collections.OrderedDict([
+    (TypeOfAny.unannotated, "Unannotated"),
+    (TypeOfAny.explicit, "Explicit"),
+    (TypeOfAny.from_unimported_type, "Unimported"),
+    (TypeOfAny.from_omitted_generics, "Omitted Generics"),
+    (TypeOfAny.from_error, "Error"),
+    (TypeOfAny.special_form, "Special Form"),
+])  # type: collections.OrderedDict[TypeOfAny.TypeOfAny, str]
 
 reporter_classes = {}  # type: Dict[str, Tuple[Callable[[Reports, str], AbstractReporter], bool]]
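
As a rough guide to the categories named in type_of_any_name_map (this is an illustrative
snippet, not test data): unannotated functions produce "Unannotated" Anys, an explicit Any
annotation counts as "Explicit", and bare generics like List are "Omitted Generics";
"Unimported", "Error" and "Special Form" come from missing modules, type errors, and
internal special forms respectively.

    from typing import Any, List

    def unannotated(x):        # parameter and return type: "Unannotated" Anys
        return x

    explicit = None            # type: Any   # an "Explicit" Any
    bare_generic = []          # type: List  # List means List[Any]: "Omitted Generics"
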
 
@@ -130,7 +142,7 @@ class LineCountReporter(AbstractReporter):
 
     def on_finish(self) -> None:
         counts = sorted(((c, p) for p, c in self.counts.items()),
-                        reverse=True)  # type: List[Tuple[tuple, str]]
+                        reverse=True)  # type: List[Tuple[Tuple[int, int, int, int], str]]
         total_counts = tuple(sum(c[i] for c, p in counts)
                              for i in range(4))
         with open(os.path.join(self.output_dir, 'linecount.txt'), 'w') as f:
@@ -147,6 +159,7 @@ class AnyExpressionsReporter(AbstractReporter):
     def __init__(self, reports: Reports, output_dir: str) -> None:
         super().__init__(reports, output_dir)
         self.counts = {}  # type: Dict[str, Tuple[int, int]]
+        self.any_types_counter = {}  # type: Dict[str, typing.Counter[TypeOfAny.TypeOfAny]]
         stats.ensure_dir_exists(output_dir)
 
     def on_file(self,
@@ -154,58 +167,81 @@ class AnyExpressionsReporter(AbstractReporter):
                 type_map: Dict[Expression, Type],
                 options: Options) -> None:
         visitor = stats.StatisticsVisitor(inferred=True, filename=tree.fullname(),
-                                          typemap=type_map, all_nodes=True)
+                                          typemap=type_map, all_nodes=True,
+                                          visit_untyped_defs=False)
         tree.accept(visitor)
+        self.any_types_counter[tree.fullname()] = visitor.type_of_any_counter
         num_unanalyzed_lines = list(visitor.line_map.values()).count(stats.TYPE_UNANALYZED)
         # count each line of dead code as one expression of type "Any"
-        num_any = visitor.num_any + num_unanalyzed_lines
-        num_total = visitor.num_imprecise + visitor.num_precise + num_any
+        num_any = visitor.num_any_exprs + num_unanalyzed_lines
+        num_total = visitor.num_imprecise_exprs + visitor.num_precise_exprs + num_any
         if num_total > 0:
             self.counts[tree.fullname()] = (num_any, num_total)
 
     def on_finish(self) -> None:
+        self._report_any_exprs()
+        self._report_types_of_anys()
+
+    def _write_out_report(self,
+                          filename: str,
+                          header: List[str],
+                          rows: List[List[str]],
+                          footer: List[str],
+                          ) -> None:
+        row_len = len(header)
+        assert all(len(row) == row_len for row in rows + [header, footer])
+        min_column_distance = 3  # minimum distance between numbers in two columns
+        widths = [-1] * row_len
+        for row in rows + [header, footer]:
+            for i, value in enumerate(row):
+                widths[i] = max(widths[i], len(value))
+        for i, w in enumerate(widths):
+            # Do not add min_column_distance to the first column.
+            if i > 0:
+                widths[i] = w + min_column_distance
+        with open(os.path.join(self.output_dir, filename), 'w') as f:
+            header_str = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(header, widths)))
+            separator = '-' * len(header_str)
+            f.write(header_str + '\n')
+            f.write(separator + '\n')
+            for row_values in rows:
+                r = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(row_values, widths)))
+                f.writelines(r + '\n')
+            f.write(separator + '\n')
+            footer_str = ("{:>{}}" * len(widths)).format(*itertools.chain(*zip(footer, widths)))
+            f.writelines(footer_str + '\n')
+
+    def _report_any_exprs(self) -> None:
         total_any = sum(num_any for num_any, _ in self.counts.values())
         total_expr = sum(total for _, total in self.counts.values())
         total_coverage = 100.0
         if total_expr > 0:
             total_coverage = (float(total_expr - total_any) / float(total_expr)) * 100
 
-        any_column_name = "Anys"
-        total_column_name = "Exprs"
+        column_names = ["Name", "Anys", "Exprs", "Coverage"]
+        rows = []  # type: List[List[str]]
+        for filename in sorted(self.counts):
+            (num_any, num_total) = self.counts[filename]
+            coverage = (float(num_total - num_any) / float(num_total)) * 100
+            coverage_str = '{:.2f}%'.format(coverage)
+            rows.append([filename, str(num_any), str(num_total), coverage_str])
+        total_row = ["Total", str(total_any), str(total_expr), '{:.2f}%'.format(total_coverage)]
+        self._write_out_report('any-exprs.txt', column_names, rows, total_row)
+
+    def _report_types_of_anys(self) -> None:
+        total_counter = collections.Counter()  # type: typing.Counter[TypeOfAny.TypeOfAny]
+        for counter in self.any_types_counter.values():
+            for any_type, value in counter.items():
+                total_counter[any_type] += value
         file_column_name = "Name"
         total_row_name = "Total"
-        coverage_column_name = "Coverage"
-        # find the longest filename all files
-        name_width = max([len(file) for file in self.counts] +
-                         [len(file_column_name), len(total_row_name)])
-        # totals are the largest numbers in their column - no need to look at others
-        min_column_distance = 3  # minimum distance between numbers in two columns
-        any_width = max(len(str(total_any)) + min_column_distance, len(any_column_name))
-        exprs_width = max(len(str(total_expr)) + min_column_distance, len(total_column_name))
-        coverage_width = len(coverage_column_name) + min_column_distance
-        header = '{:{name_width}} {:>{any_width}} {:>{total_width}} {:>{coverage_width}}'.format(
-            file_column_name, any_column_name, total_column_name, coverage_column_name,
-            name_width=name_width, any_width=any_width, total_width=exprs_width,
-            coverage_width=coverage_width)
-
-        with open(os.path.join(self.output_dir, 'any-exprs.txt'), 'w') as f:
-            f.write(header + '\n')
-            separator = '-' * len(header) + '\n'
-            f.write(separator)
-            coverage_width -= 1  # subtract one for '%'
-            for file in sorted(self.counts):
-                (num_any, num_total) = self.counts[file]
-                coverage = (float(num_total - num_any) / float(num_total)) * 100
-                f.write('{:{name_width}} {:{any_width}} {:{total_width}} '
-                        '{:>{coverage_width}.2f}%\n'.
-                        format(file, num_any, num_total, coverage, name_width=name_width,
-                               any_width=any_width, total_width=exprs_width,
-                               coverage_width=coverage_width))
-            f.write(separator)
-            f.write('{:{name_width}} {:{any_width}} {:{total_width}} {:>{coverage_width}.2f}%\n'
-                    .format(total_row_name, total_any, total_expr, total_coverage,
-                            name_width=name_width, any_width=any_width, total_width=exprs_width,
-                            coverage_width=coverage_width))
+        column_names = [file_column_name] + list(type_of_any_name_map.values())
+        rows = []  # type: List[List[str]]
+        for filename, counter in self.any_types_counter.items():
+            rows.append([filename] + [str(counter[typ]) for typ in type_of_any_name_map])
+        total_row = [total_row_name] + [str(total_counter[typ])
+                                        for typ in type_of_any_name_map]
+        self._write_out_report('types-of-anys.txt', column_names, rows, total_row)
 
 
 register_reporter('any-exprs', AnyExpressionsReporter)
@@ -402,7 +438,8 @@ class MemoryXmlReporter(AbstractReporter):
                 etree.SubElement(root, 'line',
                                  number=str(lineno),
                                  precision=stats.precision_names[status],
-                                 content=line_text.rstrip('\n'))
+                                 content=line_text.rstrip('\n'),
+                                 any_info=self._get_any_info_for_line(visitor, lineno))
         # Assumes a layout similar to what XmlReporter uses.
         xslt_path = os.path.relpath('mypy-html.xslt', path)
         transform_pi = etree.ProcessingInstruction('xml-stylesheet',
@@ -413,6 +450,19 @@ class MemoryXmlReporter(AbstractReporter):
         self.last_xml = doc
         self.files.append(file_info)
 
+    @staticmethod
+    def _get_any_info_for_line(visitor: stats.StatisticsVisitor, lineno: int) -> str:
+        if lineno in visitor.any_line_map:
+            result = "Any Types on this line: "
+            counter = collections.Counter()  # type: typing.Counter[TypeOfAny.TypeOfAny]
+            for typ in visitor.any_line_map[lineno]:
+                counter[typ.type_of_any] += 1
+            for any_type, occurrences in counter.items():
+                result += "\n{} (x{})".format(type_of_any_name_map[any_type], occurrences)
+            return result
+        else:
+            return "No Anys on this line!"
+
     def on_finish(self) -> None:
         self.last_xml = None
         # index_path = os.path.join(self.output_dir, 'index.xml')
diff --git a/mypy/sametypes.py b/mypy/sametypes.py
index 0531ecc..cba80e1 100644
--- a/mypy/sametypes.py
+++ b/mypy/sametypes.py
@@ -68,8 +68,8 @@ class SameTypeVisitor(TypeVisitor[bool]):
         # We can get here when isinstance is used inside a lambda
         # whose type is being inferred. In any event, we have no reason
         # to think that an ErasedType will end up being the same as
-        # any other type, even another ErasedType.
-        return False
+        # any other type, except another ErasedType (for protocols).
+        return isinstance(self.right, ErasedType)
 
     def visit_deleted_type(self, left: DeletedType) -> bool:
         return isinstance(self.right, DeletedType)
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 15ccbe5..53cecba 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -70,6 +70,7 @@ from mypy.nodes import (
     COVARIANT, CONTRAVARIANT, INVARIANT, UNBOUND_IMPORTED, LITERAL_YES, ARG_OPT, nongen_builtins,
     collections_type_aliases, get_member_expr_fullname,
 )
+from mypy.literals import literal
 from mypy.tvar_scope import TypeVarScope
 from mypy.typevars import has_no_typevars, fill_typevars
 from mypy.visitor import NodeVisitor
@@ -79,7 +80,8 @@ from mypy.messages import CANNOT_ASSIGN_TO_TYPE, MessageBuilder
 from mypy.types import (
     FunctionLike, UnboundType, TypeVarDef, TypeType, TupleType, UnionType, StarType, function_type,
     TypedDictType, NoneTyp, CallableType, Overloaded, Instance, Type, TypeVarType, AnyType,
-    TypeTranslator,
+    TypeTranslator, TypeOfAny, TypeVisitor, UninhabitedType, ErasedType, DeletedType,
+    PartialType, ForwardRef
 )
 from mypy.nodes import implicit_module_attrs
 from mypy.typeanal import (
@@ -181,7 +183,7 @@ SUGGESTED_TEST_FIXTURES = {
 }
 
 
-class SemanticAnalyzer(NodeVisitor):
+class SemanticAnalyzer(NodeVisitor[None]):
     """Semantically analyze parsed mypy files.
 
     The analyzer binds names and does various consistency checks for a
@@ -543,11 +545,17 @@ class SemanticAnalyzer(NodeVisitor):
             if defn.impl is not None:
                 assert defn.impl is defn.items[-1]
                 defn.items = defn.items[:-1]
-
             elif not self.is_stub_file and not non_overload_indexes:
-                self.fail(
-                    "An overloaded function outside a stub file must have an implementation",
-                    defn)
+                if not (self.is_class_scope() and self.type.is_protocol):
+                    self.fail(
+                        "An overloaded function outside a stub file must have an implementation",
+                        defn)
+                else:
+                    for item in defn.items:
+                        if isinstance(item, Decorator):
+                            item.func.is_abstract = True
+                        else:
+                            item.is_abstract = True
 
         if types:
             defn.type = Overloaded(types)
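
With the branch above, an overloaded method declared inside a protocol no longer needs an
implementation outside stub files; every overload item is simply marked abstract. A minimal
sketch, assuming Protocol comes from typing_extensions:

    from typing import overload
    from typing_extensions import Protocol

    class Reader(Protocol):
        @overload
        def read(self) -> bytes: ...
        @overload
        def read(self, size: int) -> bytes: ...
        # No implementation needed here: both items become abstract, so only
        # concrete classes implementing Reader must provide a real read().
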
@@ -656,7 +664,8 @@ class SemanticAnalyzer(NodeVisitor):
         if len(sig.arg_types) < len(fdef.arguments):
             self.fail('Type signature has too few arguments', fdef)
             # Add dummy Any arguments to prevent crashes later.
-            extra_anys = [AnyType()] * (len(fdef.arguments) - len(sig.arg_types))
+            num_extra_anys = len(fdef.arguments) - len(sig.arg_types)
+            extra_anys = [AnyType(TypeOfAny.from_error)] * num_extra_anys
             sig.arg_types.extend(extra_anys)
         elif len(sig.arg_types) > len(fdef.arguments):
             self.fail('Type signature has too many arguments', fdef, blocker=True)
@@ -670,6 +679,8 @@ class SemanticAnalyzer(NodeVisitor):
     @contextmanager
     def analyze_class_body(self, defn: ClassDef) -> Iterator[bool]:
         with self.tvar_scope_frame(self.tvar_scope.class_frame()):
+            is_protocol = self.detect_protocol_base(defn)
+            self.update_metaclass(defn)
             self.clean_up_bases_and_infer_type_variables(defn)
             self.analyze_class_keywords(defn)
             if self.analyze_typeddict_classdef(defn):
@@ -709,13 +720,12 @@ class SemanticAnalyzer(NodeVisitor):
                 self.setup_class_def_analysis(defn)
                 self.analyze_base_classes(defn)
                 self.analyze_metaclass(defn)
-
+                defn.info.is_protocol = is_protocol
+                defn.info.runtime_protocol = False
                 for decorator in defn.decorators:
                     self.analyze_class_decorator(defn, decorator)
-
                 self.enter_class(defn.info)
                 yield True
-
                 self.calculate_abstract_status(defn.info)
                 self.setup_type_promotion(defn)
 
@@ -742,6 +752,12 @@ class SemanticAnalyzer(NodeVisitor):
 
     def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None:
         decorator.accept(self)
+        if (isinstance(decorator, RefExpr) and
+                decorator.fullname in ('typing.runtime', 'typing_extensions.runtime')):
+            if defn.info.is_protocol:
+                defn.info.runtime_protocol = True
+            else:
+                self.fail('@runtime can only be used with protocol classes', defn)
 
     def calculate_abstract_status(self, typ: TypeInfo) -> None:
         """Calculate abstract status of a class.
@@ -767,6 +783,10 @@ class SemanticAnalyzer(NodeVisitor):
                     if fdef.is_abstract and name not in concrete:
                         typ.is_abstract = True
                         abstract.append(name)
+                elif isinstance(node, Var):
+                    if node.is_abstract_var and name not in concrete:
+                        typ.is_abstract = True
+                        abstract.append(name)
                 concrete.add(name)
         typ.abstract_attributes = sorted(abstract)
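
The new Var branch makes a bare attribute declaration in a protocol an abstract member, so
concrete implementations must actually define it. Roughly (again assuming Protocol from
typing_extensions):

    from typing_extensions import Protocol

    class Named(Protocol):
        name: str              # no value: treated as an abstract protocol member

    class User:
        def __init__(self, name: str) -> None:
            self.name = name   # provides the 'name' member required by Named

    def greet(obj: Named) -> str:
        return 'Hello, ' + obj.name

    print(greet(User('Ada')))
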
 
@@ -789,6 +809,21 @@ class SemanticAnalyzer(NodeVisitor):
                 promote_target = self.named_type_or_none(promotions[defn.fullname])
         defn.info._promote = promote_target
 
+    def detect_protocol_base(self, defn: ClassDef) -> bool:
+        for base_expr in defn.base_type_exprs:
+            try:
+                base = expr_to_unanalyzed_type(base_expr)
+            except TypeTranslationError:
+                continue  # This will be reported later
+            if not isinstance(base, UnboundType):
+                continue
+            sym = self.lookup_qualified(base.name, base)
+            if sym is None or sym.node is None:
+                continue
+            if sym.node.fullname() in ('typing.Protocol', 'typing_extensions.Protocol'):
+                return True
+        return False
+
     def clean_up_bases_and_infer_type_variables(self, defn: ClassDef) -> None:
         """Remove extra base classes such as Generic and infer type vars.
 
@@ -799,12 +834,8 @@ class SemanticAnalyzer(NodeVisitor):
         Now we will remove Generic[T] from bases of Foo and infer that the
         type variable 'T' is a type argument of Foo.
 
-        We also process six.with_metaclass() here.
-
         Note that this is performed *before* semantic analysis.
         """
-        # First process six.with_metaclass if present and well-formed
-        defn.base_type_exprs, defn.metaclass = self.check_with_metaclass(defn)
         removed = []  # type: List[int]
         declared_tvars = []  # type: TypeVarList
         for i, base_expr in enumerate(defn.base_type_exprs):
@@ -816,17 +847,26 @@ class SemanticAnalyzer(NodeVisitor):
             tvars = self.analyze_typevar_declaration(base)
             if tvars is not None:
                 if declared_tvars:
-                    self.fail('Duplicate Generic in bases', defn)
+                    self.fail('Only single Generic[...] or Protocol[...] can be in bases', defn)
                 removed.append(i)
                 declared_tvars.extend(tvars)
+            if isinstance(base, UnboundType):
+                sym = self.lookup_qualified(base.name, base)
+                if sym is not None and sym.node is not None:
+                    if (sym.node.fullname() in ('typing.Protocol',
+                                                'typing_extensions.Protocol') and
+                            i not in removed):
+                        # also remove bare 'Protocol' bases
+                        removed.append(i)
 
         all_tvars = self.get_all_bases_tvars(defn, removed)
         if declared_tvars:
             if len(remove_dups(declared_tvars)) < len(declared_tvars):
-                self.fail("Duplicate type variables in Generic[...]", defn)
+                self.fail("Duplicate type variables in Generic[...] or Protocol[...]", defn)
             declared_tvars = remove_dups(declared_tvars)
             if not set(all_tvars).issubset(set(declared_tvars)):
-                self.fail("If Generic[...] is present it should list all type variables", defn)
+                self.fail("If Generic[...] or Protocol[...] is present"
+                          " it should list all type variables", defn)
                 # In case of error, Generic tvars will go first
                 declared_tvars = remove_dups(declared_tvars + all_tvars)
         else:
@@ -848,7 +888,9 @@ class SemanticAnalyzer(NodeVisitor):
         sym = self.lookup_qualified(unbound.name, unbound)
         if sym is None or sym.node is None:
             return None
-        if sym.node.fullname() == 'typing.Generic':
+        if (sym.node.fullname() == 'typing.Generic' or
+                sym.node.fullname() == 'typing.Protocol' and t.args or
+                sym.node.fullname() == 'typing_extensions.Protocol' and t.args):
             tvars = []  # type: TypeVarList
             for arg in unbound.args:
                 tvar = self.analyze_unbound_tvar(arg)
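
Together with the bases handling above, this lets Protocol[...] declare type variables
exactly like Generic[...] (and mixing or duplicating them is now reported). A generic
protocol therefore looks roughly like this, assuming typing_extensions provides Protocol:

    from typing import Iterator, TypeVar
    from typing_extensions import Protocol

    T = TypeVar('T')

    class SupportsIteration(Protocol[T]):
        def __iter__(self) -> Iterator[T]: ...

    def first(items: SupportsIteration[int]) -> int:
        return next(iter(items))

    print(first([1, 2, 3]))   # a list matches the protocol structurally
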
@@ -900,8 +942,11 @@ class SemanticAnalyzer(NodeVisitor):
                         info = self.build_namedtuple_typeinfo(
                             defn.name, items, types, default_items)
                         node.node = info
+                        defn.info.replaced = info
                         defn.info = info
                         defn.analyzed = NamedTupleExpr(info)
+                        defn.analyzed.line = defn.line
+                        defn.analyzed.column = defn.column
                         return info
         return None
 
@@ -939,7 +984,9 @@ class SemanticAnalyzer(NodeVisitor):
                 # Append name and type in this case...
                 name = stmt.lvalues[0].name
                 items.append(name)
-                types.append(AnyType() if stmt.type is None else self.anal_type(stmt.type))
+                types.append(AnyType(TypeOfAny.unannotated)
+                             if stmt.type is None
+                             else self.anal_type(stmt.type))
+                # ...despite possible minor failures that allow further analysis.
                 if name.startswith('_'):
                     self.fail('NamedTuple field name cannot start with an underscore: {}'
@@ -999,6 +1046,10 @@ class SemanticAnalyzer(NodeVisitor):
                     defn.has_incompatible_baseclass = True
                 info.tuple_type = base
                 base_types.append(base.fallback)
+                if isinstance(base_expr, CallExpr):
+                    defn.analyzed = NamedTupleExpr(base.fallback.type)
+                    defn.analyzed.line = defn.line
+                    defn.analyzed.column = defn.column
             elif isinstance(base, Instance):
                 if base.type.is_newtype:
                     self.fail("Cannot subclass NewType", defn)
@@ -1044,26 +1095,56 @@ class SemanticAnalyzer(NodeVisitor):
         if defn.info.is_enum and defn.type_vars:
             self.fail("Enum class cannot be generic", defn)
 
-    def check_with_metaclass(self, defn: ClassDef) -> Tuple[List[Expression], Optional[str]]:
-        # Special-case six.with_metaclass(M, B1, B2, ...).
-        base_type_exprs, metaclass = defn.base_type_exprs, defn.metaclass
-        if metaclass is None and len(base_type_exprs) == 1:
-            base_expr = base_type_exprs[0]
+    def update_metaclass(self, defn: ClassDef) -> None:
+        """Lookup for special metaclass declarations, and update defn fields accordingly.
+
+        * __metaclass__ attribute in Python 2
+        * six.with_metaclass(M, B1, B2, ...)
+        * @six.add_metaclass(M)
+        """
+
+        # Look for "__metaclass__ = <metaclass>" in Python 2
+        python2_meta_expr = None  # type: Optional[Expression]
+        if self.options.python_version[0] == 2:
+            for body_node in defn.defs.body:
+                if isinstance(body_node, ClassDef) and body_node.name == "__metaclass__":
+                    self.fail("Metaclasses defined as inner classes are not supported", body_node)
+                    break
+                elif isinstance(body_node, AssignmentStmt) and len(body_node.lvalues) == 1:
+                    lvalue = body_node.lvalues[0]
+                    if isinstance(lvalue, NameExpr) and lvalue.name == "__metaclass__":
+                        python2_meta_expr = body_node.rvalue
+
+        # Look for six.with_metaclass(M, B1, B2, ...)
+        with_meta_expr = None  # type: Optional[Expression]
+        if len(defn.base_type_exprs) == 1:
+            base_expr = defn.base_type_exprs[0]
             if isinstance(base_expr, CallExpr) and isinstance(base_expr.callee, RefExpr):
                 base_expr.callee.accept(self)
                 if (base_expr.callee.fullname == 'six.with_metaclass'
                         and len(base_expr.args) >= 1
                         and all(kind == ARG_POS for kind in base_expr.arg_kinds)):
-                    metaclass_expr = base_expr.args[0]
-                    if isinstance(metaclass_expr, NameExpr):
-                        metaclass = metaclass_expr.name
-                    elif isinstance(metaclass_expr, MemberExpr):
-                        metaclass = get_member_expr_fullname(metaclass_expr)
-                    else:
-                        self.fail("Dynamic metaclass not supported for '%s'" % defn.name,
-                                  metaclass_expr)
-                    return (base_expr.args[1:], metaclass)
-        return (base_type_exprs, metaclass)
+                    with_meta_expr = base_expr.args[0]
+                    defn.base_type_exprs = base_expr.args[1:]
+
+        # Look for @six.add_metaclass(M)
+        add_meta_expr = None  # type: Optional[Expression]
+        for dec_expr in defn.decorators:
+            if isinstance(dec_expr, CallExpr) and isinstance(dec_expr.callee, RefExpr):
+                dec_expr.callee.accept(self)
+                if (dec_expr.callee.fullname == 'six.add_metaclass'
+                    and len(dec_expr.args) == 1
+                        and dec_expr.arg_kinds[0] == ARG_POS):
+                    add_meta_expr = dec_expr.args[0]
+                    break
+
+        metas = {defn.metaclass, python2_meta_expr, with_meta_expr, add_meta_expr} - {None}
+        if len(metas) == 0:
+            return
+        if len(metas) > 1:
+            self.fail("Multiple metaclass definitions", defn)
+            return
+        defn.metaclass = metas.pop()
 
     def expr_to_analyzed_type(self, expr: Expression) -> Type:
         if isinstance(expr, CallExpr):
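
update_metaclass folds the three supported spellings into a single Expression stored on the
ClassDef, and declaring more than one of them now produces "Multiple metaclass definitions".
For reference, the spellings it looks for are:

    import six

    class Meta(type):
        pass

    class A(six.with_metaclass(Meta, object)):   # six.with_metaclass(M, B1, ...)
        pass

    @six.add_metaclass(Meta)                     # @six.add_metaclass(M)
    class B(object):
        pass

    class C(object):
        __metaclass__ = Meta                     # Python 2 only: __metaclass__ attribute

    print(type(A) is Meta, type(B) is Meta)      # True True
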
@@ -1112,36 +1193,15 @@ class SemanticAnalyzer(NodeVisitor):
         return False
 
     def analyze_metaclass(self, defn: ClassDef) -> None:
-        error_context = defn  # type: Context
-        if defn.metaclass is None and self.options.python_version[0] == 2:
-            # Look for "__metaclass__ = <metaclass>" in Python 2.
-            for body_node in defn.defs.body:
-                if isinstance(body_node, ClassDef) and body_node.name == "__metaclass__":
-                    self.fail("Metaclasses defined as inner classes are not supported", body_node)
-                    return
-                elif isinstance(body_node, AssignmentStmt) and len(body_node.lvalues) == 1:
-                    lvalue = body_node.lvalues[0]
-                    if isinstance(lvalue, NameExpr) and lvalue.name == "__metaclass__":
-                        error_context = body_node.rvalue
-                        if isinstance(body_node.rvalue, NameExpr):
-                            name = body_node.rvalue.name
-                        elif isinstance(body_node.rvalue, MemberExpr):
-                            name = get_member_expr_fullname(body_node.rvalue)
-                        else:
-                            name = None
-                        if name:
-                            defn.metaclass = name
-                        else:
-                            self.fail(
-                                "Dynamic metaclass not supported for '%s'" % defn.name,
-                                body_node
-                            )
-                            return
         if defn.metaclass:
-            if defn.metaclass == '<error>':
-                self.fail("Dynamic metaclass not supported for '%s'" % defn.name, error_context)
+            if isinstance(defn.metaclass, NameExpr):
+                metaclass_name = defn.metaclass.name
+            elif isinstance(defn.metaclass, MemberExpr):
+                metaclass_name = get_member_expr_fullname(defn.metaclass)
+            else:
+                self.fail("Dynamic metaclass not supported for '%s'" % defn.name, defn.metaclass)
                 return
-            sym = self.lookup_qualified(defn.metaclass, error_context)
+            sym = self.lookup_qualified(metaclass_name, defn.metaclass)
             if sym is None:
                 # Probably a name error - it is already handled elsewhere
                 return
@@ -1153,10 +1213,11 @@ class SemanticAnalyzer(NodeVisitor):
                 #       attributes, similar to an 'Any' base class.
                 return
             if not isinstance(sym.node, TypeInfo) or sym.node.tuple_type is not None:
-                self.fail("Invalid metaclass '%s'" % defn.metaclass, defn)
+                self.fail("Invalid metaclass '%s'" % metaclass_name, defn.metaclass)
                 return
             if not sym.node.is_metaclass():
-                self.fail("Metaclasses not inheriting from 'type' are not supported", defn)
+                self.fail("Metaclasses not inheriting from 'type' are not supported",
+                          defn.metaclass)
                 return
             inst = fill_typevars(sym.node)
             assert isinstance(inst, Instance)
@@ -1165,7 +1226,7 @@ class SemanticAnalyzer(NodeVisitor):
         if defn.info.metaclass_type is None:
             # Inconsistency may happen due to multiple baseclasses even in classes that
             # do not declare explicit metaclass, but it's harder to catch at this stage
-            if defn.metaclass:
+            if defn.metaclass is not None:
                 self.fail("Inconsistent metaclass structure for '%s'" % defn.name, defn)
 
     def object_type(self) -> Instance:
@@ -1180,7 +1241,7 @@ class SemanticAnalyzer(NodeVisitor):
         leading_type = checkmember.type_object_type(info, self.builtin_type)
         if isinstance(leading_type, Overloaded):
             # Overloaded __init__ is too complex to handle.  Plus it's stubs only.
-            return AnyType()
+            return AnyType(TypeOfAny.special_form)
         else:
             return leading_type
 
@@ -1191,7 +1252,7 @@ class SemanticAnalyzer(NodeVisitor):
         if args:
             # TODO: assert len(args) == len(node.defn.type_vars)
             return Instance(node, args)
-        return Instance(node, [AnyType()] * len(node.defn.type_vars))
+        return Instance(node, [AnyType(TypeOfAny.special_form)] * len(node.defn.type_vars))
 
     def named_type_or_none(self, qualified_name: str, args: List[Type] = None) -> Instance:
         sym = self.lookup_fully_qualified_or_none(qualified_name)
@@ -1202,7 +1263,7 @@ class SemanticAnalyzer(NodeVisitor):
         if args:
             # TODO: assert len(args) == len(node.defn.type_vars)
             return Instance(node, args)
-        return Instance(node, [AnyType()] * len(node.defn.type_vars))
+        return Instance(node, [AnyType(TypeOfAny.unannotated)] * len(node.defn.type_vars))
 
     def is_typeddict(self, expr: Expression) -> bool:
         return (isinstance(expr, RefExpr) and isinstance(expr.node, TypeInfo) and
@@ -1227,8 +1288,11 @@ class SemanticAnalyzer(NodeVisitor):
                     # Building a new TypedDict
                     fields, types, required_keys = self.check_typeddict_classdef(defn)
                     info = self.build_typeddict_typeinfo(defn.name, fields, types, required_keys)
+                    defn.info.replaced = info
                     node.node = info
                     defn.analyzed = TypedDictExpr(info)
+                    defn.analyzed.line = defn.line
+                    defn.analyzed.column = defn.column
                     return True
                 # Extending/merging existing TypedDicts
                 if any(not isinstance(expr, RefExpr) or
@@ -1259,8 +1323,11 @@ class SemanticAnalyzer(NodeVisitor):
                 types.extend(new_types)
                 required_keys.update(new_required_keys)
                 info = self.build_typeddict_typeinfo(defn.name, keys, types, required_keys)
+                defn.info.replaced = info
                 node.node = info
                 defn.analyzed = TypedDictExpr(info)
+                defn.analyzed.line = defn.line
+                defn.analyzed.column = defn.column
                 return True
         return False
 
@@ -1280,7 +1347,7 @@ class SemanticAnalyzer(NodeVisitor):
                 # Still allow pass or ... (for empty TypedDict's).
                 if (not isinstance(stmt, PassStmt) and
                     not (isinstance(stmt, ExpressionStmt) and
-                         isinstance(stmt.expr, EllipsisExpr))):
+                         isinstance(stmt.expr, (EllipsisExpr, StrExpr)))):
                     self.fail(TPDICT_CLASS_ERROR, stmt)
             elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr):
                 # An assignment, but an invalid one.
@@ -1296,7 +1363,9 @@ class SemanticAnalyzer(NodeVisitor):
                     continue
                 # Append name and type in this case...
                 fields.append(name)
-                types.append(AnyType() if stmt.type is None else self.anal_type(stmt.type))
+                types.append(AnyType(TypeOfAny.unannotated)
+                             if stmt.type is None
+                             else self.anal_type(stmt.type))
                 # ...despite possible minor failures that allow further analysis.
                 if stmt.type is None or hasattr(stmt, 'new_syntax') and not stmt.new_syntax:
                     self.fail(TPDICT_CLASS_ERROR, stmt)
@@ -1317,12 +1386,11 @@ class SemanticAnalyzer(NodeVisitor):
             if as_id is not None:
                 self.add_module_symbol(id, as_id, module_public=True, context=i)
             else:
-                # Modules imported in a stub file without using 'as x' won't get exported when
-                # doing 'from m import *'.
+                # Modules imported in a stub file without using 'as x' won't get exported.
                 module_public = not self.is_stub_file
                 base = id.split('.')[0]
                 self.add_module_symbol(base, base, module_public=module_public,
-                                       context=i)
+                                       context=i, module_hidden=not module_public)
                 self.add_submodules_to_parent_modules(id, module_public)
 
     def add_submodules_to_parent_modules(self, id: str, module_public: bool) -> None:
@@ -1351,11 +1419,12 @@ class SemanticAnalyzer(NodeVisitor):
             id = parent
 
     def add_module_symbol(self, id: str, as_id: str, module_public: bool,
-                          context: Context) -> None:
+                          context: Context, module_hidden: bool = False) -> None:
         if id in self.modules:
             m = self.modules[id]
             self.add_symbol(as_id, SymbolTableNode(MODULE_REF, m, self.cur_mod_id,
-                                                   module_public=module_public), context)
+                                                   module_public=module_public,
+                                                   module_hidden=module_hidden), context)
         else:
             self.add_unknown_symbol(as_id, context, is_import=True)
 
@@ -1366,19 +1435,35 @@ class SemanticAnalyzer(NodeVisitor):
         for id, as_id in imp.names:
             node = module.names.get(id) if module else None
             missing = False
+            possible_module_id = import_id + '.' + id
 
             # If the module does not contain a symbol with the name 'id',
             # try checking if it's a module instead.
             if not node or node.kind == UNBOUND_IMPORTED:
-                possible_module_id = import_id + '.' + id
                 mod = self.modules.get(possible_module_id)
                 if mod is not None:
                     node = SymbolTableNode(MODULE_REF, mod, import_id)
                     self.add_submodules_to_parent_modules(possible_module_id, True)
                 elif possible_module_id in self.missing_modules:
                     missing = True
-
-            if node and node.kind != UNBOUND_IMPORTED:
+            # If it is still not resolved and the module is a stub,
+            # check for a module-level __getattr__.
+            if module and not node and module.is_stub and '__getattr__' in module.names:
+                getattr_defn = module.names['__getattr__']
+                if isinstance(getattr_defn.node, FuncDef):
+                    if isinstance(getattr_defn.node.type, CallableType):
+                        typ = getattr_defn.node.type.ret_type
+                    else:
+                        typ = AnyType(TypeOfAny.from_error)
+                    if as_id:
+                        name = as_id
+                    else:
+                        name = id
+                    ast_node = Var(name, type=typ)
+                    symbol = SymbolTableNode(GDEF, ast_node, name)
+                    self.add_symbol(name, symbol, imp)
+                    return
+            if node and node.kind != UNBOUND_IMPORTED and not node.module_hidden:
                 node = self.normalize_type_alias(node, imp)
                 if not node:
                     return
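
A minimal sketch of the stub pattern the new branch above handles, assuming a hypothetical stub 'pkg.pyi' that follows the PEP 484 module-level __getattr__ convention:

    # pkg.pyi  (hypothetical stub)
    from typing import Any

    def __getattr__(name: str) -> Any: ...

    # client.py
    from pkg import anything   # resolved through the stub's __getattr__, so
                                # 'anything' is given its return type (Any here)
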
@@ -1391,12 +1476,14 @@ class SemanticAnalyzer(NodeVisitor):
                         continue
                 # 'from m import x as x' exports x in a stub file.
                 module_public = not self.is_stub_file or as_id is not None
+                module_hidden = not module_public and possible_module_id not in self.modules
                 symbol = SymbolTableNode(node.kind, node.node,
                                          self.cur_mod_id,
                                          node.type_override,
                                          module_public=module_public,
                                          normalized=node.normalized,
-                                         alias_tvars=node.alias_tvars)
+                                         alias_tvars=node.alias_tvars,
+                                         module_hidden=module_hidden)
                 self.add_symbol(imported_id, symbol, imp)
             elif module and not missing:
                 # Missing attribute.
@@ -1483,7 +1570,9 @@ class SemanticAnalyzer(NodeVisitor):
             self.add_submodules_to_parent_modules(i_id, True)
             for name, node in m.names.items():
                 node = self.normalize_type_alias(node, i)
-                if not name.startswith('_') and node.module_public:
+                # If '__all__' exists, all nodes not included in it have had module_public
+                # set to False, so names it does include are exported even with a leading '_'.
+                if node.module_public and (not name.startswith('_') or '__all__' in m.names):
                     existing_symbol = self.globals.get(name)
                     if existing_symbol:
                         # Import can redefine a variable. They get special treatment.
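
An illustrative module pair for the adjusted star-import filter (hypothetical file names); with '__all__' present, inclusion in it decides what gets re-exported, even for underscore-prefixed names:

    # m.py  (hypothetical)
    __all__ = ['_helper']

    def _helper() -> int:
        return 1

    def other() -> int:      # not listed in '__all__', so not module_public
        return 2

    # client.py
    from m import *
    _helper()                # imported despite the leading underscore
    other()                  # expected to be flagged as an undefined name
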
@@ -1506,7 +1595,11 @@ class SemanticAnalyzer(NodeVisitor):
         else:
             var._fullname = self.qualified_name(name)
         var.is_ready = True
-        var.type = AnyType(from_unimported_type=is_import)
+        if is_import:
+            any_type = AnyType(TypeOfAny.from_unimported_type)
+        else:
+            any_type = AnyType(TypeOfAny.from_error)
+        var.type = any_type
         var.is_suppressed_import = is_import
         self.add_symbol(name, SymbolTableNode(GDEF, var, self.cur_mod_id), context)
 
@@ -1529,29 +1622,37 @@ class SemanticAnalyzer(NodeVisitor):
     def type_analyzer(self, *,
                       tvar_scope: Optional[TypeVarScope] = None,
                       allow_tuple_literal: bool = False,
-                      aliasing: bool = False) -> TypeAnalyser:
+                      aliasing: bool = False,
+                      third_pass: bool = False) -> TypeAnalyser:
         if tvar_scope is None:
             tvar_scope = self.tvar_scope
-        return TypeAnalyser(self.lookup_qualified,
+        tpan = TypeAnalyser(self.lookup_qualified,
                             self.lookup_fully_qualified,
                             tvar_scope,
                             self.fail,
+                            self.note,
                             self.plugin,
                             self.options,
                             self.is_typeshed_stub_file,
                             aliasing=aliasing,
                             allow_tuple_literal=allow_tuple_literal,
-                            allow_unnormalized=self.is_stub_file)
+                            allow_unnormalized=self.is_stub_file,
+                            third_pass=third_pass)
+        tpan.in_dynamic_func = bool(self.function_stack and self.function_stack[-1].is_dynamic())
+        tpan.global_scope = not self.type and not self.function_stack
+        return tpan
 
     def anal_type(self, t: Type, *,
                   tvar_scope: Optional[TypeVarScope] = None,
                   allow_tuple_literal: bool = False,
-                  aliasing: bool = False) -> Type:
+                  aliasing: bool = False,
+                  third_pass: bool = False) -> Type:
         if t:
             a = self.type_analyzer(
                 tvar_scope=tvar_scope,
                 aliasing=aliasing,
-                allow_tuple_literal=allow_tuple_literal)
+                allow_tuple_literal=allow_tuple_literal,
+                third_pass=third_pass)
             return t.accept(a)
 
         else:
@@ -1565,10 +1666,16 @@ class SemanticAnalyzer(NodeVisitor):
         if s.type:
             allow_tuple_literal = isinstance(s.lvalues[-1], (TupleExpr, ListExpr))
             s.type = self.anal_type(s.type, allow_tuple_literal=allow_tuple_literal)
+            if (self.type and self.type.is_protocol and isinstance(lval, NameExpr) and
+                    isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs):
+                if isinstance(lval.node, Var):
+                    lval.node.is_abstract_var = True
         else:
-            # Set the type if the rvalue is a simple literal.
-            if (s.type is None and len(s.lvalues) == 1 and
-                    isinstance(s.lvalues[0], NameExpr)):
+            if (any(isinstance(lv, NameExpr) and lv.is_def for lv in s.lvalues) and
+                    self.type and self.type.is_protocol and not self.is_func_scope()):
+                self.fail('All protocol members must have explicitly declared types', s)
+            # Set the type if the rvalue is a simple literal (even if the above error occurred).
+            if len(s.lvalues) == 1 and isinstance(s.lvalues[0], NameExpr):
                 if s.lvalues[0].is_def:
                     s.type = self.analyze_simple_literal_type(s.rvalue)
         if s.type:
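
A small sketch of the protocol rule enforced above, assuming Protocol comes from typing_extensions at this mypy version:

    from typing_extensions import Protocol

    class Resource(Protocol):
        name: str         # fine: explicitly annotated, recorded as an abstract variable
        closed = False    # error: All protocol members must have explicitly declared types
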
@@ -1631,15 +1738,20 @@ class SemanticAnalyzer(NodeVisitor):
         qualified type variable names for generic aliases.
         If 'allow_unnormalized' is True, allow types like builtins.list[T].
         """
+        dynamic = bool(self.function_stack and self.function_stack[-1].is_dynamic())
+        global_scope = not self.type and not self.function_stack
         res = analyze_type_alias(rvalue,
                                  self.lookup_qualified,
                                  self.lookup_fully_qualified,
                                  self.tvar_scope,
                                  self.fail,
+                                 self.note,
                                  self.plugin,
                                  self.options,
                                  self.is_typeshed_stub_file,
-                                 allow_unnormalized=True)
+                                 allow_unnormalized=True,
+                                 in_dynamic_func=dynamic,
+                                 global_scope=global_scope)
         if res:
             alias_tvars = [name for (name, _) in
                            res.accept(TypeVariableQuery(self.lookup_qualified, self.tvar_scope))]
@@ -1690,7 +1802,7 @@ class SemanticAnalyzer(NodeVisitor):
             node.kind = TYPE_ALIAS
             node.type_override = res
             node.alias_tvars = alias_tvars
-            if isinstance(rvalue, IndexExpr):
+            if isinstance(rvalue, (IndexExpr, CallExpr)):
                 # We only need this for subscripted aliases, since simple aliases
                 # are already processed using aliasing TypeInfo's above.
                 rvalue.analyzed = TypeAliasExpr(res, node.alias_tvars,
@@ -1803,18 +1915,22 @@ class SemanticAnalyzer(NodeVisitor):
 
     def analyze_member_lvalue(self, lval: MemberExpr) -> None:
         lval.accept(self)
-        if (self.is_self_member_ref(lval) and
-                self.type.get(lval.name) is None):
-            # Implicit attribute definition in __init__.
-            lval.is_def = True
-            v = Var(lval.name)
-            v.set_line(lval)
-            v._fullname = self.qualified_name(lval.name)
-            v.info = self.type
-            v.is_ready = False
-            lval.def_var = v
-            lval.node = v
-            self.type.names[lval.name] = SymbolTableNode(MDEF, v, implicit=True)
+        if self.is_self_member_ref(lval):
+            node = self.type.get(lval.name)
+            if node is None or isinstance(node.node, Var) and node.node.is_abstract_var:
+                if self.type.is_protocol and node is None:
+                    self.fail("Protocol members cannot be defined via assignment to self", lval)
+                else:
+                    # Implicit attribute definition in __init__.
+                    lval.is_def = True
+                    v = Var(lval.name)
+                    v.set_line(lval)
+                    v._fullname = self.qualified_name(lval.name)
+                    v.info = self.type
+                    v.is_ready = False
+                    lval.def_var = v
+                    lval.node = v
+                    self.type.names[lval.name] = SymbolTableNode(MDEF, v, implicit=True)
         self.check_lvalue_validity(lval.node, lval)
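
The companion rule in analyze_member_lvalue above, sketched with the same assumed typing_extensions import:

    from typing_extensions import Protocol

    class Resource(Protocol):
        name: str

        def __init__(self) -> None:
            self.name = ''   # fine: 'name' is declared above as an abstract variable
            self.size = 0    # error: Protocol members cannot be defined via assignment to self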
 
     def is_self_member_ref(self, memberexpr: MemberExpr) -> bool:
@@ -1877,10 +1993,12 @@ class SemanticAnalyzer(NodeVisitor):
             newtype_class_info = self.build_newtype_typeinfo(name, old_type, old_type.fallback)
             newtype_class_info.tuple_type = old_type
         elif isinstance(old_type, Instance):
+            if old_type.type.is_protocol:
+                self.fail("NewType cannot be used with protocol classes", s)
             newtype_class_info = self.build_newtype_typeinfo(name, old_type, old_type)
         else:
             message = "Argument 2 to NewType(...) must be subclassable (got {})"
-            self.fail(message.format(old_type), s)
+            self.fail(message.format(self.msg.format(old_type)), s)
             return
 
         check_for_explicit_any(old_type, self.options, self.is_typeshed_stub_file, self.msg,
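
The NewType restriction added above, as an illustrative snippet (again assuming typing_extensions.Protocol):

    from typing import NewType
    from typing_extensions import Protocol

    class HasName(Protocol):
        name: str

    UserId = NewType('UserId', int)     # fine: a concrete, subclassable type
    Named = NewType('Named', HasName)   # error: NewType cannot be used with protocol classes
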
@@ -2208,7 +2326,7 @@ class SemanticAnalyzer(NodeVisitor):
                 # The fields argument contains (name, type) tuples.
                 items, types, ok = self.parse_namedtuple_fields_with_types(listexpr.items, call)
         if not types:
-            types = [AnyType() for _ in items]
+            types = [AnyType(TypeOfAny.unannotated) for _ in items]
         underscore = [item for item in items if item.startswith('_')]
         if underscore:
             self.fail("namedtuple() field names cannot start with an underscore: "
@@ -2248,34 +2366,44 @@ class SemanticAnalyzer(NodeVisitor):
         class_def.fullname = self.qualified_name(name)
 
         info = TypeInfo(SymbolTable(), class_def, self.cur_mod_id)
-        info.mro = [info] + basetype_or_fallback.type.mro
+        class_def.info = info
+        mro = basetype_or_fallback.type.mro
+        if mro is None:
+            # Forward reference, MRO should be recalculated in third pass.
+            mro = [basetype_or_fallback.type, self.object_type().type]
+        info.mro = [info] + mro
         info.bases = [basetype_or_fallback]
         return info
 
     def build_namedtuple_typeinfo(self, name: str, items: List[str], types: List[Type],
                                   default_items: Dict[str, Expression]) -> TypeInfo:
         strtype = self.str_type()
-        basetuple_type = self.named_type('__builtins__.tuple', [AnyType()])
-        dictype = (self.named_type_or_none('builtins.dict', [strtype, AnyType()])
+        implicit_any = AnyType(TypeOfAny.special_form)
+        basetuple_type = self.named_type('__builtins__.tuple', [implicit_any])
+        dictype = (self.named_type_or_none('builtins.dict', [strtype, implicit_any])
                    or self.object_type())
         # Actual signature should return OrderedDict[str, Union[types]]
-        ordereddictype = (self.named_type_or_none('builtins.dict', [strtype, AnyType()])
+        ordereddictype = (self.named_type_or_none('builtins.dict', [strtype, implicit_any])
                           or self.object_type())
-        # 'builtins.tuple' has only one type parameter.
-        #
-        # TODO: The corresponding type argument in the fallback instance should be a join of
-        #       all item types, but we can't do joins during this pass of semantic analysis
-        #       and we are using Any as a workaround.
-        fallback = self.named_type('__builtins__.tuple', [AnyType()])
+        fallback = self.named_type('__builtins__.tuple', [implicit_any])
         # Note: actual signature should accept an invariant version of Iterable[UnionType[types]],
         # but it can't be expressed. 'new' and 'len' should be callable types.
-        iterable_type = self.named_type_or_none('typing.Iterable', [AnyType()])
+        iterable_type = self.named_type_or_none('typing.Iterable', [implicit_any])
         function_type = self.named_type('__builtins__.function')
 
         info = self.basic_new_typeinfo(name, fallback)
         info.is_named_tuple = True
         info.tuple_type = TupleType(types, fallback)
 
+        def patch() -> None:
+            # Calculate the correct item type for the fallback tuple.
+            fallback.args[0] = join.join_type_list(list(info.tuple_type.items))
+
+        # We can't calculate the complete fallback type until after semantic
+        # analysis, since otherwise MROs might be incomplete. Postpone a callback
+        # function that patches the fallback.
+        self.patches.append(patch)
+
         def add_field(var: Var, is_initialized_in_class: bool = False,
                       is_property: bool = False) -> None:
             var.info = info
@@ -2339,10 +2467,11 @@ class SemanticAnalyzer(NodeVisitor):
         add_method('__init__', ret=NoneTyp(), name=info.name(),
                    args=[make_init_arg(var) for var in vars])
         add_method('_asdict', args=[], ret=ordereddictype)
+        special_form_any = AnyType(TypeOfAny.special_form)
         add_method('_make', ret=selftype, is_classmethod=True,
                    args=[Argument(Var('iterable', iterable_type), iterable_type, None, ARG_POS),
-                         Argument(Var('new'), AnyType(), EllipsisExpr(), ARG_NAMED_OPT),
-                         Argument(Var('len'), AnyType(), EllipsisExpr(), ARG_NAMED_OPT)])
+                         Argument(Var('new'), special_form_any, EllipsisExpr(), ARG_NAMED_OPT),
+                         Argument(Var('len'), special_form_any, EllipsisExpr(), ARG_NAMED_OPT)])
         return info
 
     def make_argument(self, name: str, type: Type) -> Argument:
@@ -2355,7 +2484,7 @@ class SemanticAnalyzer(NodeVisitor):
                 result.append(self.anal_type(expr_to_unanalyzed_type(node)))
             except TypeTranslationError:
                 self.fail('Type expected', node)
-                result.append(AnyType())
+                result.append(AnyType(TypeOfAny.from_error))
         return result
 
     def process_typeddict_definition(self, s: AssignmentStmt) -> None:
@@ -2494,19 +2623,18 @@ class SemanticAnalyzer(NodeVisitor):
         fallback = (self.named_type_or_none('typing.Mapping',
                                             [self.str_type(), self.object_type()])
                     or self.object_type())
+        info = self.basic_new_typeinfo(name, fallback)
+        info.typeddict_type = TypedDictType(OrderedDict(zip(items, types)), required_keys,
+                                            fallback)
 
         def patch() -> None:
             # Calculate the correct value type for the fallback Mapping.
-            fallback.args[1] = join.join_type_list(types)
+            fallback.args[1] = join.join_type_list(list(info.typeddict_type.items.values()))
 
         # We can't calculate the complete fallback type until after semantic
         # analysis, since otherwise MROs might be incomplete. Postpone a callback
         # function that patches the fallback.
         self.patches.append(patch)
-
-        info = self.basic_new_typeinfo(name, fallback)
-        info.typeddict_type = TypedDictType(OrderedDict(zip(items, types)), required_keys,
-                                            fallback)
         return info
 
     def check_classvar(self, s: AssignmentStmt) -> None:
@@ -2574,8 +2702,8 @@ class SemanticAnalyzer(NodeVisitor):
             elementwise_assignments = zip(seq_rval.items, *[v.items for v in seq_lvals])
             for rv, *lvs in elementwise_assignments:
                 self.process_module_assignment(lvs, rv, ctx)
-        elif isinstance(rval, NameExpr):
-            rnode = self.lookup(rval.name, ctx)
+        elif isinstance(rval, RefExpr):
+            rnode = self.lookup_type_node(rval)
             if rnode and rnode.kind == MODULE_REF:
                 for lval in lvals:
                     if not isinstance(lval, NameExpr):
@@ -2764,7 +2892,7 @@ class SemanticAnalyzer(NodeVisitor):
                 if len(dec.func.arguments) > 1:
                     self.fail('Too many arguments', dec.func)
             elif refers_to_fullname(d, 'typing.no_type_check'):
-                dec.var.type = AnyType()
+                dec.var.type = AnyType(TypeOfAny.special_form)
                 no_type_check = True
         for i in reversed(removed):
             del dec.decorators[i]
@@ -2861,7 +2989,7 @@ class SemanticAnalyzer(NodeVisitor):
     def visit_try_stmt(self, s: TryStmt) -> None:
         self.analyze_try_stmt(s, self)
 
-    def analyze_try_stmt(self, s: TryStmt, visitor: NodeVisitor,
+    def analyze_try_stmt(self, s: TryStmt, visitor: NodeVisitor[None],
                          add_global: bool = False) -> None:
         s.body.accept(visitor)
         for type, var, handler in zip(s.types, s.vars, s.handlers):
@@ -2993,6 +3121,8 @@ class SemanticAnalyzer(NodeVisitor):
             self.fail('"super" used outside class', expr)
             return
         expr.info = self.type
+        for arg in expr.call.args:
+            arg.accept(self)
 
     def visit_tuple_expr(self, expr: TupleExpr) -> None:
         for item in expr.items:
@@ -3042,6 +3172,8 @@ class SemanticAnalyzer(NodeVisitor):
         Some call expressions are recognized as special forms, including
         cast(...).
         """
+        if expr.analyzed:
+            return
         expr.callee.accept(self)
         if refers_to_fullname(expr.callee, 'typing.cast'):
             # Special form cast(...).
@@ -3146,13 +3278,25 @@ class SemanticAnalyzer(NodeVisitor):
             # bar in its namespace.  This must be done for all types of bar.
             file = cast(Optional[MypyFile], base.node)  # can't use isinstance due to issue #2999
             n = file.names.get(expr.name, None) if file is not None else None
-            if n:
+            if n and not n.module_hidden:
                 n = self.normalize_type_alias(n, expr)
                 if not n:
                     return
                 expr.kind = n.kind
                 expr.fullname = n.fullname
                 expr.node = n.node
+            elif file is not None and file.is_stub and '__getattr__' in file.names:
+                # If there is a module-level __getattr__, then any attribute on the module is valid
+                # per PEP 484.
+                getattr_defn = file.names['__getattr__']
+                if isinstance(getattr_defn.node, FuncDef):
+                    if isinstance(getattr_defn.node.type, CallableType):
+                        typ = getattr_defn.node.type.ret_type
+                    else:
+                        typ = AnyType(TypeOfAny.special_form)
+                    expr.kind = MDEF
+                    expr.fullname = '{}.{}'.format(file.fullname(), expr.name)
+                    expr.node = Var(expr.name, type=typ)
             else:
                 # We only catch some errors here; the rest will be
                 # caught during type checking.
@@ -3220,6 +3364,8 @@ class SemanticAnalyzer(NodeVisitor):
         expr.expr.accept(self)
 
     def visit_index_expr(self, expr: IndexExpr) -> None:
+        if expr.analyzed:
+            return
         expr.base.accept(self)
         if (isinstance(expr.base, RefExpr)
                 and isinstance(expr.base.node, TypeInfo)
@@ -3264,7 +3410,7 @@ class SemanticAnalyzer(NodeVisitor):
         except TypeTranslationError:
             return None
         if isinstance(t, UnboundType):
-            n = self.lookup_qualified(t.name, expr)
+            n = self.lookup_qualified(t.name, expr, suppress_errors=True)
             return n
         return None
 
@@ -3382,23 +3528,25 @@ class SemanticAnalyzer(NodeVisitor):
         yield
         self.tvar_scope = old_scope
 
-    def lookup(self, name: str, ctx: Context) -> SymbolTableNode:
+    def lookup(self, name: str, ctx: Context,
+               suppress_errors: bool = False) -> Optional[SymbolTableNode]:
         """Look up an unqualified name in all active namespaces."""
         implicit_name = False
         # 1a. Name declared using 'global x' takes precedence
         if name in self.global_decls[-1]:
             if name in self.globals:
                 return self.globals[name]
-            else:
+            if not suppress_errors:
                 self.name_not_defined(name, ctx)
-                return None
+            return None
         # 1b. Name declared using 'nonlocal x' takes precedence
         if name in self.nonlocal_decls[-1]:
             for table in reversed(self.locals[:-1]):
                 if table is not None and name in table:
                     return table[name]
             else:
-                self.name_not_defined(name, ctx)
+                if not suppress_errors:
+                    self.name_not_defined(name, ctx)
                 return None
         # 2. Class attributes (if within class definition)
         if self.is_class_scope() and name in self.type.names:
@@ -3421,16 +3569,18 @@ class SemanticAnalyzer(NodeVisitor):
             table = b.node.names
             if name in table:
                 if name[0] == "_" and name[1] != "_":
-                    self.name_not_defined(name, ctx)
+                    if not suppress_errors:
+                        self.name_not_defined(name, ctx)
                     return None
                 node = table[name]
                 return node
         # Give up.
-        if not implicit_name:
+        if not implicit_name and not suppress_errors:
             self.name_not_defined(name, ctx)
             self.check_for_obsolete_short_name(name, ctx)
         else:
-            return implicit_node
+            if implicit_name:
+                return implicit_node
         return None
 
     def check_for_obsolete_short_name(self, name: str, ctx: Context) -> None:
@@ -3440,12 +3590,13 @@ class SemanticAnalyzer(NodeVisitor):
         if len(matches) == 1:
             self.note("(Did you mean '{}'?)".format(obsolete_name_mapping[matches[0]]), ctx)
 
-    def lookup_qualified(self, name: str, ctx: Context) -> SymbolTableNode:
+    def lookup_qualified(self, name: str, ctx: Context,
+                         suppress_errors: bool = False) -> Optional[SymbolTableNode]:
         if '.' not in name:
-            return self.lookup(name, ctx)
+            return self.lookup(name, ctx, suppress_errors=suppress_errors)
         else:
             parts = name.split('.')
-            n = self.lookup(parts[0], ctx)  # type: SymbolTableNode
+            n = self.lookup(parts[0], ctx, suppress_errors=suppress_errors)
             if n:
                 for i in range(1, len(parts)):
                     if isinstance(n.node, TypeInfo):
@@ -3468,17 +3619,22 @@ class SemanticAnalyzer(NodeVisitor):
                         n = n.node.names.get(parts[i], None)
                     # TODO: What if node is Var or FuncDef?
                     if not n:
-                        self.name_not_defined(name, ctx)
+                        if not suppress_errors:
+                            self.name_not_defined(name, ctx)
                         break
                 if n:
                     n = self.normalize_type_alias(n, ctx)
-            return n
+                    if n and n.module_hidden:
+                        self.name_not_defined(name, ctx)
+            if n and not n.module_hidden:
+                return n
+            return None
 
     def builtin_type(self, fully_qualified_name: str) -> Instance:
         sym = self.lookup_fully_qualified(fully_qualified_name)
         node = sym.node
         assert isinstance(node, TypeInfo)
-        return Instance(node, [AnyType()] * len(node.defn.type_vars))
+        return Instance(node, [AnyType(TypeOfAny.special_form)] * len(node.defn.type_vars))
 
     def lookup_fully_qualified(self, name: str) -> SymbolTableNode:
         """Lookup a fully qualified name.
@@ -3647,7 +3803,7 @@ class SemanticAnalyzer(NodeVisitor):
             report_internal_error(err, self.errors.file, node.line, self.errors, self.options)
 
 
-class FirstPass(NodeVisitor):
+class FirstPass(NodeVisitor[None]):
     """First phase of semantic analysis.
 
     See docstring of 'analyze()' below for a description of what this does.
@@ -3711,7 +3867,7 @@ class FirstPass(NodeVisitor):
                     ('None', NoneTyp()),
                     # reveal_type is a mypy-only function that gives an error with
                     # the type of its arg.
-                    ('reveal_type', AnyType()),
+                    ('reveal_type', AnyType(TypeOfAny.special_form)),
                 ]  # type: List[Tuple[str, Type]]
 
                 # TODO(ddfisher): This guard is only needed because mypy defines
@@ -3725,6 +3881,11 @@ class FirstPass(NodeVisitor):
                         ('False', bool_type),
                         ('__debug__', bool_type),
                     ])
+                else:
+                    # We are running tests without 'bool' in builtins.
+                    # TODO: Find a permanent solution to this problem.
+                    # Maybe add 'bool' to all fixtures?
+                    literal_types.append(('True', AnyType(TypeOfAny.special_form)))
 
                 for name, typ in literal_types:
                     v = Var(name, typ)
@@ -3918,14 +4079,20 @@ class ThirdPass(TraverserVisitor):
     straightforward type inference.
     """
 
-    def __init__(self, modules: Dict[str, MypyFile], errors: Errors) -> None:
+    def __init__(self, modules: Dict[str, MypyFile], errors: Errors,
+                 sem: SemanticAnalyzer) -> None:
         self.modules = modules
         self.errors = errors
+        self.sem = sem
 
-    def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None:
+    def visit_file(self, file_node: MypyFile, fnam: str, options: Options,
+                   patches: List[Callable[[], None]]) -> None:
         self.errors.set_file(fnam, file_node.fullname())
         self.options = options
+        self.sem.options = options
+        self.patches = patches
         self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
+        self.sem.globals = file_node.names
         with experiments.strict_optional_set(options.strict_optional):
             self.accept(file_node)
 
@@ -3955,7 +4122,7 @@ class ThirdPass(TraverserVisitor):
 
     def visit_func_def(self, fdef: FuncDef) -> None:
         self.errors.push_function(fdef.name())
-        self.analyze(fdef.type)
+        self.analyze(fdef.type, fdef)
         super().visit_func_def(fdef)
         self.errors.pop_function()
 
@@ -3963,19 +4130,39 @@ class ThirdPass(TraverserVisitor):
         # NamedTuple base classes are validated in check_namedtuple_classdef; we don't have to
         # check them again here.
         if not tdef.info.is_named_tuple:
+            types = list(tdef.info.bases)  # type: List[Type]
+            for tvar in tdef.type_vars:
+                if tvar.upper_bound:
+                    types.append(tvar.upper_bound)
+                if tvar.values:
+                    types.extend(tvar.values)
+            self.analyze_types(types, tdef.info)
             for type in tdef.info.bases:
-                self.analyze(type)
+                if tdef.info.is_protocol:
+                    if not isinstance(type, Instance) or not type.type.is_protocol:
+                        if type.type.fullname() != 'builtins.object':
+                            self.fail('All bases of a protocol must be protocols', tdef)
         # Recompute MRO now that we have analyzed all modules, to pick
         # up superclasses of bases imported from other modules in an
         # import loop. (Only do so if we succeeded the first time.)
         if tdef.info.mro:
             tdef.info.mro = []  # Force recomputation
             calculate_class_mro(tdef, self.fail_blocker)
+            if tdef.info.is_protocol:
+                add_protocol_members(tdef.info)
         if tdef.analyzed is not None:
+            # Also check synthetic types associated with this ClassDef.
+            # Currently these are TypedDict and NamedTuple.
             if isinstance(tdef.analyzed, TypedDictExpr):
-                self.analyze(tdef.analyzed.info.typeddict_type)
+                self.analyze(tdef.analyzed.info.typeddict_type, tdef.analyzed, warn=True)
             elif isinstance(tdef.analyzed, NamedTupleExpr):
-                self.analyze(tdef.analyzed.info.tuple_type)
+                self.analyze(tdef.analyzed.info.tuple_type, tdef.analyzed, warn=True)
+                for name in tdef.analyzed.info.names:
+                    sym = tdef.analyzed.info.names[name]
+                    if isinstance(sym.node, (FuncDef, Decorator)):
+                        self.accept(sym.node)
+                    if isinstance(sym.node, Var):
+                        self.analyze(sym.node.type, sym.node)
         super().visit_class_def(tdef)
 
     def visit_decorator(self, dec: Decorator) -> None:
@@ -3993,10 +4180,10 @@ class ThirdPass(TraverserVisitor):
             # Decorators are expected to have a callable type (it's a little odd).
             if dec.func.type is None:
                 dec.var.type = CallableType(
-                    [AnyType()],
+                    [AnyType(TypeOfAny.special_form)],
                     [ARG_POS],
                     [None],
-                    AnyType(),
+                    AnyType(TypeOfAny.special_form),
                     self.builtin_type('function'),
                     name=dec.var.name())
             elif isinstance(dec.func.type, CallableType):
@@ -4016,10 +4203,11 @@ class ThirdPass(TraverserVisitor):
             # of the function here.
             dec.var.type = function_type(dec.func, self.builtin_type('function'))
         if dec.decorators:
-            if returns_any_if_called(dec.decorators[0]):
+            return_type = calculate_return_type(dec.decorators[0])
+            if return_type and isinstance(return_type, AnyType):
                 # The outermost decorator will return Any so we know the type of the
                 # decorated function.
-                dec.var.type = AnyType()
+                dec.var.type = AnyType(TypeOfAny.from_another_any, source_any=return_type)
             sig = find_fixed_callable_return(dec.decorators[0])
             if sig:
                 # The outermost decorator always returns the same kind of function,
@@ -4029,20 +4217,60 @@ class ThirdPass(TraverserVisitor):
                 dec.var.type = sig
 
     def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
-        self.analyze(s.type)
+        """Traverse the assignment statement.
+
+        This includes the actual assignment and any synthetic types
+        resulting from this assignment. Currently this includes
+        NewType, TypedDict, NamedTuple, and TypeVar.
+        """
+        self.analyze(s.type, s)
         if isinstance(s.rvalue, IndexExpr) and isinstance(s.rvalue.analyzed, TypeAliasExpr):
-            self.analyze(s.rvalue.analyzed.type)
+            self.analyze(s.rvalue.analyzed.type, s.rvalue.analyzed, warn=True)
         if isinstance(s.rvalue, CallExpr):
-            if isinstance(s.rvalue.analyzed, NewTypeExpr):
-                self.analyze(s.rvalue.analyzed.old_type)
-            if isinstance(s.rvalue.analyzed, TypedDictExpr):
-                self.analyze(s.rvalue.analyzed.info.typeddict_type)
-            if isinstance(s.rvalue.analyzed, NamedTupleExpr):
-                self.analyze(s.rvalue.analyzed.info.tuple_type)
+            analyzed = s.rvalue.analyzed
+            if isinstance(analyzed, NewTypeExpr):
+                self.analyze(analyzed.old_type, analyzed)
+                if analyzed.info and analyzed.info.mro:
+                    analyzed.info.mro = []  # Force recomputation
+                    calculate_class_mro(analyzed.info.defn, self.fail_blocker)
+            if isinstance(analyzed, TypeVarExpr):
+                types = []
+                if analyzed.upper_bound:
+                    types.append(analyzed.upper_bound)
+                if analyzed.values:
+                    types.extend(analyzed.values)
+                self.analyze_types(types, analyzed)
+            if isinstance(analyzed, TypedDictExpr):
+                self.analyze(analyzed.info.typeddict_type, analyzed, warn=True)
+            if isinstance(analyzed, NamedTupleExpr):
+                self.analyze(analyzed.info.tuple_type, analyzed, warn=True)
+                for name in analyzed.info.names:
+                    sym = analyzed.info.names[name]
+                    if isinstance(sym.node, (FuncDef, Decorator)):
+                        self.accept(sym.node)
+                    if isinstance(sym.node, Var):
+                        self.analyze(sym.node.type, sym.node)
+        # We need to pay additional attention to assignments that define a type alias.
+        # The resulting type is also stored in the 'type_override' attribute of
+        # the corresponding SymbolTableNode.
+        if isinstance(s.lvalues[0], RefExpr) and isinstance(s.lvalues[0].node, Var):
+            self.analyze(s.lvalues[0].node.type, s.lvalues[0].node)
+            if isinstance(s.lvalues[0], NameExpr):
+                node = self.sem.lookup(s.lvalues[0].name, s, suppress_errors=True)
+                if node:
+                    self.analyze(node.type_override, node)
         super().visit_assignment_stmt(s)
 
+    def visit_for_stmt(self, s: ForStmt) -> None:
+        self.analyze(s.index_type, s)
+        super().visit_for_stmt(s)
+
+    def visit_with_stmt(self, s: WithStmt) -> None:
+        self.analyze(s.target_type, s)
+        super().visit_with_stmt(s)
+
     def visit_cast_expr(self, e: CastExpr) -> None:
-        self.analyze(e.type)
+        self.analyze(e.type, e)
         super().visit_cast_expr(e)
 
     def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None:
@@ -4050,23 +4278,116 @@ class ThirdPass(TraverserVisitor):
 
     def visit_type_application(self, e: TypeApplication) -> None:
         for type in e.types:
-            self.analyze(type)
+            self.analyze(type, e)
         super().visit_type_application(e)
 
     # Helpers
 
-    def analyze(self, type: Optional[Type]) -> None:
+    def perform_transform(self, node: Union[Node, SymbolTableNode],
+                          transform: Callable[[Type], Type]) -> None:
+        """Apply transform to all types associated with node."""
+        if isinstance(node, ForStmt):
+            node.index_type = transform(node.index_type)
+            self.transform_types_in_lvalue(node.index, transform)
+        if isinstance(node, WithStmt):
+            node.target_type = transform(node.target_type)
+            for n in node.target:
+                if isinstance(n, NameExpr) and isinstance(n.node, Var) and n.node.type:
+                    n.node.type = transform(n.node.type)
+        if isinstance(node, (FuncDef, CastExpr, AssignmentStmt, TypeAliasExpr, Var)):
+            node.type = transform(node.type)
+        if isinstance(node, NewTypeExpr):
+            node.old_type = transform(node.old_type)
+        if isinstance(node, TypeVarExpr):
+            if node.upper_bound:
+                node.upper_bound = transform(node.upper_bound)
+            if node.values:
+                node.values = [transform(v) for v in node.values]
+        if isinstance(node, TypedDictExpr):
+            node.info.typeddict_type = cast(TypedDictType,
+                                            transform(node.info.typeddict_type))
+        if isinstance(node, NamedTupleExpr):
+            node.info.tuple_type = cast(TupleType,
+                                        transform(node.info.tuple_type))
+        if isinstance(node, TypeApplication):
+            node.types = [transform(t) for t in node.types]
+        if isinstance(node, SymbolTableNode):
+            node.type_override = transform(node.type_override)
+        if isinstance(node, TypeInfo):
+            for tvar in node.defn.type_vars:
+                if tvar.upper_bound:
+                    tvar.upper_bound = transform(tvar.upper_bound)
+                if tvar.values:
+                    tvar.values = [transform(v) for v in tvar.values]
+            new_bases = []
+            for base in node.bases:
+                new_base = transform(base)
+                if isinstance(new_base, Instance):
+                    new_bases.append(new_base)
+                else:
+                    # Don't fix the NamedTuple bases; they are intentionally Instances.
+                    # Patch the 'args' just in case, although generic tuple types are
+                    # not supported yet.
+                    alt_base = Instance(base.type, [transform(a) for a in base.args])
+                    new_bases.append(alt_base)
+            node.bases = new_bases
+
+    def transform_types_in_lvalue(self, lvalue: Lvalue,
+                                  transform: Callable[[Type], Type]) -> None:
+        if isinstance(lvalue, RefExpr):
+            if isinstance(lvalue.node, Var):
+                var = lvalue.node
+                var.type = transform(var.type)
+        elif isinstance(lvalue, TupleExpr):
+            for item in lvalue.items:
+                self.transform_types_in_lvalue(item, transform)
+
+    def analyze(self, type: Optional[Type], node: Union[Node, SymbolTableNode],
+                warn: bool = False) -> None:
+        # Recursive type warnings are only emitted on type definition nodes (marked by 'warn').
+        # Flags that appear during analysis of 'type' are collected in this dict.
+        indicator = {}  # type: Dict[str, bool]
         if type:
-            analyzer = TypeAnalyserPass3(self.fail, self.options, self.is_typeshed_file)
+            analyzer = self.make_type_analyzer(indicator)
+            type.accept(analyzer)
+            self.check_for_omitted_generics(type)
+            if indicator.get('forward') or indicator.get('synthetic'):
+                def patch() -> None:
+                    self.perform_transform(node,
+                        lambda tp: tp.accept(ForwardReferenceResolver(self.fail,
+                                                                      node, warn)))
+                self.patches.append(patch)
+
+    def analyze_types(self, types: List[Type], node: Node) -> None:
+        # Similar to above but for nodes with multiple types.
+        indicator = {}  # type: Dict[str, bool]
+        for type in types:
+            analyzer = self.make_type_analyzer(indicator)
             type.accept(analyzer)
             self.check_for_omitted_generics(type)
+        if indicator.get('forward') or indicator.get('synthetic'):
+            def patch() -> None:
+                self.perform_transform(node,
+                    lambda tp: tp.accept(ForwardReferenceResolver(self.fail,
+                                                                  node, warn=False)))
+            self.patches.append(patch)
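
The 'forward'/'synthetic' indicator and the queued patch above target code like this hypothetical example, where a synthetic type is referenced before its definition:

    from typing import NamedTuple, Optional

    def head(p: Optional['Pair']) -> int:
        # 'Pair' is still a forward reference here; the queued patch later
        # rewrites it into the real analyzed TupleType.
        return p.first if p else 0

    class Pair(NamedTuple):
        first: int
        second: int
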
+
+    def make_type_analyzer(self, indicator: Dict[str, bool]) -> TypeAnalyserPass3:
+        return TypeAnalyserPass3(self.sem.lookup_qualified,
+                                 self.sem.lookup_fully_qualified,
+                                 self.fail,
+                                 self.sem.note,
+                                 self.sem.plugin,
+                                 self.options,
+                                 self.is_typeshed_file,
+                                 indicator)
 
     def check_for_omitted_generics(self, typ: Type) -> None:
         if 'generics' not in self.options.disallow_any or self.is_typeshed_file:
             return
 
         for t in collect_any_types(typ):
-            if t.from_omitted_generics:
+            if t.type_of_any == TypeOfAny.from_omitted_generics:
                 self.fail(messages.BARE_GENERIC, t)
 
     def fail(self, msg: str, ctx: Context, *, blocker: bool = False) -> None:
@@ -4083,7 +4404,18 @@ class ThirdPass(TraverserVisitor):
         if args:
             # TODO: assert len(args) == len(node.defn.type_vars)
             return Instance(node, args)
-        return Instance(node, [AnyType()] * len(node.defn.type_vars))
+        any_type = AnyType(TypeOfAny.special_form)
+        return Instance(node, [any_type] * len(node.defn.type_vars))
+
+
+def add_protocol_members(typ: TypeInfo) -> None:
+    members = set()  # type: Set[str]
+    if typ.mro:
+        for base in typ.mro[:-1]:  # we skip "object" since everyone implements it
+            if base.is_protocol:
+                for name in base.names:
+                    members.add(name)
+    typ.protocol_members = sorted(list(members))
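
For a hypothetical hierarchy, the helper above walks the MRO (minus 'object') and records the sorted union of member names:

    from typing_extensions import Protocol

    class SupportsClose(Protocol):
        def close(self) -> None: ...

    class SupportsReadClose(SupportsClose, Protocol):
        def read(self) -> str: ...

    # After semantic analysis, SupportsReadClose's TypeInfo should end up with
    # protocol_members == ['close', 'read'].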
 
 
 def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike:
@@ -4258,8 +4590,7 @@ def consider_sys_version_info(expr: Expression, pyversion: Tuple[int, ...]) -> i
         else:
             return TRUTH_VALUE_UNKNOWN
     elif isinstance(index, tuple) and isinstance(thing, tuple):
-        # Why doesn't mypy see that index can't be None here?
-        lo, hi = cast(tuple, index)
+        lo, hi = index
         if lo is None:
             lo = 0
         if hi is None:
@@ -4335,7 +4666,7 @@ def contains_int_or_tuple_of_ints(expr: Expression
     if isinstance(expr, IntExpr):
         return expr.value
     if isinstance(expr, TupleExpr):
-        if expr.literal == LITERAL_YES:
+        if literal(expr) == LITERAL_YES:
             thing = []
             for x in expr.items:
                 if not isinstance(x, IntExpr):
@@ -4425,11 +4756,11 @@ def is_identity_signature(sig: Type) -> bool:
     return False
 
 
-def returns_any_if_called(expr: Expression) -> bool:
-    """Return True if we can predict that expr will return Any if called.
+def calculate_return_type(expr: Expression) -> Optional[Type]:
+    """Return the return type if we can calculate it.
 
     This only uses information available during semantic analysis so this
-    will sometimes return False because of insufficient information (as
+    will sometimes return None because of insufficient information (as
     type inference hasn't run yet).
     """
     if isinstance(expr, RefExpr):
@@ -4437,15 +4768,16 @@ def returns_any_if_called(expr: Expression) -> bool:
             typ = expr.node.type
             if typ is None:
                 # No signature -> default to Any.
-                return True
+                return AnyType(TypeOfAny.unannotated)
             # Explicit Any return?
-            return isinstance(typ, CallableType) and isinstance(typ.ret_type, AnyType)
+            if isinstance(typ, CallableType):
+                return typ.ret_type
+            return None
         elif isinstance(expr.node, Var):
-            typ = expr.node.type
-            return typ is None or isinstance(typ, AnyType)
+            return expr.node.type
     elif isinstance(expr, CallExpr):
-        return returns_any_if_called(expr.callee)
-    return False
+        return calculate_return_type(expr.callee)
+    return None
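
A sketch of why the decorator handling cares about this: an unannotated decorator yields Any for calculate_return_type, so the decorated function is treated as Any (hypothetical names):

    def log_calls(func):             # unannotated -> return type computed as Any
        return func

    @log_calls
    def add(x: int, y: int) -> int:
        return x + y

    reveal_type(add)                 # expected to reveal Any via the logic above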
 
 
 def find_fixed_callable_return(expr: Expression) -> Optional[CallableType]:
@@ -4471,4 +4803,138 @@ def make_any_non_explicit(t: Type) -> Type:
 
 class MakeAnyNonExplicit(TypeTranslator):
     def visit_any(self, t: AnyType) -> Type:
-        return t.copy_modified(explicit=False)
+        if t.type_of_any == TypeOfAny.explicit:
+            return t.copy_modified(TypeOfAny.special_form)
+        return t
+
+
+class ForwardReferenceResolver(TypeTranslator):
+    """Visitor to replace previously detected forward reference to synthetic types.
+
+    This is similar to TypeTranslator but tracks visited nodes to avoid
+    infinite recursion on potentially circular (self- or mutually-referential) types.
+    This visitor:
+    * Fixes forward references by unwrapping the linked type.
+    * Generates errors for unsupported type recursion and breaks recursion by resolving
+      recursive back references to Any types.
+    * Replaces instance types generated from unanalyzed NamedTuple and TypedDict class syntax
+      found in first pass with analyzed TupleType and TypedDictType.
+    """
+    def __init__(self, fail: Callable[[str, Context], None],
+                 start: Union[Node, SymbolTableNode], warn: bool) -> None:
+        self.seen = []  # type: List[Type]
+        self.fail = fail
+        self.start = start
+        self.warn = warn
+
+    def check_recursion(self, t: Type) -> bool:
+        if any(t is s for s in self.seen):
+            if self.warn:
+                assert isinstance(self.start, Node), "Internal error: invalid error context"
+                self.fail('Recursive types not fully supported yet,'
+                          ' nested types replaced with "Any"', self.start)
+            return True
+        self.seen.append(t)
+        return False
+
+    def visit_forwardref_type(self, t: ForwardRef) -> Type:
+        """This visitor method tracks situations like this:
+
+            x: A  # This type is not yet known and is therefore wrapped in ForwardRef;
+                  # its content is updated in ThirdPass, so now we need to unwrap this type.
+            A = NewType('A', int)
+        """
+        return t.link.accept(self)
+
+    def visit_instance(self, t: Instance, from_fallback: bool = False) -> Type:
+        """This visitor method tracks situations like this:
+
+               x: A  # When analyzing this type we will get an Instance from FirstPass.
+                     # Now we need to update this to actual analyzed TupleType.
+               class A(NamedTuple):
+                   attr: str
+
+        If from_fallback is True, then we always return an Instance type. This is needed
+        since TupleType and TypedDictType fallbacks are always instances.
+        """
+        info = t.type
+        # Special case, analyzed bases transformed the type into TupleType.
+        if info.tuple_type and not from_fallback:
+            items = [it.accept(self) for it in info.tuple_type.items]
+            info.tuple_type.items = items
+            return TupleType(items, Instance(info, []))
+        # Update forward Instances to corresponding analyzed NamedTuples.
+        if info.replaced and info.replaced.tuple_type:
+            tp = info.replaced.tuple_type
+            if self.check_recursion(tp):
+                # The key idea is that when we recursively return to a type already traversed,
+                # then we break the cycle and put AnyType as a leaf.
+                return AnyType(TypeOfAny.from_error)
+            return tp.copy_modified(fallback=Instance(info.replaced, [])).accept(self)
+        # Same as above but for TypedDicts.
+        if info.replaced and info.replaced.typeddict_type:
+            td = info.replaced.typeddict_type
+            if self.check_recursion(td):
+                # We also break the cycles for TypedDicts as explained above for NamedTuples.
+                return AnyType(TypeOfAny.from_error)
+            return td.copy_modified(fallback=Instance(info.replaced, [])).accept(self)
+        if self.check_recursion(t):
+            # We also need to break a potential cycle with normal (non-synthetic) instance types.
+            return Instance(t.type, [AnyType(TypeOfAny.from_error)] * len(t.type.defn.type_vars))
+        return super().visit_instance(t)
+
+    def visit_type_var(self, t: TypeVarType) -> Type:
+        if self.check_recursion(t):
+            return AnyType(TypeOfAny.from_error)
+        if t.upper_bound:
+            t.upper_bound = t.upper_bound.accept(self)
+        if t.values:
+            t.values = [v.accept(self) for v in t.values]
+        return t
+
+    def visit_callable_type(self, t: CallableType) -> Type:
+        if self.check_recursion(t):
+            return AnyType(TypeOfAny.from_error)
+        arg_types = [tp.accept(self) for tp in t.arg_types]
+        ret_type = t.ret_type.accept(self)
+        variables = t.variables.copy()
+        for v in variables:
+            if v.upper_bound:
+                v.upper_bound = v.upper_bound.accept(self)
+            if v.values:
+                v.values = [val.accept(self) for val in v.values]
+        return t.copy_modified(arg_types=arg_types, ret_type=ret_type, variables=variables)
+
+    def visit_overloaded(self, t: Overloaded) -> Type:
+        if self.check_recursion(t):
+            return AnyType(TypeOfAny.from_error)
+        return super().visit_overloaded(t)
+
+    def visit_tuple_type(self, t: TupleType) -> Type:
+        if self.check_recursion(t):
+            return AnyType(TypeOfAny.from_error)
+        items = [it.accept(self) for it in t.items]
+        fallback = self.visit_instance(t.fallback, from_fallback=True)
+        assert isinstance(fallback, Instance)
+        return TupleType(items, fallback, t.line, t.column)
+
+    def visit_typeddict_type(self, t: TypedDictType) -> Type:
+        if self.check_recursion(t):
+            return AnyType(TypeOfAny.from_error)
+        items = OrderedDict([
+            (item_name, item_type.accept(self))
+            for (item_name, item_type) in t.items.items()
+        ])
+        fallback = self.visit_instance(t.fallback, from_fallback=True)
+        assert isinstance(fallback, Instance)
+        return TypedDictType(items, t.required_keys, fallback, t.line, t.column)
+
+    def visit_union_type(self, t: UnionType) -> Type:
+        if self.check_recursion(t):
+            return AnyType(TypeOfAny.from_error)
+        return super().visit_union_type(t)
+
+    def visit_type_type(self, t: TypeType) -> Type:
+        if self.check_recursion(t):
+            return AnyType(TypeOfAny.from_error)
+        return super().visit_type_type(t)
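
The docstrings in this new visitor describe the scenario it exists for: an annotation that refers
to a synthetic type (NewType, NamedTuple, TypedDict) before that type has been analyzed, with
recursive definitions cut off by Any. A rough user-level sketch of what that looks like (the
names are invented for illustration):

    from typing import NamedTuple, NewType

    x: 'UserId'                      # forward reference: analyzed via ForwardRef and
    UserId = NewType('UserId', int)  # unwrapped once the NewType is known

    class TreeNode(NamedTuple):      # recursive synthetic type: the nested 'TreeNode'
        value: int                   # reference is replaced with Any, with the
        child: 'TreeNode'            # "Recursive types not fully supported yet" note
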
diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py
index 9da9056..422f46a 100644
--- a/mypy/server/astdiff.py
+++ b/mypy/server/astdiff.py
@@ -37,7 +37,7 @@ def compare_symbol_tables(name_prefix: str, table1: SymbolTable, table2: SymbolT
             node1 = table1[name].node
             node2 = table2[name].node
 
-            if node1.fullname() and get_prefix(node1.fullname()) != name_prefix:
+            if node1 and node1.fullname() and get_prefix(node1.fullname()) != name_prefix:
                 # Only look inside things defined in the current module.
                 # TODO: This probably doesn't work generally...
                 continue
@@ -61,7 +61,7 @@ def is_similar_node_shallow(n: SymbolTableNode, m: SymbolTableNode) -> bool:
         return False
     if type(n.node) != type(m.node):  # noqa
         return False
-    if n.node.fullname() != m.node.fullname():
+    if n.node and m.node and n.node.fullname() != m.node.fullname():
         return False
     if isinstance(n.node, FuncBase) and isinstance(m.node, FuncBase):
         # TODO: info
@@ -83,7 +83,10 @@ def is_similar_node_shallow(n: SymbolTableNode, m: SymbolTableNode) -> bool:
                 nn.is_newtype == mn.is_newtype and
                 is_same_mro(nn.mro, mn.mro))
     if isinstance(n.node, Var) and isinstance(m.node, Var):
-        return is_identical_type(n.node.type, m.node.type)
+        if n.node.type is None and m.node.type is None:
+            return True
+        return (n.node.type is not None and m.node.type is not None and
+                is_identical_type(n.node.type, m.node.type))
     return True
 
 
diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py
index 5591c9a..e4efc62 100644
--- a/mypy/server/astmerge.py
+++ b/mypy/server/astmerge.py
@@ -3,7 +3,7 @@
 See the main entry point merge_asts for details.
 """
 
-from typing import Dict, List, cast, TypeVar
+from typing import Dict, List, cast, TypeVar, Optional
 
 from mypy.nodes import (
     Node, MypyFile, SymbolTable, Block, AssignmentStmt, NameExpr, MemberExpr, RefExpr, TypeInfo,
@@ -40,14 +40,15 @@ def merge_asts(old: MypyFile, old_symbols: SymbolTable,
 
 def replacement_map_from_symbol_table(
         old: SymbolTable, new: SymbolTable, prefix: str) -> Dict[SymbolNode, SymbolNode]:
-    replacements = {}
+    replacements = {}  # type: Dict[SymbolNode, SymbolNode]
     for name, node in old.items():
         if (name in new and (node.kind == MDEF
-                             or get_prefix(node.node.fullname()) == prefix)):
+                             or node.node and get_prefix(node.node.fullname()) == prefix)):
             new_node = new[name]
             if (type(new_node.node) == type(node.node)  # noqa
-                    and new_node.node.fullname() == node.node.fullname()
-                    and new_node.kind == node.kind):
+                    and new_node.node and node.node and
+                    new_node.node.fullname() == node.node.fullname() and
+                    new_node.kind == node.kind):
                 replacements[new_node.node] = node.node
                 if isinstance(node.node, TypeInfo) and isinstance(new_node.node, TypeInfo):
                     type_repl = replacement_map_from_symbol_table(
@@ -119,7 +120,8 @@ class NodeReplaceVisitor(TraverserVisitor):
         super().visit_member_expr(node)
 
     def visit_ref_expr(self, node: RefExpr) -> None:
-        node.node = self.fixup(node.node)
+        if node.node is not None:
+            node.node = self.fixup(node.node)
 
     # Helpers
 
@@ -208,7 +210,7 @@ class TypeReplaceVisitor(TypeVisitor[None]):
 def replace_nodes_in_symbol_table(symbols: SymbolTable,
                                   replacements: Dict[SymbolNode, SymbolNode]) -> None:
     for name, node in symbols.items():
-        if node.node in replacements:
+        if node.node and node.node in replacements:
             new = replacements[node.node]
             new.__dict__ = node.node.__dict__
             node.node = new
diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py
index dd7ae96..62df86a 100644
--- a/mypy/server/aststrip.py
+++ b/mypy/server/aststrip.py
@@ -1,7 +1,7 @@
 """Strip AST from semantic information."""
 
 import contextlib
-from typing import Union, Iterator
+from typing import Union, Iterator, Optional
 
 from mypy.nodes import (
     Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt,
@@ -16,7 +16,7 @@ def strip_target(node: Union[MypyFile, FuncItem]) -> None:
 
 class NodeStripVisitor(TraverserVisitor):
     def __init__(self) -> None:
-        self.type = None  # type: TypeInfo
+        self.type = None  # type: Optional[TypeInfo]
 
     def strip_target(self, node: Union[MypyFile, FuncItem]) -> None:
         """Strip a fine-grained incremental mode target."""
@@ -64,16 +64,20 @@ class NodeStripVisitor(TraverserVisitor):
     def visit_member_expr(self, node: MemberExpr) -> None:
         self.strip_ref_expr(node)
         if self.is_duplicate_attribute_def(node):
-            # This is marked as a instance variable definition but a base class
+            # This is marked as an instance variable definition but a base class
             # defines an attribute with the same name, and we can't have
             # multiple definitions for an attribute. Defer to the base class
             # definition.
-            del self.type.names[node.name]
+            if self.type is not None:
+                del self.type.names[node.name]
             node.is_def = False
             node.def_var = None
 
     def is_duplicate_attribute_def(self, node: MemberExpr) -> bool:
-        if not node.is_def or node.name not in self.type.names:
+        if not node.is_def:
+            return False
+        assert self.type is not None, "Internal error: Member defined outside class"
+        if node.name not in self.type.names:
             return False
         return any(info.get(node.name) is not None for info in self.type.mro[1:])
 
diff --git a/mypy/server/deps.py b/mypy/server/deps.py
index d167b86..0402a51 100644
--- a/mypy/server/deps.py
+++ b/mypy/server/deps.py
@@ -1,6 +1,6 @@
 """Generate fine-grained dependencies for AST nodes."""
 
-from typing import Dict, List, Set
+from typing import Dict, List, Set, Optional
 
 from mypy.checkmember import bind_self
 from mypy.nodes import (
@@ -11,7 +11,7 @@ from mypy.traverser import TraverserVisitor
 from mypy.types import (
     Type, Instance, AnyType, NoneTyp, TypeVisitor, CallableType, DeletedType, PartialType,
     TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType,
-    FunctionLike
+    FunctionLike, ForwardRef
 )
 from mypy.server.trigger import make_trigger
 
@@ -121,15 +121,17 @@ class DependencyVisitor(TraverserVisitor):
             # We don't track dependencies to local variables, since they
             # aren't externally visible.
             return
-        trigger = make_trigger(o.fullname)
-        self.add_dependency(trigger)
+        if o.fullname is not None:
+            trigger = make_trigger(o.fullname)
+            self.add_dependency(trigger)
 
     def visit_member_expr(self, e: MemberExpr) -> None:
         super().visit_member_expr(e)
         if e.kind is not None:
             # Reference to a module attribute
-            trigger = make_trigger(e.fullname)
-            self.add_dependency(trigger)
+            if e.fullname is not None:
+                trigger = make_trigger(e.fullname)
+                self.add_dependency(trigger)
         else:
             # Reference to a non-module attribute
             typ = self.type_map[e.expr]
@@ -151,7 +153,7 @@ class DependencyVisitor(TraverserVisitor):
 
     # Helpers
 
-    def add_dependency(self, trigger: str, target: str = None) -> None:
+    def add_dependency(self, trigger: str, target: Optional[str] = None) -> None:
         if target is None:
             target = self.current()
         self.map.setdefault(trigger, set()).add(target)
@@ -210,6 +212,9 @@ class TypeDependenciesVisitor(TypeVisitor[List[str]]):
         # TODO: replace with actual implementation
         return []
 
+    def visit_forwardref_type(self, typ: ForwardRef) -> List[str]:
+        return get_type_dependencies(typ.link)
+
     def visit_type_var(self, typ: TypeVarType) -> List[str]:
         # TODO: replace with actual implementation
         return []
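
The dependency visitor above boils down to one bookkeeping step: for every reference it can name,
map a trigger derived from the referenced full name to the target currently being analyzed. A
minimal sketch of that step (the angle-bracket trigger format is only illustrative here; the real
one lives in mypy.server.trigger):

    from typing import Dict, Set

    deps = {}  # type: Dict[str, Set[str]]

    def add_dependency(trigger: str, target: str) -> None:
        # Re-process 'target' whenever whatever 'trigger' stands for changes.
        deps.setdefault(trigger, set()).add(target)

    # A reference to pkg.mod.f inside pkg.mod.g would record roughly:
    add_dependency('<pkg.mod.f>', 'pkg.mod.g')
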
diff --git a/mypy/server/update.py b/mypy/server/update.py
index 8394707..c8dc70b 100644
--- a/mypy/server/update.py
+++ b/mypy/server/update.py
@@ -46,7 +46,7 @@ Major todo items:
 - Support multiple type checking passes
 """
 
-from typing import Dict, List, Set, Tuple, Iterable, Union
+from typing import Dict, List, Set, Tuple, Iterable, Union, Optional
 
 from mypy.build import BuildManager, State
 from mypy.checker import DeferredNode
@@ -154,6 +154,7 @@ def build_incremental_step(manager: BuildManager,
     # TODO: state.write_cache()?
     # TODO: state.mark_as_rechecked()?
 
+    assert state.tree is not None, "file must be at least parsed"
     return {id: state.tree}, {id: state.type_checker.type_map}
 
 
@@ -350,7 +351,7 @@ def get_enclosing_namespace_types(nodes: List[DeferredNode]) -> Dict[NamespaceNo
     for deferred in nodes:
         info = deferred.active_typeinfo
         if info:
-            target = info  # type: NamespaceNode
+            target = info  # type: Optional[NamespaceNode]
         elif isinstance(deferred.node, MypyFile):
             target = deferred.node
         else:
@@ -358,7 +359,7 @@ def get_enclosing_namespace_types(nodes: List[DeferredNode]) -> Dict[NamespaceNo
         if target and target not in types:
             local_types = {name: node.node.type
                          for name, node in target.names.items()
-                         if isinstance(node.node, Var)}
+                         if isinstance(node.node, Var) and node.node.type}
             types[target] = local_types
     return types
 
@@ -369,7 +370,7 @@ def get_triggered_namespace_items(old_types_map: Dict[NamespaceNode, Dict[str, T
         for name, node in namespace_node.names.items():
             if (name in old_types and
                     (not isinstance(node.node, Var) or
-                     not is_identical_type(node.node.type, old_types[name]))):
+                     node.node.type and not is_identical_type(node.node.type, old_types[name]))):
                 # Type checking a method changed an attribute type.
                 new_triggered.add(make_trigger('{}.{}'.format(namespace_node.fullname(), name)))
     return new_triggered
@@ -397,8 +398,8 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod
         components = rest.split('.')
     else:
         components = []
-    node = modules[module]  # type: SymbolNode
-    file = None  # type: MypyFile
+    node = modules[module]  # type: Optional[SymbolNode]
+    file = None  # type: Optional[MypyFile]
     active_class = None
     active_class_name = None
     for c in components:
@@ -415,6 +416,7 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod
         # within it.  To get the body we include the entire surrounding target,
         # typically a module top-level, since we don't support processing class
         # bodies as separate entities for simplicity.
+        assert file is not None
         result = [DeferredNode(file, None, None)]
         for name, symnode in node.names.items():
             node = symnode.node
diff --git a/mypy/sharedparse.py b/mypy/sharedparse.py
index 157bb3f..1b3e5a3 100644
--- a/mypy/sharedparse.py
+++ b/mypy/sharedparse.py
@@ -1,4 +1,4 @@
-from typing import Union, Tuple
+from typing import Optional
 
 """Shared logic between our three mypy parser files."""
 
@@ -97,5 +97,5 @@ def special_function_elide_names(name: str) -> bool:
     return name in MAGIC_METHODS_POS_ARGS_ONLY
 
 
-def argument_elide_name(name: Union[str, Tuple, None]) -> bool:
-    return isinstance(name, str) and name.startswith("__")
+def argument_elide_name(name: Optional[str]) -> bool:
+    return name is not None and name.startswith("__")
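
With the narrower Optional[str] signature, the elision check itself is unchanged: an argument
name is dropped from the signature only when it uses the double-underscore convention for
positional-only parameters. For example:

    from mypy.sharedparse import argument_elide_name

    assert argument_elide_name('__x') is True    # positional-only convention, name elided
    assert argument_elide_name('x') is False
    assert argument_elide_name(None) is False    # unnamed argument
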
diff --git a/mypy/solve.py b/mypy/solve.py
index ad6a882..8eb12b7 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -3,7 +3,7 @@
 from typing import List, Dict, Optional
 from collections import defaultdict
 
-from mypy.types import Type, NoneTyp, AnyType, UninhabitedType, TypeVarId
+from mypy.types import Type, NoneTyp, AnyType, UninhabitedType, TypeVarId, TypeOfAny
 from mypy.constraints import Constraint, SUPERTYPE_OF
 from mypy.join import join_types
 from mypy.meet import meet_types
@@ -52,7 +52,9 @@ def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint],
                     top = meet_types(top, c.target)
 
         if isinstance(top, AnyType) or isinstance(bottom, AnyType):
-            res.append(AnyType())
+            source_any = top if isinstance(top, AnyType) else bottom
+            assert isinstance(source_any, AnyType)
+            res.append(AnyType(TypeOfAny.from_another_any, source_any=source_any))
             continue
         elif bottom is None:
             if top:
@@ -62,7 +64,7 @@ def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint],
                 if strict:
                     candidate = UninhabitedType()
                 else:
-                    candidate = AnyType()
+                    candidate = AnyType(TypeOfAny.special_form)
         elif top is None:
             candidate = bottom
         elif is_subtype(bottom, top):
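
The solver change means a solution that degenerates to Any no longer fabricates a brand-new,
unattributed Any; it records which Any it was derived from, so later statistics can attribute it
to its original source. A hedged sketch of that linkage, using only the API visible in this diff:

    from mypy.types import AnyType, TypeOfAny

    original = AnyType(TypeOfAny.from_error)        # e.g. an Any produced by an earlier error
    derived = AnyType(TypeOfAny.from_another_any,   # solver output keeps a link back
                      source_any=original)          # to the Any it came from
    assert derived.source_any is original
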
diff --git a/mypy/stats.py b/mypy/stats.py
index de56036..120fd16 100644
--- a/mypy/stats.py
+++ b/mypy/stats.py
@@ -2,12 +2,16 @@
 
 import cgi
 import os.path
+import typing
 
-from typing import Dict, List, cast, Tuple, Set, Optional
+from collections import Counter
+from typing import Dict, List, cast, Tuple, Optional
 
 from mypy.traverser import TraverserVisitor
+from mypy.typeanal import collect_all_inner_types
 from mypy.types import (
-    Type, AnyType, Instance, FunctionLike, TupleType, TypeVarType, TypeQuery, CallableType
+    Type, AnyType, Instance, FunctionLike, TupleType, TypeVarType, TypeQuery, CallableType,
+    TypeOfAny
 )
 from mypy import nodes
 from mypy.nodes import (
@@ -32,28 +36,37 @@ precision_names = [
 
 
 class StatisticsVisitor(TraverserVisitor):
-    def __init__(self, inferred: bool, filename: str, typemap: Dict[Expression, Type] = None,
-                 all_nodes: bool = False) -> None:
+    def __init__(self,
+                 inferred: bool,
+                 filename: str,
+                 typemap: Optional[Dict[Expression, Type]] = None,
+                 all_nodes: bool = False,
+                 visit_untyped_defs: bool = True) -> None:
         self.inferred = inferred
         self.filename = filename
         self.typemap = typemap
         self.all_nodes = all_nodes
+        self.visit_untyped_defs = visit_untyped_defs
 
-        self.num_precise = 0
-        self.num_imprecise = 0
-        self.num_any = 0
+        self.num_precise_exprs = 0
+        self.num_imprecise_exprs = 0
+        self.num_any_exprs = 0
 
-        self.num_simple = 0
-        self.num_generic = 0
-        self.num_tuple = 0
-        self.num_function = 0
-        self.num_typevar = 0
-        self.num_complex = 0
+        self.num_simple_types = 0
+        self.num_generic_types = 0
+        self.num_tuple_types = 0
+        self.num_function_types = 0
+        self.num_typevar_types = 0
+        self.num_complex_types = 0
+        self.num_any_types = 0
 
         self.line = -1
 
         self.line_map = {}  # type: Dict[int, int]
 
+        self.type_of_any_counter = Counter()  # type: typing.Counter[TypeOfAny.TypeOfAny]
+        self.any_line_map = {}  # type: Dict[int, List[AnyType]]
+
         self.output = []  # type: List[str]
 
         TraverserVisitor.__init__(self)
@@ -79,7 +92,8 @@ class StatisticsVisitor(TraverserVisitor):
                 self.type(sig.ret_type)
             elif self.all_nodes:
                 self.record_line(self.line, TYPE_ANY)
-            super().visit_func_def(o)
+            if not o.is_dynamic() or self.visit_untyped_defs:
+                super().visit_func_def(o)
 
     def visit_class_def(self, o: ClassDef) -> None:
         # Override this method because we don't want to analyze base_type_exprs (base_type_exprs
@@ -104,7 +118,8 @@ class StatisticsVisitor(TraverserVisitor):
             return
         if o.type:
             self.type(o.type)
-        elif self.inferred:
+        elif self.inferred and not self.all_nodes:
+            # if self.all_nodes is set, lvalues will be visited later
             for lvalue in o.lvalues:
                 if isinstance(lvalue, nodes.TupleExpr):
                     items = lvalue.items
@@ -115,15 +130,7 @@ class StatisticsVisitor(TraverserVisitor):
                 for item in items:
                     if isinstance(item, RefExpr) and item.is_def:
                         if self.typemap is not None:
-                            t = self.typemap.get(item)
-                        else:
-                            t = None
-                        if t:
-                            self.type(t)
-                        else:
-                            self.log('  !! No inferred type on line %d' %
-                                     self.line)
-                            self.record_line(self.line, TYPE_ANY)
+                            self.type(self.typemap.get(item))
         super().visit_assignment_stmt(o)
 
     def visit_name_expr(self, o: NameExpr) -> None:
@@ -177,40 +184,52 @@ class StatisticsVisitor(TraverserVisitor):
             self.record_line(self.line, TYPE_UNANALYZED)
             return
 
-        if isinstance(t, AnyType) and t.special_form:
+        if isinstance(t, AnyType) and t.type_of_any == TypeOfAny.special_form:
             # This is not a real Any type, so don't collect stats for it.
             return
 
         if isinstance(t, AnyType):
             self.log('  !! Any type around line %d' % self.line)
-            self.num_any += 1
+            self.num_any_exprs += 1
             self.record_line(self.line, TYPE_ANY)
         elif ((not self.all_nodes and is_imprecise(t)) or
               (self.all_nodes and is_imprecise2(t))):
             self.log('  !! Imprecise type around line %d' % self.line)
-            self.num_imprecise += 1
+            self.num_imprecise_exprs += 1
             self.record_line(self.line, TYPE_IMPRECISE)
         else:
-            self.num_precise += 1
+            self.num_precise_exprs += 1
             self.record_line(self.line, TYPE_PRECISE)
 
-        if isinstance(t, Instance):
-            if t.args:
-                if any(is_complex(arg) for arg in t.args):
-                    self.num_complex += 1
+        for typ in collect_all_inner_types(t) + [t]:
+            if isinstance(typ, AnyType):
+                if typ.type_of_any == TypeOfAny.from_another_any:
+                    assert typ.source_any
+                    assert typ.source_any.type_of_any != TypeOfAny.from_another_any
+                    typ = typ.source_any
+                self.type_of_any_counter[typ.type_of_any] += 1
+                self.num_any_types += 1
+                if self.line in self.any_line_map:
+                    self.any_line_map[self.line].append(typ)
+                else:
+                    self.any_line_map[self.line] = [typ]
+            elif isinstance(typ, Instance):
+                if typ.args:
+                    if any(is_complex(arg) for arg in typ.args):
+                        self.num_complex_types += 1
+                    else:
+                        self.num_generic_types += 1
+                else:
+                    self.num_simple_types += 1
+            elif isinstance(typ, FunctionLike):
+                self.num_function_types += 1
+            elif isinstance(typ, TupleType):
+                if any(is_complex(item) for item in typ.items):
+                    self.num_complex_types += 1
                 else:
-                    self.num_generic += 1
-            else:
-                self.num_simple += 1
-        elif isinstance(t, FunctionLike):
-            self.num_function += 1
-        elif isinstance(t, TupleType):
-            if any(is_complex(item) for item in t.items):
-                self.num_complex += 1
-            else:
-                self.num_tuple += 1
-        elif isinstance(t, TypeVarType):
-            self.num_typevar += 1
+                    self.num_tuple_types += 1
+            elif isinstance(typ, TypeVarType):
+                self.num_typevar_types += 1
 
     def log(self, string: str) -> None:
         self.output.append(string)
@@ -221,7 +240,7 @@ class StatisticsVisitor(TraverserVisitor):
 
 
 def dump_type_stats(tree: MypyFile, path: str, inferred: bool = False,
-                    typemap: Dict[Expression, Type] = None) -> None:
+                    typemap: Optional[Dict[Expression, Type]] = None) -> None:
     if is_special_module(path):
         return
     print(path)
@@ -230,17 +249,17 @@ def dump_type_stats(tree: MypyFile, path: str, inferred: bool = False,
     for line in visitor.output:
         print(line)
     print('  ** precision **')
-    print('  precise  ', visitor.num_precise)
-    print('  imprecise', visitor.num_imprecise)
-    print('  any      ', visitor.num_any)
+    print('  precise  ', visitor.num_precise_exprs)
+    print('  imprecise', visitor.num_imprecise_exprs)
+    print('  any      ', visitor.num_any_exprs)
     print('  ** kinds **')
-    print('  simple   ', visitor.num_simple)
-    print('  generic  ', visitor.num_generic)
-    print('  function ', visitor.num_function)
-    print('  tuple    ', visitor.num_tuple)
-    print('  TypeVar  ', visitor.num_typevar)
-    print('  complex  ', visitor.num_complex)
-    print('  any      ', visitor.num_any)
+    print('  simple   ', visitor.num_simple_types)
+    print('  generic  ', visitor.num_generic_types)
+    print('  function ', visitor.num_function_types)
+    print('  tuple    ', visitor.num_tuple_types)
+    print('  TypeVar  ', visitor.num_typevar_types)
+    print('  complex  ', visitor.num_complex_types)
+    print('  any      ', visitor.num_any_types)
 
 
 def is_special_module(path: str) -> bool:
@@ -329,7 +348,8 @@ def generate_html_report(tree: MypyFile, path: str, type_map: Dict[Expression, T
             status = visitor.line_map.get(lineno, TYPE_PRECISE)
             style_map = {TYPE_PRECISE: 'white',
                          TYPE_IMPRECISE: 'yellow',
-                         TYPE_ANY: 'red'}
+                         TYPE_ANY: 'red',
+                         TYPE_UNANALYZED: 'red'}
             style = style_map[status]
             append('<span class="lineno">%4d</span>   ' % lineno +
                    '<span class="%s">%s</span>' % (style,
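
The reworked statistics walk all inner types, unwrap Any types that were merely propagated from
another Any back to their original source, and tally the origins. A reduced sketch of that
tallying step under the same assumptions:

    from collections import Counter
    from mypy.types import AnyType, TypeOfAny

    type_of_any_counter = Counter()  # counts TypeOfAny origins

    def record_any(typ: AnyType) -> None:
        # Attribute a propagated Any to the Any it was copied from.
        if typ.type_of_any == TypeOfAny.from_another_any:
            assert typ.source_any is not None
            typ = typ.source_any
        type_of_any_counter[typ.type_of_any] += 1
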
diff --git a/mypy/strconv.py b/mypy/strconv.py
index ab0593a..099515f 100644
--- a/mypy/strconv.py
+++ b/mypy/strconv.py
@@ -175,12 +175,12 @@ class StrConv(NodeVisitor[str]):
         return self.dump(a, o)
 
     def visit_var(self, o: 'mypy.nodes.Var') -> str:
-        l = ''
+        lst = ''
         # Add :nil line number tag if no line number is specified to remain
         # compatible with old test case descriptions that assume this.
         if o.line < 0:
-            l = ':nil'
-        return 'Var' + l + '(' + o.name() + ')'
+            lst = ':nil'
+        return 'Var' + lst + '(' + o.name() + ')'
 
     def visit_global_decl(self, o: 'mypy.nodes.GlobalDecl') -> str:
         return self.dump([o.names], o)
@@ -345,8 +345,8 @@ class StrConv(NodeVisitor[str]):
         pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_def, o.node)
         return short_type(o) + '(' + pretty + ')'
 
-    def pretty_name(self, name: str, kind: int, fullname: str, is_def: bool,
-                    target_node: 'mypy.nodes.Node' = None) -> str:
+    def pretty_name(self, name: str, kind: Optional[int], fullname: Optional[str],
+                    is_def: bool, target_node: 'Optional[mypy.nodes.Node]' = None) -> str:
         n = name
         if is_def:
             n += '*'
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index bd1dbeb..ce93bf7 100644
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -44,9 +44,10 @@ import subprocess
 import sys
 import textwrap
 import traceback
+from collections import defaultdict
 
 from typing import (
-    Any, List, Dict, Tuple, Iterable, Iterator, Optional, NamedTuple, Set, Union, cast
+    Any, List, Dict, Tuple, Iterable, Iterator, Mapping, Optional, NamedTuple, Set, Union, cast
 )
 
 import mypy.build
@@ -56,14 +57,16 @@ import mypy.traverser
 from mypy import defaults
 from mypy.nodes import (
     Expression, IntExpr, UnaryExpr, StrExpr, BytesExpr, NameExpr, FloatExpr, MemberExpr, TupleExpr,
-    ListExpr, ComparisonExpr, CallExpr, ClassDef, MypyFile, Decorator, AssignmentStmt,
-    IfStmt, ImportAll, ImportFrom, Import, FuncDef, FuncBase,
-    ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT,
+    ListExpr, ComparisonExpr, CallExpr, IndexExpr, EllipsisExpr,
+    ClassDef, MypyFile, Decorator, AssignmentStmt,
+    IfStmt, ImportAll, ImportFrom, Import, FuncDef, FuncBase, TempNode,
+    ARG_POS, ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT,
 )
 from mypy.stubgenc import parse_all_signatures, find_unique_signatures, generate_stub_for_c_module
 from mypy.stubutil import is_c_module, write_header
 from mypy.options import Options as MypyOptions
-
+from mypy.types import Type, TypeStrVisitor, AnyType, CallableType, UnboundType, NoneTyp, TupleType
+from mypy.visitor import NodeVisitor
 
 Options = NamedTuple('Options', [('pyversion', Tuple[int, int]),
                                  ('no_import', bool),
@@ -74,6 +77,7 @@ Options = NamedTuple('Options', [('pyversion', Tuple[int, int]),
                                  ('ignore_errors', bool),
                                  ('recursive', bool),
                                  ('include_private', bool),
+                                 ('output_dir', str),
                                  ])
 
 
@@ -118,6 +122,7 @@ def generate_stub_for_module(module: str, output_dir: str, quiet: bool = False,
         else:
             target += '.pyi'
         target = os.path.join(output_dir, target)
+
         generate_stub(module_path, output_dir, module_all,
                       target=target, add_header=add_header, module=module,
                       pyversion=pyversion, include_private=include_private)
@@ -135,6 +140,7 @@ def find_module_path_and_all(module: str, pyversion: Tuple[int, int],
     Return None if the module is a C module. Return (module_path, __all__) if
     Python module. Raise an exception or exit if failed.
     """
+    module_path = None  # type: Optional[str]
     if not no_import:
         if pyversion[0] == 2:
             module_path, module_all = load_python_module_info(module, interpreter)
@@ -185,8 +191,12 @@ def load_python_module_info(module: str, interpreter: str) -> Tuple[str, Optiona
     return module_path, module_all
 
 
-def generate_stub(path: str, output_dir: str, _all_: Optional[List[str]] = None,
-                  target: str = None, add_header: bool = False, module: str = None,
+def generate_stub(path: str,
+                  output_dir: str,
+                  _all_: Optional[List[str]] = None,
+                  target: Optional[str] = None,
+                  add_header: bool = False,
+                  module: Optional[str] = None,
                   pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
                   include_private: bool = False
                   ) -> None:
@@ -225,27 +235,188 @@ VAR = 'VAR'
 NOT_IN_ALL = 'NOT_IN_ALL'
 
 
+class AnnotationPrinter(TypeStrVisitor):
+
+    def __init__(self, stubgen: 'StubGenerator') -> None:
+        super().__init__()
+        self.stubgen = stubgen
+
+    def visit_unbound_type(self, t: UnboundType) -> str:
+        s = t.name
+        base = s.split('.')[0]
+        self.stubgen.import_tracker.require_name(base)
+        if t.args != []:
+            s += '[{}]'.format(self.list_str(t.args))
+        return s
+
+    def visit_none_type(self, t: NoneTyp) -> str:
+        return "None"
+
+
+class AliasPrinter(NodeVisitor[str]):
+
+    def __init__(self, stubgen: 'StubGenerator') -> None:
+        self.stubgen = stubgen
+        super().__init__()
+
+    def visit_call_expr(self, node: CallExpr) -> str:
+        # Call expressions are not usually types, but we also treat `X = TypeVar(...)` as a
+        # type alias that has to be preserved (even if TypeVar is not the same as an alias)
+        callee = node.callee.accept(self)
+        args = []
+        for name, arg, kind in zip(node.arg_names, node.args, node.arg_kinds):
+            if kind == ARG_POS:
+                args.append(arg.accept(self))
+            elif kind == ARG_STAR:
+                args.append('*' + arg.accept(self))
+            elif kind == ARG_STAR2:
+                args.append('**' + arg.accept(self))
+            elif kind == ARG_NAMED:
+                args.append('{}={}'.format(name, arg.accept(self)))
+            else:
+                raise ValueError("Unknown argument kind %d in call" % kind)
+        return "{}({})".format(callee, ", ".join(args))
+
+    def visit_name_expr(self, node: NameExpr) -> str:
+        self.stubgen.import_tracker.require_name(node.name)
+        return node.name
+
+    def visit_str_expr(self, node: StrExpr) -> str:
+        return repr(node.value)
+
+    def visit_index_expr(self, node: IndexExpr) -> str:
+        base = node.base.accept(self)
+        index = node.index.accept(self)
+        return "{}[{}]".format(base, index)
+
+    def visit_tuple_expr(self, node: TupleExpr) -> str:
+        return ", ".join(n.accept(self) for n in node.items)
+
+    def visit_list_expr(self, node: ListExpr) -> str:
+        return "[{}]".format(", ".join(n.accept(self) for n in node.items))
+
+    def visit_ellipsis(self, node: EllipsisExpr) -> str:
+        return "..."
+
+
+class ImportTracker:
+
+    def __init__(self) -> None:
+        # module_for['foo'] has the module name where 'foo' was imported from, or None if
+        # 'foo' is a module imported directly; examples
+        #     'from pkg.m import f as foo' ==> module_for['foo'] == 'pkg.m'
+        #     'from m import f' ==> module_for['f'] == 'm'
+        #     'import m' ==> module_for['m'] == None
+        self.module_for = {}  # type: Dict[str, Optional[str]]
+
+        # direct_imports['foo'] is the module path used when the name 'foo' was added to the
+        # namespace.
+        #   import foo.bar.baz  ==> direct_imports['foo'] == 'foo.bar.baz'
+        self.direct_imports = {}  # type: Dict[str, str]
+
+        # reverse_alias['foo'] is the name that 'foo' had originally when imported with an
+        # alias; examples
+        #     'import numpy as np' ==> reverse_alias['np'] == 'numpy'
+        #     'from decimal import Decimal as D' ==> reverse_alias['D'] == 'Decimal'
+        self.reverse_alias = {}  # type: Dict[str, str]
+
+        # required_names is the set of names that are actually used in a type annotation
+        self.required_names = set()  # type: Set[str]
+
+        # Names that should be reexported if they come from another module
+        self.reexports = set()  # type: Set[str]
+
+    def add_import_from(self, module: str, names: List[Tuple[str, Optional[str]]]) -> None:
+        for name, alias in names:
+            self.module_for[alias or name] = module
+            if alias:
+                self.reverse_alias[alias] = name
+
+    def add_import(self, module: str, alias: Optional[str]=None) -> None:
+        name = module.split('.')[0]
+        self.module_for[alias or name] = None
+        self.direct_imports[name] = module
+        if alias:
+            self.reverse_alias[alias] = name
+
+    def require_name(self, name: str) -> None:
+        self.required_names.add(name.split('.')[0])
+
+    def reexport(self, name: str) -> None:
+        """
+        Mark a given non qualified name as needed in __all__. This means that in case it
+        comes from a module, it should be imported with an alias even is the alias is the same
+        as the name.
+
+        """
+        self.require_name(name)
+        self.reexports.add(name)
+
+    def import_lines(self) -> List[str]:
+        """
+        The list of required import lines (as strings with python code)
+        """
+        result = []
+
+        # To summarize multiple names imported from the same module, we collect those
+        # in the `module_map` dictionary, mapping a module path to the list of names that should
+        # be imported from it. The names can also be aliases in the form 'original as alias'.
+        module_map = defaultdict(list)  # type: Mapping[str, List[str]]
+
+        for name in sorted(self.required_names):
+            # If we haven't seen this name in an import statement, ignore it
+            if name not in self.module_for:
+                continue
+
+            m = self.module_for[name]
+            if m is not None:
+                # This name was found in a from ... import ...
+                # Collect the name in the module_map
+                if name in self.reverse_alias:
+                    name = '{} as {}'.format(self.reverse_alias[name], name)
+                elif name in self.reexports:
+                    name = '{} as {}'.format(name, name)
+                module_map[m].append(name)
+            else:
+                # This name was found in an import ...
+                # We can already generate the import line
+                if name in self.reverse_alias:
+                    name, alias = self.reverse_alias[name], name
+                    result.append("import {} as {}\n".format(self.direct_imports[name], alias))
+                elif name in self.reexports:
+                    assert '.' not in name  # Because reexports only has nonqualified names
+                    result.append("import {} as {}\n".format(name, name))
+                else:
+                    result.append("import {}\n".format(self.direct_imports[name]))
+
+        # Now generate all the from ... import ... lines collected in module_map
+        for module, names in sorted(module_map.items()):
+            result.append("from {} import {}\n".format(module, ', '.join(sorted(names))))
+        return result
+
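
The attribute comments above describe the tracker's bookkeeping; a small usage sketch of how the
pieces fit together (the tracked modules are invented, and the exact output shown is approximate):

    tracker = ImportTracker()
    tracker.add_import('os.path')                                    # import os.path
    tracker.add_import_from('typing', [('Optional', None), ('List', None)])
    tracker.add_import_from('numpy', [('ndarray', 'array')])

    tracker.require_name('os')         # names actually used in the generated stub
    tracker.require_name('Optional')
    tracker.require_name('array')

    print(''.join(tracker.import_lines()))
    # import os.path
    # from numpy import ndarray as array
    # from typing import Optional
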
+
 class StubGenerator(mypy.traverser.TraverserVisitor):
     def __init__(self, _all_: Optional[List[str]], pyversion: Tuple[int, int],
                  include_private: bool = False) -> None:
         self._all_ = _all_
         self._output = []  # type: List[str]
         self._import_lines = []  # type: List[str]
-        self._imports = []  # type: List[str]
         self._indent = ''
         self._vars = [[]]  # type: List[List[str]]
         self._state = EMPTY
         self._toplevel_names = []  # type: List[str]
-        self._classes = set()  # type: Set[str]
-        self._base_classes = []  # type: List[str]
         self._pyversion = pyversion
         self._include_private = include_private
+        self.import_tracker = ImportTracker()
+        # Add imports that could be implicitly generated
+        self.import_tracker.add_import_from("collections", [("namedtuple", None)])
+        typing_imports = "Any Optional TypeVar".split()
+        self.import_tracker.add_import_from("typing", [(t, None) for t in typing_imports])
+        # Names in __all__ are required
+        for name in _all_ or ():
+            self.import_tracker.reexport(name)
 
     def visit_mypy_file(self, o: MypyFile) -> None:
-        self._classes = find_classes(o)
-        for node in o.defs:
-            if isinstance(node, ClassDef):
-                self._base_classes.extend(self.get_base_types(node))
         super().visit_mypy_file(o)
         undefined_names = [name for name in self._all_ or []
                            if name not in self._toplevel_names]
@@ -278,21 +449,34 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
             var = arg_.variable
             kind = arg_.kind
             name = var.name()
+            annotated_type = o.type.arg_types[i] if isinstance(o.type, CallableType) else None
+            if annotated_type and not (
+                    i == 0 and name == 'self' and isinstance(annotated_type, AnyType)):
+                annotation = ": {}".format(self.print_annotation(annotated_type))
+            else:
+                annotation = ""
             init_stmt = arg_.initialization_statement
             if init_stmt:
+                initializer = '...'
                 if kind in (ARG_NAMED, ARG_NAMED_OPT) and '*' not in args:
                     args.append('*')
-                typename = self.get_str_type_of_node(init_stmt.rvalue, True)
-                arg = '{}: {} = ...'.format(name, typename)
+                if not annotation:
+                    typename = self.get_str_type_of_node(init_stmt.rvalue, True)
+                    annotation = ': {} = ...'.format(typename)
+                else:
+                    annotation += '={}'.format(initializer)
+                arg = name + annotation
             elif kind == ARG_STAR:
-                arg = '*%s' % name
+                arg = '*%s%s' % (name, annotation)
             elif kind == ARG_STAR2:
-                arg = '**%s' % name
+                arg = '**%s%s' % (name, annotation)
             else:
-                arg = name
+                arg = name + annotation
             args.append(arg)
         retname = None
-        if o.name() == '__init__':
+        if isinstance(o.type, CallableType):
+            retname = self.print_annotation(o.type.ret_type)
+        elif o.name() == '__init__':
             retname = 'None'
         retfield = ''
         if retname is not None:
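
With these changes stubgen copies explicit annotations from the analyzed CallableType into the
stub instead of always guessing from default values. Roughly, for a hypothetical source function
like the one below, the emitted stub line keeps the annotations and elides the defaults:

    from typing import List

    def scale(values: List[int], factor: float = 2.0) -> List[float]:
        return [v * factor for v in values]

    # The generated stub line would read approximately:
    #     def scale(values: List[int], factor: float=...) -> List[float]: ...
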
@@ -325,6 +509,8 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
         base_types = self.get_base_types(o)
         if base_types:
             self.add('(%s)' % ', '.join(base_types))
+            for base in base_types:
+                self.import_tracker.require_name(base)
         self.add(':\n')
         n = len(self._output)
         self._indent += '    '
@@ -332,6 +518,7 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
         super().visit_class_def(o)
         self._indent = self._indent[:-4]
         self._vars.pop()
+        self._vars[-1].append(o.name)
         if len(self._output) == n:
             if self._state == EMPTY_CLASS and sep is not None:
                 self._output[sep] = ''
@@ -349,7 +536,9 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
             elif isinstance(base, MemberExpr):
                 modname = get_qualified_name(base.expr)
                 base_types.append('%s.%s' % (modname, base.name))
-                self.add_import_line('import %s\n' % modname)
+            elif isinstance(base, IndexExpr):
+                p = AliasPrinter(self)
+                base_types.append(base.accept(p))
         return base_types
 
     def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
@@ -360,17 +549,24 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
                 assert isinstance(o.rvalue, CallExpr)
                 self.process_namedtuple(lvalue, o.rvalue)
                 continue
-            if isinstance(lvalue, TupleExpr):
-                items = lvalue.items
-            elif isinstance(lvalue, ListExpr):
+            if (self.is_top_level() and
+                    isinstance(lvalue, NameExpr) and self.is_type_expression(o.rvalue)):
+                self.process_typealias(lvalue, o.rvalue)
+                continue
+            if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr):
                 items = lvalue.items
+                if isinstance(o.type, TupleType):
+                    annotations = o.type.items  # type: Iterable[Optional[Type]]
+                else:
+                    annotations = [None] * len(items)
             else:
                 items = [lvalue]
+                annotations = [o.type]
             sep = False
             found = False
-            for item in items:
+            for item, annotation in zip(items, annotations):
                 if isinstance(item, NameExpr):
-                    init = self.get_init(item.name, o.rvalue)
+                    init = self.get_init(item.name, o.rvalue, annotation)
                     if init:
                         found = True
                         if not sep and not self._indent and \
@@ -392,7 +588,7 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
                 (isinstance(callee, MemberExpr) and callee.name == 'namedtuple'))
 
     def process_namedtuple(self, lvalue: NameExpr, rvalue: CallExpr) -> None:
-        self.add_import_line('from collections import namedtuple\n')
+        self.import_tracker.require_name('namedtuple')
         if self._state != EMPTY:
             self.add('\n')
         name = repr(getattr(rvalue.args[0], 'value', '<ERROR>'))
@@ -404,9 +600,50 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
         else:
             items = '<ERROR>'
         self.add('%s = namedtuple(%s, %s)\n' % (lvalue.name, name, items))
-        self._classes.add(lvalue.name)
         self._state = CLASS
 
+    def is_type_expression(self, expr: Expression, top_level: bool = True) -> bool:
+        """Return True for things that look like type expressions.
+
+        Used to know if assignments look like type aliases.
+        """
+        # Assignment of TypeVar(...) are passed through
+        if (isinstance(expr, CallExpr) and
+                isinstance(expr.callee, NameExpr) and
+                expr.callee.name == 'TypeVar'):
+            return True
+        elif isinstance(expr, EllipsisExpr):
+            return not top_level
+        elif isinstance(expr, NameExpr):
+            if expr.name in ('True', 'False'):
+                return False
+            elif expr.name == 'None':
+                return not top_level
+            else:
+                return True
+        elif isinstance(expr, IndexExpr) and isinstance(expr.base, NameExpr):
+            if isinstance(expr.index, TupleExpr):
+                indices = expr.index.items
+            else:
+                indices = [expr.index]
+            if expr.base.name == 'Callable' and len(indices) == 2:
+                args, ret = indices
+                if isinstance(args, EllipsisExpr):
+                    indices = [ret]
+                elif isinstance(args, ListExpr):
+                    indices = args.items + [ret]
+                else:
+                    return False
+            return all(self.is_type_expression(i, top_level=False) for i in indices)
+        else:
+            return False
+
+    def process_typealias(self, lvalue: NameExpr, rvalue: Expression) -> None:
+        p = AliasPrinter(self)
+        self.add("{} = {}\n".format(lvalue.name, rvalue.accept(p)))
+        self.record_name(lvalue.name)
+        self._vars[-1].append(lvalue.name)
+
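
Taken together, is_type_expression and process_typealias mean that top-level assignments whose
right-hand side looks like a type are reproduced verbatim in the stub rather than collapsed to a
variable. A few invented cases to illustrate the distinction:

    from typing import Callable, Optional, TypeVar

    MaybeInt = Optional[int]               # kept as an alias in the stub
    Handler = Callable[[str, int], None]   # kept as an alias in the stub
    T = TypeVar('T')                       # TypeVar calls are passed through too
    DEBUG = True                           # not a type expression; becomes roughly DEBUG: bool
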
     def visit_if_stmt(self, o: IfStmt) -> None:
         # Ignore if __name__ == '__main__'.
         expr = o.expr[0]
@@ -423,53 +660,38 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
 
     def visit_import_from(self, o: ImportFrom) -> None:
         exported_names = set()  # type: Set[str]
+        self.import_tracker.add_import_from('.' * o.relative + o.id, o.names)
+        self._vars[-1].extend(alias or name for name, alias in o.names)
+        for name, alias in o.names:
+            self.record_name(alias or name)
+
         if self._all_:
             # Include import froms that import names defined in __all__.
             names = [name for name, alias in o.names
                      if name in self._all_ and alias is None]
             exported_names.update(names)
-            self.import_and_export_names(o.id, o.relative, names)
         else:
             # Include import from targets that import from a submodule of a package.
             if o.relative:
                 sub_names = [name for name, alias in o.names
                              if alias is None]
                 exported_names.update(sub_names)
-                self.import_and_export_names(o.id, o.relative, sub_names)
-        # Import names used as base classes.
-        base_names = [(name, alias) for name, alias in o.names
-                      if alias or name in self._base_classes and name not in exported_names]
-        if base_names:
-            imp_names = []  # type: List[str]
-            for name, alias in base_names:
-                if alias is not None and alias != name:
-                    imp_names.append('%s as %s' % (name, alias))
-                else:
-                    imp_names.append(name)
-            self.add_import_line('from %s%s import %s\n' % (
-                '.' * o.relative, o.id, ', '.join(imp_names)))
-
-    def import_and_export_names(self, module_id: str, relative: int, names: Iterable[str]) -> None:
-        """Import names from a module and export them (via from ... import x as x)."""
-        if names and module_id:
-            full_module_name = '%s%s' % ('.' * relative, module_id)
-            imported_names = ', '.join(['%s as %s' % (name, name) for name in names])
-            self.add_import_line('from %s import %s\n' % (full_module_name, imported_names))
-            for name in names:
-                self.record_name(name)
+                if o.id:
+                    for name in sub_names:
+                        self.import_tracker.require_name(name)
 
     def visit_import(self, o: Import) -> None:
         for id, as_id in o.ids:
+            self.import_tracker.add_import(id, as_id)
             if as_id is None:
                 target_name = id.split('.')[0]
             else:
                 target_name = as_id
-            if self._all_ and target_name in self._all_ and (as_id is not None or
-                                                             '.' not in id):
-                self.add_import_line('import %s as %s\n' % (id, target_name))
-                self.record_name(target_name)
+            self._vars[-1].append(target_name)
+            self.record_name(target_name)
 
-    def get_init(self, lvalue: str, rvalue: Expression) -> Optional[str]:
+    def get_init(self, lvalue: str, rvalue: Expression,
+                 annotation: Optional[Type] = None) -> Optional[str]:
         """Return initializer for a variable.
 
         Return None if we've generated one already or if the variable is internal.
@@ -481,8 +703,13 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
         if self.is_private_name(lvalue) or self.is_not_in_all(lvalue):
             return None
         self._vars[-1].append(lvalue)
-        typename = self.get_str_type_of_node(rvalue)
-        return '%s%s = ...  # type: %s\n' % (self._indent, lvalue, typename)
+        if annotation is not None:
+            typename = self.print_annotation(annotation)
+        else:
+            typename = self.get_str_type_of_node(rvalue)
+        has_rhs = not (isinstance(rvalue, TempNode) and rvalue.no_rhs)
+        initializer = " = ..." if has_rhs and not self.is_top_level() else ""
+        return '%s%s: %s%s\n' % (self._indent, lvalue, typename, initializer)
 
     def add(self, string: str) -> None:
         """Add text to generated stub."""
@@ -493,8 +720,7 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
 
         The import will be internal to the stub.
         """
-        if name not in self._imports:
-            self._imports.append(name)
+        self.import_tracker.require_name(name)
 
     def add_import_line(self, line: str) -> None:
         """Add a line of text to the import section, unless it's already there."""
@@ -504,10 +730,9 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
     def output(self) -> str:
         """Return the text for the stub."""
         imports = ''
-        if self._imports:
-            imports += 'from typing import %s\n' % ", ".join(sorted(self._imports))
         if self._import_lines:
             imports += ''.join(self._import_lines)
+        imports += ''.join(self.import_tracker.import_lines())
         if imports and self._output:
             imports += '\n'
         return imports + ''.join(self._output)
@@ -554,6 +779,10 @@ class StubGenerator(mypy.traverser.TraverserVisitor):
         self.add_typing_import('Any')
         return 'Any'
 
+    def print_annotation(self, t: Type) -> str:
+        printer = AnnotationPrinter(self)
+        return t.accept(printer)
+
     def is_top_level(self) -> bool:
         """Are we processing the top level of a file?"""
         return self._indent == ''
@@ -586,17 +815,6 @@ def find_self_initializers(fdef: FuncBase) -> List[Tuple[str, Expression]]:
     return results
 
 
-def find_classes(node: MypyFile) -> Set[str]:
-    results = set()  # type: Set[str]
-
-    class ClassTraverser(mypy.traverser.TraverserVisitor):
-        def visit_class_def(self, o: ClassDef) -> None:
-            results.add(o.name)
-
-    node.accept(ClassTraverser())
-    return results
-
-
 def get_qualified_name(o: Expression) -> str:
     if isinstance(o, NameExpr):
         return o.name
@@ -618,8 +836,8 @@ def walk_packages(packages: List[str]) -> Iterator[str]:
 
 def main() -> None:
     options = parse_options(sys.argv[1:])
-    if not os.path.isdir('out'):
-        raise SystemExit('Directory "out" does not exist')
+    if not os.path.isdir(options.output_dir):
+        raise SystemExit('Directory "{}" does not exist'.format(options.output_dir))
     if options.recursive and options.no_import:
         raise SystemExit('recursive stub generation without importing is not currently supported')
     sigs = {}  # type: Any
@@ -636,7 +854,8 @@ def main() -> None:
         class_sigs = dict(find_unique_signatures(all_class_sigs))
     for module in (options.modules if not options.recursive else walk_packages(options.modules)):
         try:
-            generate_stub_for_module(module, 'out',
+            generate_stub_for_module(module,
+                                     output_dir=options.output_dir,
                                      add_header=True,
                                      sigs=sigs,
                                      class_sigs=class_sigs,
@@ -653,6 +872,8 @@ def main() -> None:
 
 
 def parse_options(args: List[str]) -> Options:
+    # TODO: why not use click and reduce the amount of code to maintain
+    # within this module.
     pyversion = defaults.PYTHON3_VERSION
     no_import = False
     recursive = False
@@ -661,8 +882,12 @@ def parse_options(args: List[str]) -> Options:
     search_path = []  # type: List[str]
     interpreter = ''
     include_private = False
+    output_dir = 'out'
     while args and args[0].startswith('-'):
-        if args[0] == '--doc-dir':
+        if args[0] in '-o':
+            output_dir = args[1]
+            args = args[1:]
+        elif args[0] == '--doc-dir':
             doc_dir = args[1]
             args = args[1:]
         elif args[0] == '--search-path':
@@ -692,6 +917,9 @@ def parse_options(args: List[str]) -> Options:
         usage()
     if not interpreter:
         interpreter = sys.executable if pyversion[0] == 3 else default_python2_interpreter()
+    # Create the output folder if it doesn't already exist.
+    if not os.path.exists(output_dir):
+        os.makedirs(output_dir)
     return Options(pyversion=pyversion,
                    no_import=no_import,
                    doc_dir=doc_dir,
@@ -700,7 +928,8 @@ def parse_options(args: List[str]) -> Options:
                    modules=args,
                    ignore_errors=ignore_errors,
                    recursive=recursive,
-                   include_private=include_private)
+                   include_private=include_private,
+                   output_dir=output_dir)
 
 
 def default_python2_interpreter() -> str:
@@ -718,7 +947,8 @@ def default_python2_interpreter() -> str:
 def usage() -> None:
     usage = textwrap.dedent("""\
         usage: stubgen [--py2] [--no-import] [--doc-dir PATH]
-                       [--search-path PATH] [-p PATH] MODULE ...
+                       [--search-path PATH] [-p PATH] [-o PATH]
+                       MODULE ...
 
         Generate draft stubs for modules.
 
@@ -743,6 +973,7 @@ def usage() -> None:
                           (currently only used if --no-import is given)
           -p PATH         use Python interpreter at PATH (only works for
                           Python 2 right now)
+          -o PATH         change the output folder [default: out]
           -h, --help      print this help message and exit
     """.rstrip())
 
diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py
index 92b78fa..fee521d 100644
--- a/mypy/stubgenc.py
+++ b/mypy/stubgenc.py
@@ -6,7 +6,7 @@ The public interface is via the mypy.stubgen module.
 import importlib
 import os.path
 import re
-from typing import List, Dict, Tuple
+from typing import List, Dict, Tuple, Optional
 from types import ModuleType
 
 from mypy.stubutil import (
@@ -102,9 +102,9 @@ def generate_c_function_stub(module: ModuleType,
                              name: str,
                              obj: object,
                              output: List[str],
-                             self_var: str = None,
+                             self_var: Optional[str] = None,
                              sigs: Dict[str, str] = {},
-                             class_name: str = None,
+                             class_name: Optional[str] = None,
                              class_sigs: Dict[str, str] = {},
                              ) -> None:
     if self_var:
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 589e9b8..e5034cd 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -1,10 +1,11 @@
-from typing import List, Optional, Dict, Callable, cast
+from typing import List, Optional, Dict, Callable, Tuple, Iterator, Set, Union, cast
+from contextlib import contextmanager
 
 from mypy.types import (
-    Type, AnyType, UnboundType, TypeVisitor, FormalArgument, NoneTyp,
+    Type, AnyType, UnboundType, TypeVisitor, FormalArgument, NoneTyp, function_type,
     Instance, TypeVarType, CallableType, TupleType, TypedDictType, UnionType, Overloaded,
-    ErasedType, TypeList, PartialType, DeletedType, UninhabitedType, TypeType,
-    is_named_instance
+    ErasedType, TypeList, PartialType, DeletedType, UninhabitedType, TypeType, is_named_instance,
+    FunctionLike, TypeOfAny
 )
 import mypy.applytype
 import mypy.constraints
@@ -13,15 +14,22 @@ from mypy.erasetype import erase_type
 # import mypy.solve
 from mypy import messages, sametypes
 from mypy.nodes import (
-    CONTRAVARIANT, COVARIANT,
-    ARG_POS, ARG_OPT, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2,
+    FuncBase, Var, Decorator, OverloadedFuncDef, TypeInfo, CONTRAVARIANT, COVARIANT,
+    ARG_POS, ARG_OPT, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2
 )
 from mypy.maptype import map_instance_to_supertype
+from mypy.expandtype import expand_type_by_instance
 from mypy.sametypes import is_same_type
 
 from mypy import experiments
 
 
+# Flags for detected protocol members
+IS_SETTABLE = 1
+IS_CLASSVAR = 2
+IS_CLASS_OR_STATIC = 3
+
+
 TypeParameterChecker = Callable[[Type, Type, int], bool]
 
 
@@ -36,7 +44,8 @@ def check_type_parameter(lefta: Type, righta: Type, variance: int) -> bool:
 
 def is_subtype(left: Type, right: Type,
                type_parameter_checker: TypeParameterChecker = check_type_parameter,
-               *, ignore_pos_arg_names: bool = False) -> bool:
+               *, ignore_pos_arg_names: bool = False,
+               ignore_declared_variance: bool = False) -> bool:
     """Is 'left' subtype of 'right'?
 
     Also consider Any to be a subtype of any type, and vice versa. This
@@ -70,7 +79,8 @@ def is_subtype(left: Type, right: Type,
             return True
         # otherwise, fall through
     return left.accept(SubtypeVisitor(right, type_parameter_checker,
-                                      ignore_pos_arg_names=ignore_pos_arg_names))
+                                      ignore_pos_arg_names=ignore_pos_arg_names,
+                                      ignore_declared_variance=ignore_declared_variance))
 
 
 def is_subtype_ignoring_tvars(left: Type, right: Type) -> bool:
@@ -94,10 +104,12 @@ class SubtypeVisitor(TypeVisitor[bool]):
 
     def __init__(self, right: Type,
                  type_parameter_checker: TypeParameterChecker,
-                 *, ignore_pos_arg_names: bool = False) -> None:
+                 *, ignore_pos_arg_names: bool = False,
+                 ignore_declared_variance: bool = False) -> None:
         self.right = right
         self.check_type_parameter = type_parameter_checker
         self.ignore_pos_arg_names = ignore_pos_arg_names
+        self.ignore_declared_variance = ignore_declared_variance
 
     # visit_x(left) means: is left (which is an instance of X) a subtype of
     # right?
@@ -111,7 +123,9 @@ class SubtypeVisitor(TypeVisitor[bool]):
     def visit_none_type(self, left: NoneTyp) -> bool:
         if experiments.STRICT_OPTIONAL:
             return (isinstance(self.right, NoneTyp) or
-                    is_named_instance(self.right, 'builtins.object'))
+                    is_named_instance(self.right, 'builtins.object') or
+                    isinstance(self.right, Instance) and self.right.type.is_protocol and
+                    not self.right.type.protocol_members)
         else:
             return True
 
@@ -131,30 +145,40 @@ class SubtypeVisitor(TypeVisitor[bool]):
         if isinstance(right, TupleType) and right.fallback.type.is_enum:
             return is_subtype(left, right.fallback)
         if isinstance(right, Instance):
-            # NOTO: left.type.mro may be None in quick mode if there
+            if right.type.is_cached_subtype_check(left, right):
+                return True
+            # NOTE: left.type.mro may be None in quick mode if there
             # was an error somewhere.
             if left.type.mro is not None:
                 for base in left.type.mro:
+                    # TODO: Also pass recursively ignore_declared_variance
                     if base._promote and is_subtype(
                             base._promote, self.right, self.check_type_parameter,
                             ignore_pos_arg_names=self.ignore_pos_arg_names):
+                        right.type.record_subtype_cache_entry(left, right)
                         return True
             rname = right.type.fullname()
-            if not left.type.has_base(rname) and rname != 'builtins.object':
-                return False
-
-            # Map left type to corresponding right instances.
-            t = map_instance_to_supertype(left, right.type)
-
-            return all(self.check_type_parameter(lefta, righta, tvar.variance)
-                       for lefta, righta, tvar in
-                       zip(t.args, right.args, right.type.defn.type_vars))
+            # Always try a nominal check if possible, since there
+            # might be errors that a user wants to silence *once*.
+            if ((left.type.has_base(rname) or rname == 'builtins.object') and
+                    not self.ignore_declared_variance):
+                # Map left type to corresponding right instances.
+                t = map_instance_to_supertype(left, right.type)
+                nominal = all(self.check_type_parameter(lefta, righta, tvar.variance)
+                              for lefta, righta, tvar in
+                              zip(t.args, right.args, right.type.defn.type_vars))
+                if nominal:
+                    right.type.record_subtype_cache_entry(left, right)
+                return nominal
+            if right.type.is_protocol and is_protocol_implementation(left, right):
+                return True
+            return False
         if isinstance(right, TypeType):
             item = right.item
             if isinstance(item, TupleType):
                 item = item.fallback
             if is_named_instance(left, 'builtins.type'):
-                return is_subtype(TypeType(AnyType()), right)
+                return is_subtype(TypeType(AnyType(TypeOfAny.special_form)), right)
             if left.type.is_metaclass():
                 if isinstance(item, AnyType):
                     return True
@@ -164,7 +188,14 @@ class SubtypeVisitor(TypeVisitor[bool]):
                             and is_named_instance(item, 'enum.Enum')):
                         return True
                     return is_named_instance(item, 'builtins.object')
-        return False
+        if isinstance(right, CallableType):
+            # Special case: Instance can be a subtype of Callable.
+            call = find_member('__call__', left, left)
+            if call:
+                return is_subtype(call, right)
+            return False
+        else:
+            return False
 
     def visit_type_var(self, left: TypeVarType) -> bool:
         right = self.right
@@ -204,7 +235,7 @@ class SubtypeVisitor(TypeVisitor[bool]):
                 if right.args:
                     iter_type = right.args[0]
                 else:
-                    iter_type = AnyType()
+                    iter_type = AnyType(TypeOfAny.special_form)
                 return all(is_subtype(li, iter_type) for li in left.items)
             elif is_subtype(left.fallback, right, self.check_type_parameter):
                 return True
@@ -303,6 +334,205 @@ class SubtypeVisitor(TypeVisitor[bool]):
         return False
 
 
+ at contextmanager
+def pop_on_exit(stack: List[Tuple[Instance, Instance]],
+                left: Instance, right: Instance) -> Iterator[None]:
+    stack.append((left, right))
+    yield
+    stack.pop()
+
+
+def is_protocol_implementation(left: Instance, right: Instance,
+                               proper_subtype: bool = False) -> bool:
+    """Check whether 'left' implements the protocol 'right'.
+
+    If 'proper_subtype' is True, then check for a proper subtype.
+    Treat recursive protocols by using the 'assuming' structural subtype matrix
+    (in sparse representation, i.e. as a list of pairs (subtype, supertype));
+    see also the comment in nodes.TypeInfo. When we enter a check for classes
+    (A, P), defined as follows::
+
+      class P(Protocol):
+          def f(self) -> P: ...
+      class A:
+          def f(self) -> A: ...
+
+    this results in A being a subtype of P without infinite recursion.
+    On every false result, we pop the assumption, thus avoiding an infinite recursion
+    as well.
+    """
+    assert right.type.is_protocol
+    assuming = right.type.assuming_proper if proper_subtype else right.type.assuming
+    for (l, r) in reversed(assuming):
+        if sametypes.is_same_type(l, left) and sametypes.is_same_type(r, right):
+            return True
+    with pop_on_exit(assuming, left, right):
+        for member in right.type.protocol_members:
+            # nominal subtyping currently ignores '__init__' and '__new__' signatures
+            if member in ('__init__', '__new__'):
+                continue
+            # The third argument below indicates to what self type is bound.
+            # We always bind self to the subtype (similarly to nominal types).
+            supertype = find_member(member, right, left)
+            assert supertype is not None
+            subtype = find_member(member, left, left)
+            # Useful for debugging:
+            # print(member, 'of', left, 'has type', subtype)
+            # print(member, 'of', right, 'has type', supertype)
+            if not subtype:
+                return False
+            if not proper_subtype:
+                # Nominal check currently ignores arg names
+                is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True)
+            else:
+                is_compat = is_proper_subtype(subtype, supertype)
+            if not is_compat:
+                return False
+            if isinstance(subtype, NoneTyp) and isinstance(supertype, CallableType):
+                # We want the __hash__ = None idiom to work even without --strict-optional
+                return False
+            subflags = get_member_flags(member, left.type)
+            superflags = get_member_flags(member, right.type)
+            if IS_SETTABLE in superflags:
+                # Check opposite direction for settable attributes.
+                if not is_subtype(supertype, subtype):
+                    return False
+            if (IS_CLASSVAR in subflags) != (IS_CLASSVAR in superflags):
+                return False
+            if IS_SETTABLE in superflags and IS_SETTABLE not in subflags:
+                return False
+            # This rule is copied from nominal check in checker.py
+            if IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags:
+                return False
+    right.type.record_subtype_cache_entry(left, right, proper_subtype)
+    return True
+
+
+def find_member(name: str, itype: Instance, subtype: Type) -> Optional[Type]:
+    """Find the type of member by 'name' in 'itype's TypeInfo.
+
+    Fin the member type after applying type arguments from 'itype', and binding
+    'self' to 'subtype'. Return None if member was not found.
+    """
+    # TODO: this code shares some logic with checkmember.analyze_member_access,
+    # consider refactoring.
+    info = itype.type
+    method = info.get_method(name)
+    if method:
+        if method.is_property:
+            assert isinstance(method, OverloadedFuncDef)
+            dec = method.items[0]
+            assert isinstance(dec, Decorator)
+            return find_node_type(dec.var, itype, subtype)
+        return find_node_type(method, itype, subtype)
+    else:
+        # No such method; maybe it's a variable or a decorator?
+        node = info.get(name)
+        if not node:
+            v = None
+        else:
+            v = node.node
+        if isinstance(v, Decorator):
+            v = v.var
+        if isinstance(v, Var):
+            return find_node_type(v, itype, subtype)
+        if not v and name not in ['__getattr__', '__setattr__', '__getattribute__']:
+            for method_name in ('__getattribute__', '__getattr__'):
+                # Normally, mypy assumes that instances that define __getattr__ have all
+                # attributes with the corresponding return type. If this produces
+                # too many false negatives, it could be prohibited for
+                # structural subtyping.
+                method = info.get_method(method_name)
+                if method and method.info.fullname() != 'builtins.object':
+                    getattr_type = find_node_type(method, itype, subtype)
+                    if isinstance(getattr_type, CallableType):
+                        return getattr_type.ret_type
+        if itype.type.fallback_to_any:
+            return AnyType(TypeOfAny.special_form)
+    return None
+
+
+def get_member_flags(name: str, info: TypeInfo) -> Set[int]:
+    """Detect whether a member 'name' is settable, whether it is an
+    instance or class variable, and whether it is a class or static method.
+
+    The flags are defined as follows:
+    * IS_SETTABLE: whether this attribute can be set, not set for methods and
+      non-settable properties;
+    * IS_CLASSVAR: set if the variable is annotated as 'x: ClassVar[t]';
+    * IS_CLASS_OR_STATIC: set for methods decorated with @classmethod or
+      with @staticmethod.
+    """
+    method = info.get_method(name)
+    setattr_meth = info.get_method('__setattr__')
+    if method:
+        # This could be a settable property.
+        if method.is_property:
+            assert isinstance(method, OverloadedFuncDef)
+            dec = method.items[0]
+            assert isinstance(dec, Decorator)
+            if dec.var.is_settable_property or setattr_meth:
+                return {IS_SETTABLE}
+        return set()
+    node = info.get(name)
+    if not node:
+        if setattr_meth:
+            return {IS_SETTABLE}
+        return set()
+    v = node.node
+    if isinstance(v, Decorator):
+        if v.var.is_staticmethod or v.var.is_classmethod:
+            return {IS_CLASS_OR_STATIC}
+    # just a variable
+    if isinstance(v, Var):
+        flags = {IS_SETTABLE}
+        if v.is_classvar:
+            flags.add(IS_CLASSVAR)
+        return flags
+    return set()
+
+
+def find_node_type(node: Union[Var, FuncBase], itype: Instance, subtype: Type) -> Type:
+    """Find type of a variable or method 'node' (maybe also a decorated method).
+    Apply type arguments from 'itype', and bind 'self' to 'subtype'.
+    """
+    from mypy.checkmember import bind_self
+    if isinstance(node, FuncBase):
+        typ = function_type(node,
+                            fallback=Instance(itype.type.mro[-1], []))  # type: Optional[Type]
+    else:
+        typ = node.type
+    if typ is None:
+        return AnyType(TypeOfAny.from_error)
+    # We don't need to bind 'self' for static methods, since there is no 'self'.
+    if isinstance(node, FuncBase) or isinstance(typ, FunctionLike) and not node.is_staticmethod:
+        assert isinstance(typ, FunctionLike)
+        signature = bind_self(typ, subtype)
+        if node.is_property:
+            assert isinstance(signature, CallableType)
+            typ = signature.ret_type
+        else:
+            typ = signature
+    itype = map_instance_to_supertype(itype, node.info)
+    typ = expand_type_by_instance(typ, itype)
+    return typ
+
+
+def non_method_protocol_members(tp: TypeInfo) -> List[str]:
+    """Find all non-callable members of a protocol."""
+
+    assert tp.is_protocol
+    result = []  # type: List[str]
+    anytype = AnyType(TypeOfAny.special_form)
+    instance = Instance(tp, [anytype] * len(tp.defn.type_vars))
+
+    for member in tp.protocol_members:
+        typ = find_member(member, instance, instance)
+        if not isinstance(typ, CallableType):
+            result.append(member)
+    return result
+
+
 def is_callable_subtype(left: CallableType, right: CallableType,
                         ignore_return: bool = False,
                         ignore_pos_arg_names: bool = False,
@@ -549,8 +779,11 @@ def restrict_subtype_away(t: Type, s: Type) -> Type:
     if isinstance(t, UnionType):
         # Since runtime type checks will ignore type arguments, erase the types.
         erased_s = erase_type(s)
+        # TODO: Implement more robust support for runtime isinstance() checks,
+        # see issue #3827
         new_items = [item for item in t.relevant_items()
-                     if (not is_proper_subtype(erase_type(item), erased_s)
+                     if (not (is_proper_subtype(erase_type(item), erased_s) or
+                              is_proper_subtype(item, erased_s))
                          or isinstance(item, AnyType))]
         return UnionType.make_union(new_items)
     else:
@@ -602,26 +835,38 @@ class ProperSubtypeVisitor(TypeVisitor[bool]):
     def visit_instance(self, left: Instance) -> bool:
         right = self.right
         if isinstance(right, Instance):
+            if right.type.is_cached_subtype_check(left, right, proper_subtype=True):
+                return True
             for base in left.type.mro:
                 if base._promote and is_proper_subtype(base._promote, right):
+                    right.type.record_subtype_cache_entry(left, right, proper_subtype=True)
                     return True
 
-            if not left.type.has_base(right.type.fullname()):
-                return False
-
-            def check_argument(leftarg: Type, rightarg: Type, variance: int) -> bool:
-                if variance == COVARIANT:
-                    return is_proper_subtype(leftarg, rightarg)
-                elif variance == CONTRAVARIANT:
-                    return is_proper_subtype(rightarg, leftarg)
-                else:
-                    return sametypes.is_same_type(leftarg, rightarg)
-
-            # Map left type to corresponding right instances.
-            left = map_instance_to_supertype(left, right.type)
-
-            return all(check_argument(ta, ra, tvar.variance) for ta, ra, tvar in
-                       zip(left.args, right.args, right.type.defn.type_vars))
+            if left.type.has_base(right.type.fullname()):
+                def check_argument(leftarg: Type, rightarg: Type, variance: int) -> bool:
+                    if variance == COVARIANT:
+                        return is_proper_subtype(leftarg, rightarg)
+                    elif variance == CONTRAVARIANT:
+                        return is_proper_subtype(rightarg, leftarg)
+                    else:
+                        return sametypes.is_same_type(leftarg, rightarg)
+                # Map left type to corresponding right instances.
+                left = map_instance_to_supertype(left, right.type)
+
+                nominal = all(check_argument(ta, ra, tvar.variance) for ta, ra, tvar in
+                              zip(left.args, right.args, right.type.defn.type_vars))
+                if nominal:
+                    right.type.record_subtype_cache_entry(left, right, proper_subtype=True)
+                return nominal
+            if (right.type.is_protocol and
+                    is_protocol_implementation(left, right, proper_subtype=True)):
+                return True
+            return False
+        if isinstance(right, CallableType):
+            call = find_member('__call__', left, left)
+            if call:
+                return is_proper_subtype(call, right)
+            return False
         return False
 
     def visit_type_var(self, left: TypeVarType) -> bool:
@@ -717,19 +962,14 @@ class ProperSubtypeVisitor(TypeVisitor[bool]):
         return False
 
 
-def is_more_precise(t: Type, s: Type) -> bool:
-    """Check if t is a more precise type than s.
+def is_more_precise(left: Type, right: Type) -> bool:
+    """Check if left is a more precise type than right.
 
-    A t is a proper subtype of s, t is also more precise than s. Also, if
-    s is Any, t is more precise than s for any t. Finally, if t is the same
-    type as s, t is more precise than s.
+    If left is a proper subtype of right, then left is also more precise than
+    right. Also, if right is Any, left is more precise than right for
+    any left.
     """
     # TODO Should List[int] be more precise than List[Any]?
-    if isinstance(s, AnyType):
+    if isinstance(right, AnyType):
         return True
-    if isinstance(s, Instance):
-        if isinstance(t, CallableType):
-            # Fall back to subclass check and ignore other properties of the callable.
-            return is_proper_subtype(t.fallback, s)
-        return is_proper_subtype(t, s)
-    return sametypes.is_same_type(t, s)
+    return is_proper_subtype(left, right)
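
The new subtypes.py machinery above (is_protocol_implementation, find_member, get_member_flags) checks protocol members structurally, and visit_instance now also accepts an instance where a Callable is expected by looking up its __call__ member. A sketch of both behaviours from the user's side, assuming Protocol can be imported from typing_extensions (in newer Pythons it lives in typing); the class names are illustrative only:

    from typing import Callable
    from typing_extensions import Protocol  # assumption: package is installed

    class SupportsClose(Protocol):
        def close(self) -> None: ...

    class Resource:
        # No inheritance from SupportsClose: the protocol check above
        # matches it structurally, member by member.
        def close(self) -> None:
            print('closed')

    def shutdown(resource: SupportsClose) -> None:
        resource.close()

    class Adder:
        def __call__(self, x: int, y: int) -> int:
            return x + y

    def apply(f: Callable[[int, int], int]) -> int:
        return f(2, 3)

    shutdown(Resource())   # accepted via structural subtyping
    print(apply(Adder()))  # accepted because Adder defines __call__
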
diff --git a/mypy/test/data.py b/mypy/test/data.py
index 38adf18..a080ede 100644
--- a/mypy/test/data.py
+++ b/mypy/test/data.py
@@ -21,7 +21,7 @@ def parse_test_cases(
         perform: Optional[Callable[['DataDrivenTestCase'], None]],
         base_path: str = '.',
         optional_out: bool = False,
-        include_path: str = None,
+        include_path: Optional[str] = None,
         native_sep: bool = False) -> List['DataDrivenTestCase']:
     """Parse a file with test case descriptions.
 
@@ -38,10 +38,10 @@ def parse_test_cases(
     if not include_path:
         include_path = os.path.dirname(path)
     with open(path, encoding='utf-8') as f:
-        l = f.readlines()
-    for i in range(len(l)):
-        l[i] = l[i].rstrip('\n')
-    p = parse_test_data(l, path)
+        lst = f.readlines()
+    for i in range(len(lst)):
+        lst[i] = lst[i].rstrip('\n')
+    p = parse_test_data(lst, path)
     out = []  # type: List[DataDrivenTestCase]
 
     # Process the parsed items. Each item has a header of form [id args],
@@ -556,6 +556,9 @@ class MypyDataCase(pytest.Item):  # type: ignore  # inheriting from Any
 
 
 class DataSuite:
+    def __init__(self, *, update_data: bool) -> None:
+        self.update_data = update_data
+
     @classmethod
     def cases(cls) -> List[DataDrivenTestCase]:
         return []
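
With the constructor above moved into DataSuite itself, the per-suite __init__ overrides deleted in the files below become unnecessary: a suite only declares cases() as a classmethod and puts per-case logic in run_case(). A minimal standalone sketch of the resulting shape, not mypy's actual classes:

    from typing import List

    class Suite:
        def __init__(self, *, update_data: bool) -> None:
            self.update_data = update_data

        @classmethod
        def cases(cls) -> List[str]:
            return []

        def run_case(self, case: str) -> None:
            raise NotImplementedError

    class ExampleSuite(Suite):
        @classmethod
        def cases(cls) -> List[str]:
            return ['case-1', 'case-2']

        def run_case(self, case: str) -> None:
            print('running', case, 'update_data =', self.update_data)

    if __name__ == '__main__':
        suite = ExampleSuite(update_data=False)
        for name in ExampleSuite.cases():
            suite.run_case(name)
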
diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py
index 2f28ab1..8bd3a61 100644
--- a/mypy/test/helpers.py
+++ b/mypy/test/helpers.py
@@ -87,6 +87,7 @@ def assert_string_arrays_equal(expected: List[str], actual: List[str],
 
 
 def update_testcase_output(testcase: DataDrivenTestCase, output: List[str]) -> None:
+    assert testcase.old_cwd is not None, "test was not properly set up"
     testcase_path = os.path.join(testcase.old_cwd, testcase.file)
     with open(testcase_path) as f:
         data_lines = f.read().splitlines()
diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py
index 3aae19f..10a46e0 100644
--- a/mypy/test/testcheck.py
+++ b/mypy/test/testcheck.py
@@ -72,6 +72,7 @@ files = [
     'check-generic-subtyping.test',
     'check-varargs.test',
     'check-newsyntax.test',
+    'check-protocols.test',
     'check-underscores.test',
     'check-classvar.test',
     'check-enum.test',
@@ -82,8 +83,6 @@ files = [
 
 
 class TypeCheckSuite(DataSuite):
-    def __init__(self, *, update_data: bool = False) -> None:
-        self.update_data = update_data
 
     @classmethod
     def cases(cls) -> List[DataDrivenTestCase]:
@@ -288,10 +287,11 @@ class TypeCheckSuite(DataSuite):
 
     def find_missing_cache_files(self, modules: Dict[str, str],
                                  manager: build.BuildManager) -> Set[str]:
+        ignore_errors = True
         missing = {}
         for id, path in modules.items():
             meta = build.find_cache_meta(id, path, manager)
-            if not build.validate_meta(meta, id, path, manager):
+            if not build.validate_meta(meta, id, path, ignore_errors, manager):
                 missing[id] = path
         return set(missing.values())
 
@@ -329,6 +329,7 @@ class TypeCheckSuite(DataSuite):
             out = []
             for module_name in module_names.split(' '):
                 path = build.find_module(module_name, [test_temp_dir])
+                assert path is not None, "Can't find ad hoc case file"
                 with open(path) as f:
                     program_text = f.read()
                 out.append((module_name, path, program_text))
diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py
index 3452db6..5bf638a 100644
--- a/mypy/test/testcmdline.py
+++ b/mypy/test/testcmdline.py
@@ -14,7 +14,7 @@ from typing import Tuple, List, Dict, Set
 from mypy.myunit import Suite, SkipTestCaseException, AssertionFailure
 from mypy.test.config import test_data_prefix, test_temp_dir
 from mypy.test.data import fix_cobertura_filename
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
 from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages
 from mypy.version import __version__, base_version
 
@@ -28,9 +28,10 @@ cmdline_files = [
 ]
 
 
-class PythonEvaluationSuite(Suite):
+class PythonEvaluationSuite(DataSuite):
 
-    def cases(self) -> List[DataDrivenTestCase]:
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
         c = []  # type: List[DataDrivenTestCase]
         for f in cmdline_files:
             c += parse_test_cases(os.path.join(test_data_prefix, f),
@@ -40,8 +41,12 @@ class PythonEvaluationSuite(Suite):
                                   native_sep=True)
         return c
 
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        test_python_evaluation(testcase)
+
 
 def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
+    assert testcase.old_cwd is not None, "test was not properly set up"
     # Write the program to a file.
     program = '_program.py'
     program_path = os.path.join(test_temp_dir, program)
diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py
index a648782..d6580c4 100644
--- a/mypy/test/testdeps.py
+++ b/mypy/test/testdeps.py
@@ -20,8 +20,6 @@ files = [
 
 
 class GetDependenciesSuite(DataSuite):
-    def __init__(self, *, update_data: bool) -> None:
-        pass
 
     @classmethod
     def cases(cls) -> List[DataDrivenTestCase]:
diff --git a/mypy/test/testdiff.py b/mypy/test/testdiff.py
index 84e5389..3f9d234 100644
--- a/mypy/test/testdiff.py
+++ b/mypy/test/testdiff.py
@@ -20,8 +20,6 @@ files = [
 
 
 class ASTDiffSuite(DataSuite):
-    def __init__(self, *, update_data: bool) -> None:
-        pass
 
     @classmethod
     def cases(cls) -> List[DataDrivenTestCase]:
diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py
index fff2e18..7e442d5 100644
--- a/mypy/test/testfinegrained.py
+++ b/mypy/test/testfinegrained.py
@@ -35,9 +35,6 @@ files = [
 
 
 class FineGrainedSuite(DataSuite):
-    def __init__(self, *, update_data: bool) -> None:
-        pass
-
     @classmethod
     def cases(cls) -> List[DataDrivenTestCase]:
         c = []  # type: List[DataDrivenTestCase]
diff --git a/mypy/test/testinfer.py b/mypy/test/testinfer.py
index 2142456..72bb26f 100644
--- a/mypy/test/testinfer.py
+++ b/mypy/test/testinfer.py
@@ -5,7 +5,8 @@ from typing import List, Optional, Tuple, Union
 from mypy.myunit import Suite, assert_equal, assert_true
 from mypy.checkexpr import map_actuals_to_formals
 from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED
-from mypy.types import AnyType, TupleType, Type
+from mypy.types import AnyType, TupleType, Type, TypeOfAny
+from mypy.typefixture import TypeFixture
 
 
 class MapActualsToFormalsSuite(Suite):
@@ -70,24 +71,25 @@ class MapActualsToFormalsSuite(Suite):
                         [[0]])
 
     def test_tuple_star(self) -> None:
+        any_type = AnyType(TypeOfAny.special_form)
         self.assert_vararg_map(
             [ARG_STAR],
             [ARG_POS],
             [[0]],
-            self.tuple(AnyType()))
+            self.tuple(any_type))
         self.assert_vararg_map(
             [ARG_STAR],
             [ARG_POS, ARG_POS],
             [[0], [0]],
-            self.tuple(AnyType(), AnyType()))
+            self.tuple(any_type, any_type))
         self.assert_vararg_map(
             [ARG_STAR],
             [ARG_POS, ARG_OPT, ARG_OPT],
             [[0], [0], []],
-            self.tuple(AnyType(), AnyType()))
+            self.tuple(any_type, any_type))
 
     def tuple(self, *args: Type) -> TupleType:
-        return TupleType(list(args), None)
+        return TupleType(list(args), TypeFixture().std_tuple)
 
     def test_named_args(self) -> None:
         self.assert_map(
@@ -177,7 +179,7 @@ class MapActualsToFormalsSuite(Suite):
             caller_names,
             callee_kinds,
             callee_names,
-            lambda i: AnyType())
+            lambda i: AnyType(TypeOfAny.special_form))
         assert_equal(result, expected)
 
     def assert_vararg_map(self,
@@ -198,7 +200,7 @@ class MapActualsToFormalsSuite(Suite):
 def expand_caller_kinds(kinds_or_names: List[Union[int, str]]
                         ) -> Tuple[List[int], List[Optional[str]]]:
     kinds = []
-    names = []
+    names = []  # type: List[Optional[str]]
     for k in kinds_or_names:
         if isinstance(k, str):
             kinds.append(ARG_NAMED)
@@ -212,7 +214,7 @@ def expand_caller_kinds(kinds_or_names: List[Union[int, str]]
 def expand_callee_kinds(kinds_and_names: List[Union[int, Tuple[int, str]]]
                         ) -> Tuple[List[int], List[Optional[str]]]:
     kinds = []
-    names = []
+    names = []  # type: List[Optional[str]]
     for v in kinds_and_names:
         if isinstance(v, tuple):
             kinds.append(v[0])
diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py
index 9807098..4694865 100644
--- a/mypy/test/testmerge.py
+++ b/mypy/test/testmerge.py
@@ -2,7 +2,7 @@
 
 import os
 import shutil
-from typing import List, Tuple, Dict
+from typing import List, Tuple, Dict, Optional
 
 from mypy import build
 from mypy.build import BuildManager, BuildSource, State
@@ -20,7 +20,7 @@ from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
 from mypy.test.helpers import assert_string_arrays_equal
 from mypy.test.testtypegen import ignore_node
 from mypy.types import TypeStrVisitor, Type
-from mypy.util import short_type
+from mypy.util import short_type, IdMapper
 
 
 files = [
@@ -37,8 +37,10 @@ AST = 'AST'
 
 class ASTMergeSuite(DataSuite):
     def __init__(self, *, update_data: bool) -> None:
+        super().__init__(update_data=update_data)
         self.str_conv = StrConv(show_ids=True)
-        self.id_mapper = self.str_conv.id_mapper
+        assert self.str_conv.id_mapper is not None
+        self.id_mapper = self.str_conv.id_mapper  # type: IdMapper
         self.type_str_conv = TypeStrVisitor(self.id_mapper)
 
     @classmethod
@@ -64,6 +66,7 @@ class ASTMergeSuite(DataSuite):
 
         main_src = '\n'.join(testcase.input)
         messages, manager, graph = self.build(main_src)
+        assert manager is not None, 'cases where CompileError occurred should not be run'
 
         a = []
         if messages:
@@ -96,7 +99,7 @@ class ASTMergeSuite(DataSuite):
             'Invalid output ({}, line {})'.format(testcase.file,
                                                   testcase.line))
 
-    def build(self, source: str) -> Tuple[List[str], BuildManager, Dict[str, State]]:
+    def build(self, source: str) -> Tuple[List[str], Optional[BuildManager], Dict[str, State]]:
         options = Options()
         options.use_builtins_fixtures = True
         options.show_traceback = True
diff --git a/mypy/test/testparse.py b/mypy/test/testparse.py
index d6789c0..ef9632a 100644
--- a/mypy/test/testparse.py
+++ b/mypy/test/testparse.py
@@ -7,25 +7,29 @@ from typing import List
 from mypy import defaults
 from mypy.myunit import Suite, AssertionFailure
 from mypy.test.helpers import assert_string_arrays_equal
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
 from mypy.test import config
 from mypy.parse import parse
 from mypy.errors import CompileError
 from mypy.options import Options
 
 
-class ParserSuite(Suite):
+class ParserSuite(DataSuite):
     parse_files = ['parse.test',
                    'parse-python2.test']
 
-    def cases(self) -> List[DataDrivenTestCase]:
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
         # The test case descriptions are stored in data files.
         c = []  # type: List[DataDrivenTestCase]
-        for f in self.parse_files:
+        for f in cls.parse_files:
             c += parse_test_cases(
                 os.path.join(config.test_data_prefix, f), test_parser)
         return c
 
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        test_parser(testcase)
+
 
 def test_parser(testcase: DataDrivenTestCase) -> None:
     """Perform a single parser test case.
@@ -57,13 +61,17 @@ def test_parser(testcase: DataDrivenTestCase) -> None:
 INPUT_FILE_NAME = 'file'
 
 
-class ParseErrorSuite(Suite):
-    def cases(self) -> List[DataDrivenTestCase]:
+class ParseErrorSuite(DataSuite):
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
         # Test case descriptions are in an external file.
         return parse_test_cases(os.path.join(config.test_data_prefix,
                                              'parse-errors.test'),
                                 test_parse_error)
 
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        test_parse_error(testcase)
+
 
 def test_parse_error(testcase: DataDrivenTestCase) -> None:
     try:
diff --git a/mypy/test/testpythoneval.py b/mypy/test/testpythoneval.py
index 602692e..635b99c 100644
--- a/mypy/test/testpythoneval.py
+++ b/mypy/test/testpythoneval.py
@@ -18,16 +18,14 @@ import re
 import subprocess
 import sys
 
-import typing
-from typing import Dict, List, Tuple
+import pytest  # type: ignore  # no pytest in typeshed
+from typing import Dict, List, Tuple, Optional
 
-from mypy.myunit import Suite, SkipTestCaseException
 from mypy.test.config import test_data_prefix, test_temp_dir
-from mypy.test.data import DataDrivenTestCase, parse_test_cases
+from mypy.test.data import DataDrivenTestCase, parse_test_cases, DataSuite
 from mypy.test.helpers import assert_string_arrays_equal
 from mypy.util import try_find_python2_interpreter
 
-
 # Files which contain test case descriptions.
 python_eval_files = ['pythoneval.test',
                      'python2eval.test']
@@ -39,8 +37,9 @@ python3_path = sys.executable
 program_re = re.compile(r'\b_program.py\b')
 
 
-class PythonEvaluationSuite(Suite):
-    def cases(self) -> List[DataDrivenTestCase]:
+class PythonEvaluationSuite(DataSuite):
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
         c = []  # type: List[DataDrivenTestCase]
         for f in python_eval_files:
             c += parse_test_cases(os.path.join(test_data_prefix, f),
@@ -51,6 +50,9 @@ class PythonEvaluationSuite(Suite):
                     test_python_evaluation, test_temp_dir, True)
         return c
 
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        test_python_evaluation(testcase)
+
 
 def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
     """Runs Mypy in a subprocess.
@@ -58,6 +60,7 @@ def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
     If this passes without errors, executes the script again with a given Python
     version.
     """
+    assert testcase.old_cwd is not None, "test was not properly set up"
     mypy_cmdline = [
         python3_path,
         os.path.join(testcase.old_cwd, 'scripts', 'mypy'),
@@ -67,9 +70,11 @@ def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
     if py2:
         mypy_cmdline.append('--py2')
         interpreter = try_find_python2_interpreter()
-        if not interpreter:
+        if interpreter is None:
             # Skip, can't find a Python 2 interpreter.
-            raise SkipTestCaseException()
+            pytest.skip()
+            # placate the type checker
+            return
     else:
         interpreter = python3_path
 
@@ -110,7 +115,7 @@ def adapt_output(testcase: DataDrivenTestCase) -> List[str]:
 
 
 def run(
-    cmdline: List[str], *, env: Dict[str, str] = None, timeout: int = 30
+    cmdline: List[str], *, env: Optional[Dict[str, str]] = None, timeout: int = 30
 ) -> Tuple[int, List[str]]:
     """A poor man's subprocess.run() for 3.3 and 3.4 compatibility."""
     process = subprocess.Popen(
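
The hunk above drops SkipTestCaseException in favour of pytest.skip() when no Python 2 interpreter can be found. A small sketch of the same skip pattern, with a made-up test name and a shutil.which() lookup standing in for try_find_python2_interpreter():

    import shutil

    import pytest

    def test_needs_python2() -> None:
        interpreter = shutil.which('python2')
        if interpreter is None:
            # pytest.skip() raises, so nothing below runs when Python 2 is absent.
            pytest.skip('no Python 2 interpreter found')
        assert interpreter
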
diff --git a/mypy/test/testsemanal.py b/mypy/test/testsemanal.py
index d339a83..8ea7c1d 100644
--- a/mypy/test/testsemanal.py
+++ b/mypy/test/testsemanal.py
@@ -6,11 +6,10 @@ from typing import Dict, List
 
 from mypy import build
 from mypy.build import BuildSource
-from mypy.myunit import Suite
 from mypy.test.helpers import (
     assert_string_arrays_equal, normalize_error_messages, testfile_pyversion,
 )
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
 from mypy.test.config import test_data_prefix, test_temp_dir
 from mypy.errors import CompileError
 from mypy.nodes import TypeInfo
@@ -42,8 +41,9 @@ def get_semanal_options() -> Options:
     return options
 
 
-class SemAnalSuite(Suite):
-    def cases(self) -> List[DataDrivenTestCase]:
+class SemAnalSuite(DataSuite):
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
         c = []  # type: List[DataDrivenTestCase]
         for f in semanal_files:
             c += parse_test_cases(os.path.join(test_data_prefix, f),
@@ -53,6 +53,9 @@ class SemAnalSuite(Suite):
                                   native_sep=True)
         return c
 
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        test_semanal(testcase)
+
 
 def test_semanal(testcase: DataDrivenTestCase) -> None:
     """Perform a semantic analysis test case.
@@ -102,8 +105,9 @@ def test_semanal(testcase: DataDrivenTestCase) -> None:
 semanal_error_files = ['semanal-errors.test']
 
 
-class SemAnalErrorSuite(Suite):
-    def cases(self) -> List[DataDrivenTestCase]:
+class SemAnalErrorSuite(DataSuite):
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
         # Read test cases from test case description files.
         c = []  # type: List[DataDrivenTestCase]
         for f in semanal_error_files:
@@ -111,6 +115,9 @@ class SemAnalErrorSuite(Suite):
                                   test_semanal_error, test_temp_dir, optional_out=True)
         return c
 
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        test_semanal_error(testcase)
+
 
 def test_semanal_error(testcase: DataDrivenTestCase) -> None:
     """Perform a test case."""
@@ -137,15 +144,16 @@ def test_semanal_error(testcase: DataDrivenTestCase) -> None:
 semanal_symtable_files = ['semanal-symtable.test']
 
 
-class SemAnalSymtableSuite(Suite):
-    def cases(self) -> List[DataDrivenTestCase]:
+class SemAnalSymtableSuite(DataSuite):
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
         c = []  # type: List[DataDrivenTestCase]
         for f in semanal_symtable_files:
             c += parse_test_cases(os.path.join(test_data_prefix, f),
-                                  self.run_test, test_temp_dir)
+                                  None, test_temp_dir)
         return c
 
-    def run_test(self, testcase: DataDrivenTestCase) -> None:
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
         """Perform a test case."""
         try:
             # Build test case input.
@@ -175,16 +183,17 @@ class SemAnalSymtableSuite(Suite):
 semanal_typeinfo_files = ['semanal-typeinfo.test']
 
 
-class SemAnalTypeInfoSuite(Suite):
-    def cases(self) -> List[DataDrivenTestCase]:
+class SemAnalTypeInfoSuite(DataSuite):
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
         """Test case descriptions"""
         c = []  # type: List[DataDrivenTestCase]
         for f in semanal_typeinfo_files:
             c += parse_test_cases(os.path.join(test_data_prefix, f),
-                                  self.run_test, test_temp_dir)
+                                  None, test_temp_dir)
         return c
 
-    def run_test(self, testcase: DataDrivenTestCase) -> None:
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
         """Perform a test case."""
         try:
             # Build test case input.
diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py
index 9f7ecfb..539080c 100644
--- a/mypy/test/teststubgen.py
+++ b/mypy/test/teststubgen.py
@@ -13,7 +13,7 @@ from typing import List, Tuple
 
 from mypy.myunit import Suite, AssertionFailure, assert_equal
 from mypy.test.helpers import assert_string_arrays_equal
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.data import DataSuite, parse_test_cases, DataDrivenTestCase
 from mypy.test import config
 from mypy.parse import parse
 from mypy.errors import CompileError
@@ -95,15 +95,19 @@ class StubgenUtilSuite(Suite):
         assert_equal(infer_sig_from_docstring('\nfunc x', 'func'), None)
 
 
-class StubgenPythonSuite(Suite):
+class StubgenPythonSuite(DataSuite):
     test_data_files = ['stubgen.test']
 
-    def cases(self) -> List[DataDrivenTestCase]:
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
         c = []  # type: List[DataDrivenTestCase]
-        for path in self.test_data_files:
+        for path in cls.test_data_files:
             c += parse_test_cases(os.path.join(config.test_data_prefix, path), test_stubgen)
         return c
 
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        test_stubgen(testcase)
+
 
 def parse_flags(program_text: str) -> Options:
     flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE)
diff --git a/mypy/test/testtransform.py b/mypy/test/testtransform.py
index 0dcdd1d..852b967 100644
--- a/mypy/test/testtransform.py
+++ b/mypy/test/testtransform.py
@@ -6,20 +6,18 @@ from typing import Dict, List
 
 from mypy import build
 from mypy.build import BuildSource
-from mypy.myunit import Suite
 from mypy.test.helpers import (
     assert_string_arrays_equal, testfile_pyversion, normalize_error_messages
 )
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
 from mypy.test.config import test_data_prefix, test_temp_dir
 from mypy.errors import CompileError
-from mypy.nodes import TypeInfo
 from mypy.treetransform import TransformVisitor
 from mypy.types import Type
 from mypy.options import Options
 
 
-class TransformSuite(Suite):
+class TransformSuite(DataSuite):
     # Reuse semantic analysis test cases.
     transform_files = ['semanal-basic.test',
                        'semanal-expressions.test',
@@ -30,15 +28,19 @@ class TransformSuite(Suite):
                        'semanal-abstractclasses.test',
                        'semanal-python2.test']
 
-    def cases(self) -> List[DataDrivenTestCase]:
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
         c = []  # type: List[DataDrivenTestCase]
-        for f in self.transform_files:
+        for f in cls.transform_files:
             c += parse_test_cases(os.path.join(test_data_prefix, f),
                                   test_transform,
                                   base_path=test_temp_dir,
                                   native_sep=True)
         return c
 
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        test_transform(testcase)
+
 
 def test_transform(testcase: DataDrivenTestCase) -> None:
     """Perform an identity transform test case."""
diff --git a/mypy/test/testtypegen.py b/mypy/test/testtypegen.py
index e60d2e7..7376d51 100644
--- a/mypy/test/testtypegen.py
+++ b/mypy/test/testtypegen.py
@@ -7,9 +7,8 @@ from typing import Set, List
 
 from mypy import build
 from mypy.build import BuildSource
-from mypy.myunit import Suite
 from mypy.test import config
-from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
 from mypy.test.helpers import assert_string_arrays_equal
 from mypy.util import short_type
 from mypy.nodes import (
@@ -20,18 +19,19 @@ from mypy.errors import CompileError
 from mypy.options import Options
 
 
-class TypeExportSuite(Suite):
+class TypeExportSuite(DataSuite):
     # List of files that contain test case descriptions.
     files = ['typexport-basic.test']
 
-    def cases(self) -> List[DataDrivenTestCase]:
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
         c = []  # type: List[DataDrivenTestCase]
-        for f in self.files:
+        for f in cls.files:
             c += parse_test_cases(os.path.join(config.test_data_prefix, f),
-                                  self.run_test, config.test_temp_dir)
+                                  None, config.test_temp_dir)
         return c
 
-    def run_test(self, testcase: DataDrivenTestCase) -> None:
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
         try:
             line = testcase.input[0]
             mask = ''
diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py
index 78fa2e1..3c7b618 100644
--- a/mypy/test/testtypes.py
+++ b/mypy/test/testtypes.py
@@ -10,9 +10,8 @@ from mypy.expandtype import expand_type
 from mypy.join import join_types, join_simple
 from mypy.meet import meet_types
 from mypy.types import (
-    UnboundType, AnyType, CallableType, TupleType, TypeVarDef, Type,
-    Instance, NoneTyp, Overloaded, TypeType, UnionType, UninhabitedType,
-    true_only, false_only, TypeVarId
+    UnboundType, AnyType, CallableType, TupleType, TypeVarDef, Type, Instance, NoneTyp, Overloaded,
+    TypeType, UnionType, UninhabitedType, true_only, false_only, TypeVarId, TypeOfAny
 )
 from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, CONTRAVARIANT, INVARIANT, COVARIANT
 from mypy.subtypes import is_subtype, is_more_precise, is_proper_subtype
@@ -28,51 +27,53 @@ class TypesSuite(Suite):
         self.function = self.fx.function
 
     def test_any(self) -> None:
-        assert_equal(str(AnyType()), 'Any')
+        assert_equal(str(AnyType(TypeOfAny.special_form)), 'Any')
 
     def test_simple_unbound_type(self) -> None:
         u = UnboundType('Foo')
         assert_equal(str(u), 'Foo?')
 
     def test_generic_unbound_type(self) -> None:
-        u = UnboundType('Foo', [UnboundType('T'), AnyType()])
+        u = UnboundType('Foo', [UnboundType('T'), AnyType(TypeOfAny.special_form)])
         assert_equal(str(u), 'Foo?[T?, Any]')
 
     def test_callable_type(self) -> None:
         c = CallableType([self.x, self.y],
                          [ARG_POS, ARG_POS],
                          [None, None],
-                         AnyType(), self.function)
+                         AnyType(TypeOfAny.special_form), self.function)
         assert_equal(str(c), 'def (X?, Y?) -> Any')
 
-        c2 = CallableType([], [], [], NoneTyp(), None)
+        c2 = CallableType([], [], [], NoneTyp(), self.fx.function)
         assert_equal(str(c2), 'def ()')
 
     def test_callable_type_with_default_args(self) -> None:
         c = CallableType([self.x, self.y], [ARG_POS, ARG_OPT], [None, None],
-                     AnyType(), self.function)
+                     AnyType(TypeOfAny.special_form), self.function)
         assert_equal(str(c), 'def (X?, Y? =) -> Any')
 
         c2 = CallableType([self.x, self.y], [ARG_OPT, ARG_OPT], [None, None],
-                      AnyType(), self.function)
+                      AnyType(TypeOfAny.special_form), self.function)
         assert_equal(str(c2), 'def (X? =, Y? =) -> Any')
 
     def test_callable_type_with_var_args(self) -> None:
-        c = CallableType([self.x], [ARG_STAR], [None], AnyType(), self.function)
+        c = CallableType([self.x], [ARG_STAR], [None], AnyType(TypeOfAny.special_form),
+                         self.function)
         assert_equal(str(c), 'def (*X?) -> Any')
 
         c2 = CallableType([self.x, self.y], [ARG_POS, ARG_STAR],
-                      [None, None], AnyType(), self.function)
+                      [None, None], AnyType(TypeOfAny.special_form), self.function)
         assert_equal(str(c2), 'def (X?, *Y?) -> Any')
 
         c3 = CallableType([self.x, self.y], [ARG_OPT, ARG_STAR], [None, None],
-                      AnyType(), self.function)
+                      AnyType(TypeOfAny.special_form), self.function)
         assert_equal(str(c3), 'def (X? =, *Y?) -> Any')
 
     def test_tuple_type(self) -> None:
-        assert_equal(str(TupleType([], None)), 'Tuple[]')
-        assert_equal(str(TupleType([self.x], None)), 'Tuple[X?]')
-        assert_equal(str(TupleType([self.x, AnyType()], None)), 'Tuple[X?, Any]')
+        assert_equal(str(TupleType([], self.fx.std_tuple)), 'Tuple[]')
+        assert_equal(str(TupleType([self.x], self.fx.std_tuple)), 'Tuple[X?]')
+        assert_equal(str(TupleType([self.x, AnyType(TypeOfAny.special_form)],
+                                   self.fx.std_tuple)), 'Tuple[X?, Any]')
 
     def test_type_variable_binding(self) -> None:
         assert_equal(str(TypeVarDef('X', 1, [], self.fx.o)), 'X')
@@ -175,11 +176,11 @@ class TypeOpsSuite(Suite):
         assert_true(is_more_precise(fx.b, fx.anyt))
         assert_true(is_more_precise(self.tuple(fx.b, fx.a),
                                     self.tuple(fx.b, fx.a)))
+        assert_true(is_more_precise(self.tuple(fx.b, fx.b),
+                                    self.tuple(fx.b, fx.a)))
 
         assert_false(is_more_precise(fx.a, fx.b))
         assert_false(is_more_precise(fx.anyt, fx.b))
-        assert_false(is_more_precise(self.tuple(fx.b, fx.b),
-                                     self.tuple(fx.b, fx.a)))
 
     # is_proper_subtype
 
@@ -247,7 +248,8 @@ class TypeOpsSuite(Suite):
         assert_false(tuple_type.can_be_true)
 
     def test_nonempty_tuple_always_true(self) -> None:
-        tuple_type = self.tuple(AnyType(), AnyType())
+        tuple_type = self.tuple(AnyType(TypeOfAny.special_form),
+                                AnyType(TypeOfAny.special_form))
         assert_true(tuple_type.can_be_true)
         assert_false(tuple_type.can_be_false)
 
@@ -274,7 +276,7 @@ class TypeOpsSuite(Suite):
         assert_type(UninhabitedType, to)
 
     def test_true_only_of_true_type_is_idempotent(self) -> None:
-        always_true = self.tuple(AnyType())
+        always_true = self.tuple(AnyType(TypeOfAny.special_form))
         to = true_only(always_true)
         assert_true(always_true is to)
 
@@ -288,7 +290,7 @@ class TypeOpsSuite(Suite):
         assert_true(self.fx.a.can_be_false)
 
     def test_true_only_of_union(self) -> None:
-        tup_type = self.tuple(AnyType())
+        tup_type = self.tuple(AnyType(TypeOfAny.special_form))
         # Union of something that is unknown, something that is always true, something
         # that is always false
         union_type = UnionType([self.fx.a, tup_type, self.tuple()])
@@ -300,7 +302,7 @@ class TypeOpsSuite(Suite):
         assert_true(to.items[1] is tup_type)
 
     def test_false_only_of_true_type_is_uninhabited(self) -> None:
-        fo = false_only(self.tuple(AnyType()))
+        fo = false_only(self.tuple(AnyType(TypeOfAny.special_form)))
         assert_type(UninhabitedType, fo)
 
     def test_false_only_of_false_type_is_idempotent(self) -> None:
@@ -321,7 +323,8 @@ class TypeOpsSuite(Suite):
         tup_type = self.tuple()
         # Union of something that is unknown, something that is always true, something
         # that is always false
-        union_type = UnionType([self.fx.a, self.tuple(AnyType()), tup_type])
+        union_type = UnionType([self.fx.a, self.tuple(AnyType(TypeOfAny.special_form)),
+                                tup_type])
         assert_equal(len(union_type.items), 3)
         fo = false_only(union_type)
         assert isinstance(fo, UnionType)
@@ -343,7 +346,7 @@ class TypeOpsSuite(Suite):
         tv = []  # type: List[TypeVarDef]
         n = -1
         for v in vars:
-            tv.append(TypeVarDef(v, n, None, self.fx.o))
+            tv.append(TypeVarDef(v, n, [], self.fx.o))
             n -= 1
         return CallableType(list(a[:-1]),
                             [ARG_POS] * (len(a) - 1),
@@ -443,7 +446,7 @@ class JoinSuite(Suite):
 
     def test_mixed_truth_restricted_type(self) -> None:
         # join_types against differently restricted truthiness types drops restrictions.
-        true_any = true_only(AnyType())
+        true_any = true_only(AnyType(TypeOfAny.special_form))
         false_o = false_only(self.fx.o)
         j = join_types(true_any, false_o)
         assert_true(j.can_be_true)
diff --git a/mypy/treetransform.py b/mypy/treetransform.py
index c7debd8..1343992 100644
--- a/mypy/treetransform.py
+++ b/mypy/treetransform.py
@@ -177,7 +177,7 @@ class TransformVisitor(NodeVisitor[Node]):
                        self.block(node.defs),
                        node.type_vars,
                        self.expressions(node.base_type_exprs),
-                       node.metaclass)
+                       self.optional_expr(node.metaclass))
         new.fullname = node.fullname
         new.info = node.info
         new.decorators = [self.expr(decorator)
@@ -340,7 +340,9 @@ class TransformVisitor(NodeVisitor[Node]):
         member = MemberExpr(self.expr(node.expr),
                             node.name)
         if node.def_var:
-            member.def_var = self.visit_var(node.def_var)
+            # This refers to an attribute; we don't transform attributes by default,
+            # only normal variables.
+            member.def_var = node.def_var
         self.copy_ref(member, node)
         return member
 
@@ -390,7 +392,9 @@ class TransformVisitor(NodeVisitor[Node]):
         return RevealTypeExpr(self.expr(node.expr))
 
     def visit_super_expr(self, node: SuperExpr) -> SuperExpr:
-        new = SuperExpr(node.name)
+        call = self.expr(node.call)
+        assert isinstance(call, CallExpr)
+        new = SuperExpr(node.name, call)
         new.info = node.info
         return new
 
diff --git a/mypy/tvar_scope.py b/mypy/tvar_scope.py
index 49f8359..66338f4 100644
--- a/mypy/tvar_scope.py
+++ b/mypy/tvar_scope.py
@@ -7,9 +7,9 @@ class TypeVarScope:
     """Scope that holds bindings for type variables. Node fullname -> TypeVarDef."""
 
     def __init__(self,
-                 parent: Optional['TypeVarScope'] = None,
+                 parent: 'Optional[TypeVarScope]' = None,
                  is_class_scope: bool = False,
-                 prohibited: Optional['TypeVarScope'] = None) -> None:
+                 prohibited: 'Optional[TypeVarScope]' = None) -> None:
         """Initializer for TypeVarScope
 
         Parameters:
@@ -28,7 +28,7 @@ class TypeVarScope:
             self.func_id = parent.func_id
             self.class_id = parent.class_id
 
-    def get_function_scope(self) -> Optional['TypeVarScope']:
+    def get_function_scope(self) -> 'Optional[TypeVarScope]':
         """Get the nearest parent that's a function scope, not a class scope"""
         it = self  # type: Optional[TypeVarScope]
         while it is not None and it.is_class_scope:
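
The tvar_scope.py hunks only move the quoting so that the entire annotation is a string; both spellings are forward references to a name that is not fully defined yet at annotation time. A tiny self-contained sketch of the two equivalent forms, using an illustrative Node class:

    from typing import Optional

    class Node:
        # Both parameters refer forward to Node, which is still being defined
        # here; quoting the whole type or just the class name both work.
        def __init__(self, parent: 'Optional[Node]' = None,
                     sibling: Optional['Node'] = None) -> None:
            self.parent = parent
            self.sibling = sibling

    root = Node()
    child = Node(parent=root)
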
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index fe95a5f..3119c19 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -1,25 +1,27 @@
 """Semantic analysis of types"""
 
 from collections import OrderedDict
-from typing import Callable, List, Optional, Set, Tuple, Iterator, TypeVar, Iterable
+from typing import Callable, List, Optional, Set, Tuple, Iterator, TypeVar, Iterable, Dict
 from itertools import chain
 
 from contextlib import contextmanager
 
+import itertools
+
 from mypy.messages import MessageBuilder
 from mypy.options import Options
 from mypy.types import (
-    Type, UnboundType, TypeVarType, TupleType, TypedDictType, UnionType, Instance,
-    AnyType, CallableType, NoneTyp, DeletedType, TypeList, TypeVarDef, TypeVisitor,
-    SyntheticTypeVisitor,
+    Type, UnboundType, TypeVarType, TupleType, TypedDictType, UnionType, Instance, AnyType,
+    CallableType, NoneTyp, DeletedType, TypeList, TypeVarDef, TypeVisitor, SyntheticTypeVisitor,
     StarType, PartialType, EllipsisType, UninhabitedType, TypeType, get_typ_args, set_typ_args,
-    CallableArgument, get_type_vars, TypeQuery, union_items
+    CallableArgument, get_type_vars, TypeQuery, union_items, TypeOfAny, ForwardRef
 )
 
 from mypy.nodes import (
     TVAR, TYPE_ALIAS, UNBOUND_IMPORTED, TypeInfo, Context, SymbolTableNode, Var, Expression,
     IndexExpr, RefExpr, nongen_builtins, check_arg_names, check_arg_kinds, ARG_POS, ARG_NAMED,
-    ARG_OPT, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2, TypeVarExpr
+    ARG_OPT, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2, TypeVarExpr, FuncDef, CallExpr, NameExpr,
+    Decorator
 )
 from mypy.tvar_scope import TypeVarScope
 from mypy.sametypes import is_same_type
@@ -55,10 +57,13 @@ def analyze_type_alias(node: Expression,
                        lookup_fqn_func: Callable[[str], SymbolTableNode],
                        tvar_scope: TypeVarScope,
                        fail_func: Callable[[str, Context], None],
+                       note_func: Callable[[str, Context], None],
                        plugin: Plugin,
                        options: Options,
                        is_typeshed_stub: bool,
-                       allow_unnormalized: bool = False) -> Optional[Type]:
+                       allow_unnormalized: bool = False,
+                       in_dynamic_func: bool = False,
+                       global_scope: bool = True) -> Optional[Type]:
     """Return type if node is valid as a type alias rvalue.
 
     Return None otherwise. 'node' must have been semantically analyzed.
@@ -91,6 +96,16 @@ def analyze_type_alias(node: Expression,
                 return None
         else:
             return None
+    elif isinstance(node, CallExpr):
+        if (isinstance(node.callee, NameExpr) and len(node.args) == 1 and
+                isinstance(node.args[0], NameExpr)):
+            call = lookup_func(node.callee.name, node.callee)
+            arg = lookup_func(node.args[0].name, node.args[0])
+            if (call is not None and call.node and call.node.fullname() == 'builtins.type' and
+                    arg is not None and arg.node and arg.node.fullname() == 'builtins.None'):
+                return NoneTyp()
+            return None
+        return None
     else:
         return None
 
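The CallExpr branch above recognizes one specific call form on the right-hand side of a type alias definition: a call to builtins.type with builtins.None as its only argument. Roughly, the user-level pattern this accepts (illustrative names; a sketch of the idiom, not a full description of alias handling):

    NoneAlias = type(None)   # now analyzed as an alias for the None type

    def f(x: NoneAlias) -> None:
        ...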
@@ -100,8 +115,11 @@ def analyze_type_alias(node: Expression,
     except TypeTranslationError:
         fail_func('Invalid type alias', node)
         return None
-    analyzer = TypeAnalyser(lookup_func, lookup_fqn_func, tvar_scope, fail_func, plugin, options,
-                            is_typeshed_stub, aliasing=True, allow_unnormalized=allow_unnormalized)
+    analyzer = TypeAnalyser(lookup_func, lookup_fqn_func, tvar_scope, fail_func, note_func,
+                            plugin, options, is_typeshed_stub, aliasing=True,
+                            allow_unnormalized=allow_unnormalized)
+    analyzer.in_dynamic_func = in_dynamic_func
+    analyzer.global_scope = global_scope
     return type.accept(analyzer)
 
 
@@ -119,20 +137,28 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
     Converts unbound types into bound types.
     """
 
+    # Is this called from an untyped function definition?
+    in_dynamic_func = False  # type: bool
+    # Is this called from global scope?
+    global_scope = True  # type: bool
+
     def __init__(self,
                  lookup_func: Callable[[str, Context], SymbolTableNode],
                  lookup_fqn_func: Callable[[str], SymbolTableNode],
-                 tvar_scope: TypeVarScope,
+                 tvar_scope: Optional[TypeVarScope],
                  fail_func: Callable[[str, Context], None],
+                 note_func: Callable[[str, Context], None],
                  plugin: Plugin,
                  options: Options,
                  is_typeshed_stub: bool, *,
                  aliasing: bool = False,
                  allow_tuple_literal: bool = False,
-                 allow_unnormalized: bool = False) -> None:
+                 allow_unnormalized: bool = False,
+                 third_pass: bool = False) -> None:
         self.lookup = lookup_func
         self.lookup_fqn_func = lookup_fqn_func
         self.fail_func = fail_func
+        self.note_func = note_func
         self.tvar_scope = tvar_scope
         self.aliasing = aliasing
         self.allow_tuple_literal = allow_tuple_literal
@@ -142,6 +168,7 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
         self.plugin = plugin
         self.options = options
         self.is_typeshed_stub = is_typeshed_stub
+        self.third_pass = third_pass
 
     def visit_unbound_type(self, t: UnboundType) -> Type:
         if t.optional:
@@ -149,13 +176,13 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
             # We don't need to worry about double-wrapping Optionals or
             # wrapping Anys: Union simplification will take care of that.
             return make_optional_type(self.visit_unbound_type(t))
-        sym = self.lookup(t.name, t)
+        sym = self.lookup(t.name, t, suppress_errors=self.third_pass)  # type: ignore
         if sym is not None:
             if sym.node is None:
                 # UNBOUND_IMPORTED can happen if an unknown name was imported.
                 if sym.kind != UNBOUND_IMPORTED:
                     self.fail('Internal error (node is None, kind={})'.format(sym.kind), t)
-                return AnyType()
+                return AnyType(TypeOfAny.special_form)
             fullname = sym.node.fullname()
             hook = self.plugin.get_type_analyze_hook(fullname)
             if hook:
@@ -163,7 +190,10 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
             if (fullname in nongen_builtins and t.args and
                     not sym.normalized and not self.allow_unnormalized):
                 self.fail(no_subscript_builtin_alias(fullname), t)
-            tvar_def = self.tvar_scope.get_binding(sym)
+            if self.tvar_scope:
+                tvar_def = self.tvar_scope.get_binding(sym)
+            else:
+                tvar_def = None
             if sym.kind == TVAR and tvar_def is not None:
                 if len(t.args) > 0:
                     self.fail('Type variable "{}" used with arguments'.format(
@@ -172,7 +202,7 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
             elif fullname == 'builtins.None':
                 return NoneTyp()
             elif fullname == 'typing.Any' or fullname == 'builtins.Any':
-                return AnyType(explicit=True)
+                return AnyType(TypeOfAny.explicit)
             elif fullname == 'typing.Tuple':
                 if len(t.args) == 0 and not t.empty_tuple_index:
                     # Bare 'Tuple' is same as 'tuple'
@@ -193,14 +223,15 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
             elif fullname == 'typing.Optional':
                 if len(t.args) != 1:
                     self.fail('Optional[...] must have exactly one type argument', t)
-                    return AnyType()
+                    return AnyType(TypeOfAny.from_error)
                 item = self.anal_type(t.args[0])
                 return make_optional_type(item)
             elif fullname == 'typing.Callable':
                 return self.analyze_callable_type(t)
             elif fullname == 'typing.Type':
                 if len(t.args) == 0:
-                    any_type = AnyType(from_omitted_generics=True, line=t.line, column=t.column)
+                    any_type = AnyType(TypeOfAny.from_omitted_generics,
+                                       line=t.line, column=t.column)
                     return TypeType(any_type, line=t.line, column=t.column)
                 if len(t.args) != 1:
                     self.fail('Type[...] must have exactly one type argument', t)
@@ -210,14 +241,14 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
                 if self.nesting_level > 0:
                     self.fail('Invalid type: ClassVar nested inside other type', t)
                 if len(t.args) == 0:
-                    return AnyType(line=t.line)
+                    return AnyType(TypeOfAny.from_omitted_generics, line=t.line, column=t.column)
                 if len(t.args) != 1:
                     self.fail('ClassVar[...] must have at most one type argument', t)
-                    return AnyType()
+                    return AnyType(TypeOfAny.from_error)
                 item = self.anal_type(t.args[0])
                 if isinstance(item, TypeVarType) or get_type_vars(item):
                     self.fail('Invalid type: ClassVar cannot be generic', t)
-                    return AnyType()
+                    return AnyType(TypeOfAny.from_error)
                 return item
             elif fullname in ('mypy_extensions.NoReturn', 'typing.NoReturn'):
                 return UninhabitedType(is_noreturn=True)
@@ -253,17 +284,26 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
                     # context. This is slightly problematic as it allows using the type 'Any'
                     # as a base class -- however, this will fail soon at runtime so the problem
                     # is pretty minor.
-                    return AnyType(from_unimported_type=True)
+                    return AnyType(TypeOfAny.from_unimported_type)
                 # Allow unbound type variables when defining an alias
                 if not (self.aliasing and sym.kind == TVAR and
-                        self.tvar_scope.get_binding(sym) is None):
+                        (not self.tvar_scope or self.tvar_scope.get_binding(sym) is None)):
+                    if (not self.third_pass and not self.in_dynamic_func and
+                            not (isinstance(sym.node, (FuncDef, Decorator)) or
+                                 isinstance(sym.node, Var) and sym.node.is_ready) and
+                            not (sym.kind == TVAR and tvar_def is None)):
+                        if t.args and not self.global_scope:
+                            self.fail('Unsupported forward reference to "{}"'.format(t.name), t)
+                            return AnyType(TypeOfAny.from_error)
+                        return ForwardRef(t)
                     self.fail('Invalid type "{}"'.format(name), t)
+                    if self.third_pass and sym.kind == TVAR:
+                        self.note_func("Forward references to type variables are prohibited", t)
                 return t
             info = sym.node  # type: TypeInfo
             if len(t.args) > 0 and info.fullname() == 'builtins.tuple':
-                return TupleType(self.anal_array(t.args),
-                                 Instance(info, [AnyType()], t.line),
-                                 t.line)
+                fallback = Instance(info, [AnyType(TypeOfAny.special_form)], t.line)
+                return TupleType(self.anal_array(t.args), fallback, t.line)
             else:
                 # Analyze arguments and construct Instance type. The
                 # number of type arguments and their values are
@@ -278,7 +318,7 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
                     # represented as a tuple type.
                     if t.args:
                         self.fail('Generic tuple types not supported', t)
-                        return AnyType()
+                        return AnyType(TypeOfAny.from_error)
                     return tup.copy_modified(items=self.anal_array(tup.items),
                                              fallback=instance)
                 td = info.typeddict_type
@@ -287,13 +327,16 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
                     # represented as a typeddict type.
                     if t.args:
                         self.fail('Generic TypedDict types not supported', t)
-                        return AnyType()
+                        return AnyType(TypeOfAny.from_error)
                     # Create a named TypedDictType
                     return td.copy_modified(item_types=self.anal_array(list(td.items.values())),
                                             fallback=instance)
                 return instance
         else:
-            return AnyType()
+            if self.third_pass:
+                self.fail('Invalid type "{}"'.format(t.name), t)
+                return AnyType(TypeOfAny.from_error)
+            return AnyType(TypeOfAny.special_form)
 
     def visit_any(self, t: AnyType) -> Type:
         return t
@@ -309,11 +352,11 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
 
     def visit_type_list(self, t: TypeList) -> Type:
         self.fail('Invalid type', t)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
     def visit_callable_argument(self, t: CallableArgument) -> Type:
         self.fail('Invalid type', t)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
     def visit_instance(self, t: Instance) -> Type:
         return t
@@ -339,17 +382,18 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
         # generate errors elsewhere, and Tuple[t1, t2, ...] must be used instead.
         if t.implicit and not self.allow_tuple_literal:
             self.fail('Invalid tuple literal type', t)
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         star_count = sum(1 for item in t.items if isinstance(item, StarType))
         if star_count > 1:
             self.fail('At most one star type allowed in a tuple', t)
             if t.implicit:
-                return TupleType([AnyType() for _ in t.items],
+                return TupleType([AnyType(TypeOfAny.from_error) for _ in t.items],
                                  self.named_type('builtins.tuple'),
                                  t.line)
             else:
-                return AnyType()
-        fallback = t.fallback if t.fallback else self.named_type('builtins.tuple', [AnyType()])
+                return AnyType(TypeOfAny.from_error)
+        any_type = AnyType(TypeOfAny.special_form)
+        fallback = t.fallback if t.fallback else self.named_type('builtins.tuple', [any_type])
         return TupleType(self.anal_array(t.items), fallback, t.line)
 
     def visit_typeddict_type(self, t: TypedDictType) -> Type:
@@ -370,16 +414,20 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
 
     def visit_ellipsis_type(self, t: EllipsisType) -> Type:
         self.fail("Unexpected '...'", t)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
     def visit_type_type(self, t: TypeType) -> Type:
         return TypeType.make_normalized(self.anal_type(t.item), line=t.line)
 
+    def visit_forwardref_type(self, t: ForwardRef) -> Type:
+        return t
+
     def analyze_callable_type(self, t: UnboundType) -> Type:
         fallback = self.named_type('builtins.function')
         if len(t.args) == 0:
             # Callable (bare). Treat as Callable[..., Any].
-            any_type = AnyType(from_omitted_generics=True, line=t.line, column=t.column)
+            any_type = AnyType(TypeOfAny.from_omitted_generics,
+                               line=t.line, column=t.column)
             ret = CallableType([any_type, any_type],
                                [nodes.ARG_STAR, nodes.ARG_STAR2],
                                [None, None],
@@ -392,7 +440,7 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
                 # Callable[[ARG, ...], RET] (ordinary callable type)
                 analyzed_args = self.analyze_callable_args(t.args[0])
                 if analyzed_args is None:
-                    return AnyType()
+                    return AnyType(TypeOfAny.from_error)
                 args, kinds, names = analyzed_args
                 ret = CallableType(args,
                                    kinds,
@@ -401,7 +449,8 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
                                    fallback=fallback)
             elif isinstance(t.args[0], EllipsisType):
                 # Callable[..., RET] (with literal ellipsis; accept arbitrary arguments)
-                ret = CallableType([AnyType(), AnyType()],
+                ret = CallableType([AnyType(TypeOfAny.explicit),
+                                    AnyType(TypeOfAny.explicit)],
                                    [nodes.ARG_STAR, nodes.ARG_STAR2],
                                    [None, None],
                                    ret_type=ret_type,
@@ -409,10 +458,10 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
                                    is_ellipsis_args=True)
             else:
                 self.fail('The first argument to Callable must be a list of types or "..."', t)
-                return AnyType()
+                return AnyType(TypeOfAny.from_error)
         else:
             self.fail('Invalid function type', t)
-            return AnyType()
+            return AnyType(TypeOfAny.from_error)
         assert isinstance(ret, CallableType)
         return ret.accept(self)
 
@@ -462,13 +511,18 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
     @contextmanager
     def tvar_scope_frame(self) -> Iterator[None]:
         old_scope = self.tvar_scope
-        self.tvar_scope = self.tvar_scope.method_frame()
+        if self.tvar_scope:
+            self.tvar_scope = self.tvar_scope.method_frame()
+        else:
+            assert self.third_pass, "Internal error: type variable scope not given"
         yield
         self.tvar_scope = old_scope
 
     def infer_type_variables(self,
                              type: CallableType) -> List[Tuple[str, TypeVarExpr]]:
         """Return list of unique type variables referred to in a callable."""
+        if not self.tvar_scope:
+            return []  # We are in third pass, nothing new here
         names = []  # type: List[str]
         tvars = []  # type: List[TypeVarExpr]
         for arg in type.arg_types:
@@ -490,6 +544,8 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
     def bind_function_type_variables(self,
                                      fun_type: CallableType, defn: Context) -> List[TypeVarDef]:
         """Find the type variables of the function type and bind them in our tvar_scope"""
+        if not self.tvar_scope:
+            return []  # We are in third pass, nothing new here
         if fun_type.variables:
             for var in fun_type.variables:
                 var_expr = self.lookup(var.name, var).node
@@ -512,7 +568,8 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
         return defs
 
     def is_defined_type_var(self, tvar: str, context: Context) -> bool:
-        return self.tvar_scope.get_binding(self.lookup(tvar, context)) is not None
+        return (self.tvar_scope is not None and
+                self.tvar_scope.get_binding(self.lookup(tvar, context)) is not None)
 
     def anal_array(self, a: List[Type], nested: bool = True) -> List[Type]:
         res = []  # type: List[Type]
@@ -539,16 +596,18 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], AnalyzerPluginInterface):
         return a
 
     def named_type(self, fully_qualified_name: str,
-                   args: List[Type] = None,
+                   args: Optional[List[Type]] = None,
                    line: int = -1,
                    column: int = -1) -> Instance:
         node = self.lookup_fqn_func(fully_qualified_name)
         assert isinstance(node.node, TypeInfo)
-        return Instance(node.node, args or [AnyType()] * len(node.node.defn.type_vars),
+        any_type = AnyType(TypeOfAny.special_form)
+        return Instance(node.node, args or [any_type] * len(node.node.defn.type_vars),
                         line=line, column=column)
 
     def tuple_type(self, items: List[Type]) -> TupleType:
-        return TupleType(items, fallback=self.named_type('builtins.tuple', [AnyType()]))
+        any_type = AnyType(TypeOfAny.special_form)
+        return TupleType(items, fallback=self.named_type('builtins.tuple', [any_type]))
 
 
 class TypeAnalyserPass3(TypeVisitor[None]):
@@ -572,15 +631,27 @@ class TypeAnalyserPass3(TypeVisitor[None]):
     """
 
     def __init__(self,
+                 lookup_func: Callable[[str, Context], SymbolTableNode],
+                 lookup_fqn_func: Callable[[str], SymbolTableNode],
                  fail_func: Callable[[str, Context], None],
+                 note_func: Callable[[str, Context], None],
+                 plugin: Plugin,
                  options: Options,
-                 is_typeshed_stub: bool) -> None:
+                 is_typeshed_stub: bool,
+                 indicator: Dict[str, bool]) -> None:
+        self.lookup_func = lookup_func
+        self.lookup_fqn_func = lookup_fqn_func
         self.fail = fail_func
+        self.note_func = note_func
         self.options = options
+        self.plugin = plugin
         self.is_typeshed_stub = is_typeshed_stub
+        self.indicator = indicator
 
     def visit_instance(self, t: Instance) -> None:
         info = t.type
+        if info.replaced or info.tuple_type:
+            self.indicator['synthetic'] = True
         # Check type argument count.
         if len(t.args) != len(info.type_vars):
             if len(t.args) == 0:
@@ -591,8 +662,13 @@ class TypeAnalyserPass3(TypeVisitor[None]):
                     alternative = nongen_builtins[t.type.fullname()]
                     self.fail(messages.IMPLICIT_GENERIC_ANY_BUILTIN.format(alternative), t)
                 # Insert implicit 'Any' type arguments.
-                any_type = AnyType(from_omitted_generics=not from_builtins, line=t.line,
-                                   column=t.line)
+                if from_builtins:
+                    # this 'Any' was already reported elsewhere
+                    any_type = AnyType(TypeOfAny.special_form,
+                                       line=t.line, column=t.column)
+                else:
+                    any_type = AnyType(TypeOfAny.from_omitted_generics,
+                                       line=t.line, column=t.column)
                 t.args = [any_type] * len(info.type_vars)
                 return
             # Invalid number of type parameters.
@@ -610,10 +686,11 @@ class TypeAnalyserPass3(TypeVisitor[None]):
             # Construct the correct number of type arguments, as
             # otherwise the type checker may crash as it expects
             # things to be right.
-            t.args = [AnyType() for _ in info.type_vars]
+            t.args = [AnyType(TypeOfAny.from_error) for _ in info.type_vars]
             t.invalid = True
         elif info.defn.type_vars:
             # Check type argument values.
+            # TODO: Calling is_subtype and is_same_types in semantic analysis is a bad idea
             for (i, arg), tvar in zip(enumerate(t.args), info.defn.type_vars):
                 if tvar.values:
                     if isinstance(arg, TypeVarType):
@@ -625,29 +702,47 @@ class TypeAnalyserPass3(TypeVisitor[None]):
                             continue
                     else:
                         arg_values = [arg]
-                    self.check_type_var_values(info, arg_values,
-                                               tvar.values, i + 1, t)
-                if not is_subtype(arg, tvar.upper_bound):
+                    self.check_type_var_values(info, arg_values, tvar.name, tvar.values, i + 1, t)
+                # TODO: These hacks will not be necessary once this is moved to a later stage.
+                arg = self.update_type(arg)
+                bound = self.update_type(tvar.upper_bound)
+                if not is_subtype(arg, bound):
                     self.fail('Type argument "{}" of "{}" must be '
                               'a subtype of "{}"'.format(
-                                  arg, info.name(), tvar.upper_bound), t)
+                                  arg, info.name(), bound), t)
         for arg in t.args:
             arg.accept(self)
         if info.is_newtype:
             for base in info.bases:
                 base.accept(self)
 
-    def check_type_var_values(self, type: TypeInfo, actuals: List[Type],
+    def check_type_var_values(self, type: TypeInfo, actuals: List[Type], arg_name: str,
                               valids: List[Type], arg_number: int, context: Context) -> None:
         for actual in actuals:
+            actual = self.update_type(actual)
             if (not isinstance(actual, AnyType) and
-                    not any(is_same_type(actual, value) for value in valids)):
+                    not any(is_same_type(actual, self.update_type(value)) for value in valids)):
                 if len(actuals) > 1 or not isinstance(actual, Instance):
                     self.fail('Invalid type argument value for "{}"'.format(
                         type.name()), context)
                 else:
-                    self.fail('Type argument {} of "{}" has incompatible value "{}"'.format(
-                        arg_number, type.name(), actual.type.name()), context)
+                    class_name = '"{}"'.format(type.name())
+                    actual_type_name = '"{}"'.format(actual.type.name())
+                    self.fail(messages.INCOMPATIBLE_TYPEVAR_VALUE.format(
+                        arg_name, class_name, actual_type_name), context)
+
+    def update_type(self, tp: Type) -> Type:
+        # This helper is only needed while is_subtype and is_same_type are
+        # called in third pass. This can be removed when TODO in visit_instance is fixed.
+        if isinstance(tp, ForwardRef):
+            tp = tp.link
+        if isinstance(tp, Instance) and tp.type.replaced:
+            replaced = tp.type.replaced
+            if replaced.tuple_type:
+                tp = replaced.tuple_type
+            if replaced.typeddict_type:
+                tp = replaced.typeddict_type
+        return tp
 
     def visit_callable_type(self, t: CallableType) -> None:
         t.ret_type.accept(self)
@@ -690,13 +785,34 @@ class TypeAnalyserPass3(TypeVisitor[None]):
         self.fail('Invalid type', t)
 
     def visit_type_var(self, t: TypeVarType) -> None:
-        pass
+        if t.upper_bound:
+            t.upper_bound.accept(self)
+        if t.values:
+            for v in t.values:
+                v.accept(self)
 
     def visit_partial_type(self, t: PartialType) -> None:
         pass
 
     def visit_type_type(self, t: TypeType) -> None:
-        pass
+        t.item.accept(self)
+
+    def visit_forwardref_type(self, t: ForwardRef) -> None:
+        self.indicator['forward'] = True
+        if isinstance(t.link, UnboundType):
+            t.link = self.anal_type(t.link)
+
+    def anal_type(self, tp: UnboundType) -> Type:
+        tpan = TypeAnalyser(self.lookup_func,
+                            self.lookup_fqn_func,
+                            None,
+                            self.fail,
+                            self.note_func,
+                            self.plugin,
+                            self.options,
+                            self.is_typeshed_stub,
+                            third_pass=True)
+        return tp.accept(tpan)
 
 
 TypeVarList = List[Tuple[str, TypeVarExpr]]
@@ -725,7 +841,11 @@ def replace_alias_tvars(tp: Type, vars: List[str], subs: List[Type],
 
 def set_any_tvars(tp: Type, vars: List[str],
                   newline: int, newcolumn: int, implicit: bool = True) -> Type:
-    any_type = AnyType(from_omitted_generics=implicit, line=newline, column=newcolumn)
+    if implicit:
+        type_of_any = TypeOfAny.from_omitted_generics
+    else:
+        type_of_any = TypeOfAny.special_form
+    any_type = AnyType(type_of_any, line=newline, column=newcolumn)
     return replace_alias_tvars(tp, vars, [any_type] * len(vars), newline, newcolumn)
 
 
@@ -808,7 +928,7 @@ class HasExplicitAny(TypeQuery[bool]):
         super().__init__(any)
 
     def visit_any(self, t: AnyType) -> bool:
-        return t.explicit
+        return t.type_of_any == TypeOfAny.explicit
 
     def visit_typeddict_type(self, t: TypedDictType) -> bool:
         # typeddict is checked during TypedDict declaration, so don't typecheck it here.
@@ -829,7 +949,7 @@ class HasAnyFromUnimportedType(TypeQuery[bool]):
         super().__init__(any)
 
     def visit_any(self, t: AnyType) -> bool:
-        return t.from_unimported_type
+        return t.type_of_any == TypeOfAny.from_unimported_type
 
     def visit_typeddict_type(self, t: TypedDictType) -> bool:
         # typeddict is checked during TypedDict declaration, so don't typecheck it here
@@ -856,6 +976,25 @@ class CollectAnyTypesQuery(TypeQuery[List[AnyType]]):
         return result
 
 
+def collect_all_inner_types(t: Type) -> List[Type]:
+    """
+    Return all types that `t` contains
+    """
+    return t.accept(CollectAllInnerTypesQuery())
+
+
+class CollectAllInnerTypesQuery(TypeQuery[List[Type]]):
+    def __init__(self) -> None:
+        super().__init__(self.combine_lists_strategy)
+
+    def query_types(self, types: Iterable[Type]) -> List[Type]:
+        return self.strategy(t.accept(self) for t in types) + list(types)
+
+    @classmethod
+    def combine_lists_strategy(cls, it: Iterable[List[Type]]) -> List[Type]:
+        return list(itertools.chain.from_iterable(it))
+
+
 def make_optional_type(t: Type) -> Type:
     """Return the type corresponding to Optional[t].
 
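A possible usage sketch for the new collect_all_inner_types helper defined above; the union is built directly from mypy.types constructors purely for illustration:

    from mypy.typeanal import collect_all_inner_types
    from mypy.types import AnyType, NoneTyp, TypeOfAny, UnionType

    u = UnionType([NoneTyp(), AnyType(TypeOfAny.explicit)])
    # Every type contained in 'u': here its two items, which themselves
    # contain no further types.
    inner = collect_all_inner_types(u)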
diff --git a/mypy/typefixture.py b/mypy/typefixture.py
index 87ddd06..ead0c53 100644
--- a/mypy/typefixture.py
+++ b/mypy/typefixture.py
@@ -3,11 +3,11 @@
 It contains class TypeInfos and Type objects.
 """
 
-from typing import List
+from typing import List, Optional
 
 from mypy.types import (
-    Type, TypeVarType, AnyType, NoneTyp,
-    Instance, CallableType, TypeVarDef, TypeType, UninhabitedType
+    Type, TypeVarType, AnyType, NoneTyp, Instance, CallableType, TypeVarDef, TypeType,
+    UninhabitedType, TypeOfAny
 )
 from mypy.nodes import (
     TypeInfo, ClassDef, Block, ARG_POS, ARG_OPT, ARG_STAR, SymbolTable,
@@ -40,7 +40,7 @@ class TypeFixture:
         self.sf1 = make_type_var('S', -1, [], self.o, variance)  # S`-1 (type variable)
 
         # Simple types
-        self.anyt = AnyType()
+        self.anyt = AnyType(TypeOfAny.special_form)
         self.nonet = NoneTyp()
         self.uninhabited = UninhabitedType()
 
@@ -192,12 +192,12 @@ class TypeFixture:
                             a[-1], self.function)
 
     def make_type_info(self, name: str,
-                       module_name: str = None,
+                       module_name: Optional[str] = None,
                        is_abstract: bool = False,
-                       mro: List[TypeInfo] = None,
-                       bases: List[Instance] = None,
-                       typevars: List[str] = None,
-                       variances: List[int] = None) -> TypeInfo:
+                       mro: Optional[List[TypeInfo]] = None,
+                       bases: Optional[List[Instance]] = None,
+                       typevars: Optional[List[str]] = None,
+                       variances: Optional[List[int]] = None) -> TypeInfo:
         """Make a TypeInfo suitable for use in unit tests."""
 
         class_def = ClassDef(name, Block([]), None, [])
diff --git a/mypy/types.py b/mypy/types.py
index 051c320..dcc8459 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -5,7 +5,7 @@ from abc import abstractmethod
 from collections import OrderedDict
 from typing import (
     Any, TypeVar, Dict, List, Tuple, cast, Generic, Set, Optional, Union, Iterable, NamedTuple,
-    Callable,
+    Callable
 )
 
 import mypy.nodes
@@ -172,7 +172,7 @@ class UnboundType(Type):
 
     def __init__(self,
                  name: str,
-                 args: List[Type] = None,
+                 args: Optional[List[Type]] = None,
                  line: int = -1,
                  column: int = -1,
                  optional: bool = False,
@@ -188,6 +188,15 @@ class UnboundType(Type):
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_unbound_type(self)
 
+    def __hash__(self) -> int:
+        return hash((self.name, self.optional, tuple(self.args)))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, UnboundType):
+            return NotImplemented
+        return (self.name == other.name and self.optional == other.optional and
+                self.args == other.args)
+
     def serialize(self) -> JsonDict:
         return {'.class': 'UnboundType',
                 'name': self.name,
@@ -250,65 +259,85 @@ class TypeList(Type):
 _dummy = object()  # type: Any
 
 
+class TypeOfAny:
+    """
+    This class describes different types of Any. Each 'Any' can be of only one type at a time.
+
+    TODO: this class should be made an Enum once we drop support for Python 3.3.
+    """
+    MYPY = False
+    if MYPY:
+        from typing import NewType
+        TypeOfAny = NewType('TypeOfAny', str)
+    else:
+        def TypeOfAny(x: str) -> str:
+            return x
+
+    # Was this Any type inferred without a type annotation?
+    unannotated = TypeOfAny('unannotated')
+    # Does this Any come from an explicit type annotation?
+    explicit = TypeOfAny('explicit')
+    # Does this come from an unfollowed import? See --disallow-any=unimported option
+    from_unimported_type = TypeOfAny('from_unimported_type')
+    # Does this Any type come from omitted generics?
+    from_omitted_generics = TypeOfAny('from_omitted_generics')
+    # Does this Any come from an error?
+    from_error = TypeOfAny('from_error')
+    # Is this a type that can't be represented in mypy's type system? For instance, type of
+    # call to NewType(...). Even though these types aren't real Anys, we treat them as such.
+    special_form = TypeOfAny('special_form')
+    # Does this Any come from interaction with another Any?
+    from_another_any = TypeOfAny('from_another_any')
+
+
 class AnyType(Type):
     """The type 'Any'."""
 
     def __init__(self,
-                 implicit: bool = False,
-                 from_unimported_type: bool = False,
-                 explicit: bool = False,
-                 from_omitted_generics: bool = False,
-                 special_form: bool = False,
+                 type_of_any: TypeOfAny.TypeOfAny,
+                 source_any: Optional['AnyType'] = None,
                  line: int = -1,
                  column: int = -1) -> None:
         super().__init__(line, column)
-        # Was this Any type was inferred without a type annotation?
-        # Note that this is not always the opposite of explicit.
-        # For instance, if "Any" comes from an unimported type,
-        # both explicit and implicit will be False
-        self.implicit = implicit
-        # Does this come from an unfollowed import? See --disallow-any=unimported option
-        self.from_unimported_type = from_unimported_type
-        # Does this Any come from an explicit type annotation?
-        self.explicit = explicit
-        # Does this type come from omitted generics?
-        self.from_omitted_generics = from_omitted_generics
-        # Is this a type that can't be represented in mypy's type system? For instance, type of
-        # call to NewType(...)). Even though these types aren't real Anys, we treat them as such.
-        self.special_form = special_form
+        self.type_of_any = type_of_any
+        # If this Any was created as a result of interacting with another 'Any', record the source
+        # and use it in reports.
+        self.source_any = source_any
+        if source_any and source_any.source_any:
+            self.source_any = source_any.source_any
+
+        # Only Anys that come from another Any can have source_any.
+        assert type_of_any != TypeOfAny.from_another_any or source_any is not None
+        # We should not have chains of Anys.
+        assert not self.source_any or self.source_any.type_of_any != TypeOfAny.from_another_any
 
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_any(self)
 
     def copy_modified(self,
-                      implicit: bool = _dummy,
-                      from_unimported_type: bool = _dummy,
-                      explicit: bool = _dummy,
-                      from_omitted_generics: bool = _dummy,
-                      special_form: bool = _dummy,
+                      type_of_any: TypeOfAny.TypeOfAny = _dummy,
+                      original_any: Optional['AnyType'] = _dummy,
                       ) -> 'AnyType':
-        if implicit is _dummy:
-            implicit = self.implicit
-        if from_unimported_type is _dummy:
-            from_unimported_type = self.from_unimported_type
-        if explicit is _dummy:
-            explicit = self.explicit
-        if from_omitted_generics is _dummy:
-            from_omitted_generics = self.from_omitted_generics
-        if special_form is _dummy:
-            special_form = self.special_form
-        return AnyType(implicit=implicit, from_unimported_type=from_unimported_type,
-                       explicit=explicit, from_omitted_generics=from_omitted_generics,
-                       special_form=special_form,
+        if type_of_any is _dummy:
+            type_of_any = self.type_of_any
+        if original_any is _dummy:
+            original_any = self.source_any
+        return AnyType(type_of_any=type_of_any, source_any=original_any,
                        line=self.line, column=self.column)
 
+    def __hash__(self) -> int:
+        return hash(AnyType)
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, AnyType)
+
     def serialize(self) -> JsonDict:
         return {'.class': 'AnyType'}
 
     @classmethod
     def deserialize(cls, data: JsonDict) -> 'AnyType':
         assert data['.class'] == 'AnyType'
-        return AnyType()
+        return AnyType(TypeOfAny.special_form)
 
 
 class UninhabitedType(Type):
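For orientation, a minimal sketch of how the reworked AnyType constructor above is meant to be called (argument names as defined in this hunk; variable names are illustrative):

    from mypy.types import AnyType, TypeOfAny

    explicit = AnyType(TypeOfAny.explicit)            # from an explicit annotation
    derived = AnyType(TypeOfAny.from_another_any,     # produced by interacting with 'explicit'
                      source_any=explicit)
    assert derived.source_any is explicit             # the constructor collapses chains of Anys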
@@ -336,6 +365,12 @@ class UninhabitedType(Type):
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_uninhabited_type(self)
 
+    def __hash__(self) -> int:
+        return hash(UninhabitedType)
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, UninhabitedType)
+
     def serialize(self) -> JsonDict:
         return {'.class': 'UninhabitedType',
                 'is_noreturn': self.is_noreturn}
@@ -357,6 +392,12 @@ class NoneTyp(Type):
     def __init__(self, line: int = -1, column: int = -1) -> None:
         super().__init__(line, column)
 
+    def __hash__(self) -> int:
+        return hash(NoneTyp)
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, NoneTyp)
+
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_none_type(self)
 
@@ -388,7 +429,7 @@ class DeletedType(Type):
 
     source = ''  # type: Optional[str]  # May be None; name that generated this value
 
-    def __init__(self, source: str = None, line: int = -1, column: int = -1) -> None:
+    def __init__(self, source: Optional[str] = None, line: int = -1, column: int = -1) -> None:
         self.source = source
         super().__init__(line, column)
 
@@ -436,6 +477,14 @@ class Instance(Type):
 
     type_ref = None  # type: str
 
+    def __hash__(self) -> int:
+        return hash((self.type, tuple(self.args)))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Instance):
+            return NotImplemented
+        return self.type == other.type and self.args == other.args
+
     def serialize(self) -> Union[JsonDict, str]:
         assert self.type is not None
         type_ref = self.type.fullname()
@@ -498,6 +547,14 @@ class TypeVarType(Type):
         else:
             return self.upper_bound
 
+    def __hash__(self) -> int:
+        return hash(self.id)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, TypeVarType):
+            return NotImplemented
+        return self.id == other.id
+
     def serialize(self) -> JsonDict:
         assert not self.id.is_meta_var()
         return {'.class': 'TypeVarType',
@@ -588,9 +645,9 @@ class CallableType(FunctionLike):
                  arg_names: List[Optional[str]],
                  ret_type: Type,
                  fallback: Instance,
-                 name: str = None,
-                 definition: SymbolNode = None,
-                 variables: List[TypeVarDef] = None,
+                 name: Optional[str] = None,
+                 definition: Optional[SymbolNode] = None,
+                 variables: Optional[List[TypeVarDef]] = None,
                  line: int = -1,
                  column: int = -1,
                  is_ellipsis_args: bool = False,
@@ -598,7 +655,7 @@ class CallableType(FunctionLike):
                  is_classmethod_class: bool = False,
                  special_sig: Optional[str] = None,
                  from_type_type: bool = False,
-                 bound_args: List[Optional[Type]] = None,
+                 bound_args: Optional[List[Optional[Type]]] = None,
                  ) -> None:
         if variables is None:
             variables = []
@@ -776,6 +833,23 @@ class CallableType(FunctionLike):
             a.append(tv.id)
         return a
 
+    def __hash__(self) -> int:
+        return hash((self.ret_type, self.is_type_obj(),
+                     self.is_ellipsis_args, self.name,
+                     tuple(self.arg_types), tuple(self.arg_names), tuple(self.arg_kinds)))
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, CallableType):
+            return (self.ret_type == other.ret_type and
+                    self.arg_types == other.arg_types and
+                    self.arg_names == other.arg_names and
+                    self.arg_kinds == other.arg_kinds and
+                    self.name == other.name and
+                    self.is_type_obj() == other.is_type_obj() and
+                    self.is_ellipsis_args == other.is_ellipsis_args)
+        else:
+            return NotImplemented
+
     def serialize(self) -> JsonDict:
         # TODO: As an optimization, leave out everything related to
         # generic functions for non-generic functions.
@@ -858,6 +932,14 @@ class Overloaded(FunctionLike):
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_overloaded(self)
 
+    def __hash__(self) -> int:
+        return hash(tuple(self.items()))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Overloaded):
+            return NotImplemented
+        return self.items() == other.items()
+
     def serialize(self) -> JsonDict:
         return {'.class': 'Overloaded',
                 'items': [t.serialize() for t in self.items()],
@@ -899,6 +981,14 @@ class TupleType(Type):
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_tuple_type(self)
 
+    def __hash__(self) -> int:
+        return hash((tuple(self.items), self.fallback))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, TupleType):
+            return NotImplemented
+        return self.items == other.items and self.fallback == other.fallback
+
     def serialize(self) -> JsonDict:
         return {'.class': 'TupleType',
                 'items': [t.serialize() for t in self.items],
@@ -913,15 +1003,16 @@ class TupleType(Type):
                          Instance.deserialize(data['fallback']),
                          implicit=data['implicit'])
 
-    def copy_modified(self, *, fallback: Instance = None,
-                      items: List[Type] = None) -> 'TupleType':
+    def copy_modified(self, *, fallback: Optional[Instance] = None,
+                      items: Optional[List[Type]] = None) -> 'TupleType':
         if fallback is None:
             fallback = self.fallback
         if items is None:
             items = self.items
         return TupleType(items, fallback, self.line, self.column)
 
-    def slice(self, begin: int, stride: int, end: int) -> 'TupleType':
+    def slice(self, begin: Optional[int], stride: Optional[int],
+              end: Optional[int]) -> 'TupleType':
         return TupleType(self.items[begin:end:stride], self.fallback,
                          self.line, self.column, self.implicit)
 
@@ -951,6 +1042,21 @@ class TypedDictType(Type):
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_typeddict_type(self)
 
+    def __hash__(self) -> int:
+        return hash((frozenset(self.items.items()), self.fallback,
+                     frozenset(self.required_keys)))
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, TypedDictType):
+            if frozenset(self.items.keys()) != frozenset(other.items.keys()):
+                return False
+            for (_, left_item_type, right_item_type) in self.zip(other):
+                if not left_item_type == right_item_type:
+                    return False
+            return self.fallback == other.fallback and self.required_keys == other.required_keys
+        else:
+            return NotImplemented
+
     def serialize(self) -> JsonDict:
         return {'.class': 'TypedDictType',
                 'items': [[n, t.serialize()] for (n, t) in self.items.items()],
@@ -975,9 +1081,9 @@ class TypedDictType(Type):
         assert self.fallback.type.typeddict_type is not None
         return self.fallback.type.typeddict_type.as_anonymous()
 
-    def copy_modified(self, *, fallback: Instance = None,
-                      item_types: List[Type] = None,
-                      required_keys: Set[str] = None) -> 'TypedDictType':
+    def copy_modified(self, *, fallback: Optional[Instance] = None,
+                      item_types: Optional[List[Type]] = None,
+                      required_keys: Optional[Set[str]] = None) -> 'TypedDictType':
         if fallback is None:
             fallback = self.fallback
         if item_types is None:
@@ -1048,6 +1154,14 @@ class UnionType(Type):
         self.can_be_false = any(item.can_be_false for item in items)
         super().__init__(line, column)
 
+    def __hash__(self) -> int:
+        return hash(frozenset(self.items))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, UnionType):
+            return NotImplemented
+        return frozenset(self.items) == frozenset(other.items)
+
     @staticmethod
     def make_union(items: List[Type], line: int = -1, column: int = -1) -> Type:
         if len(items) > 1:
@@ -1162,7 +1276,7 @@ class PartialType(Type):
     inner_types = None  # type: List[Type]
 
     def __init__(self,
-                 type: Optional['mypy.nodes.TypeInfo'],
+                 type: 'Optional[mypy.nodes.TypeInfo]',
                  var: 'mypy.nodes.Var',
                  inner_types: List[Type]) -> None:
         self.type = type
@@ -1244,6 +1358,14 @@ class TypeType(Type):
     def accept(self, visitor: 'TypeVisitor[T]') -> T:
         return visitor.visit_type_type(self)
 
+    def __hash__(self) -> int:
+        return hash(self.item)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, TypeType):
+            return NotImplemented
+        return self.item == other.item
+
     def serialize(self) -> JsonDict:
         return {'.class': 'TypeType', 'item': self.item.serialize()}
 
@@ -1253,6 +1375,41 @@ class TypeType(Type):
         return TypeType.make_normalized(deserialize_type(data['item']))
 
 
+class ForwardRef(Type):
+    """Class to wrap forward references to other types.
+
+    This is used when a forward reference to an (unanalyzed) synthetic type is found,
+    for example:
+
+        x: A
+        A = TypedDict('A', {'x': int})
+
+    To avoid false positives and crashes in such situations, we first wrap the first
+    occurrence of 'A' in ForwardRef. The wrapped UnboundType is then updated in the third
+    pass of semantic analysis and ultimately fixed in the patches after the third pass.
+    ForwardRefs are thus temporary and are completely replaced with the linked types
+    or with Any (to avoid cyclic references) before the type checking stage.
+    """
+    link = None  # type: Type  # The wrapped type
+
+    def __init__(self, link: Type) -> None:
+        self.link = link
+
+    def accept(self, visitor: 'TypeVisitor[T]') -> T:
+        return visitor.visit_forwardref_type(self)
+
+    def serialize(self):
+        if isinstance(self.link, UnboundType):
+            name = self.link.name
+        elif isinstance(self.link, Instance):
+            name = self.link.type.name()
+        else:
+            name = self.link.__class__.__name__
+        # We should never get here since all forward references should be resolved
+        # and removed during semantic analysis.
+        assert False, "Internal error: Unresolved forward reference to {}".format(name)
+
+
 #
 # Visitor-related classes
 #
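A schematic sketch of the ForwardRef life cycle described in the docstring above; the actual resolution is performed by the third pass of semantic analysis and the subsequent patches, so the manual assignment below only stands in for that step:

    from mypy.types import ForwardRef, NoneTyp, UnboundType

    ref = ForwardRef(UnboundType('A'))   # wrap the not-yet-analyzed reference to 'A'
    # ... later, the semantic analyzer re-analyzes the wrapped type and
    # replaces the link with the resolved type:
    ref.link = NoneTyp()                 # stands in here for the fully analyzed 'A'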
@@ -1328,6 +1485,9 @@ class TypeVisitor(Generic[T]):
     def visit_type_type(self, t: TypeType) -> T:
         pass
 
+    def visit_forwardref_type(self, t: ForwardRef) -> T:
+        raise RuntimeError('Internal error: unresolved forward reference')
+
 
 class SyntheticTypeVisitor(TypeVisitor[T]):
     """A TypeVisitor that also knows how to visit synthetic AST constructs.
@@ -1430,6 +1590,9 @@ class TypeTranslator(TypeVisitor[Type]):
     def visit_type_type(self, t: TypeType) -> Type:
         return TypeType.make_normalized(t.item.accept(self), line=t.line, column=t.column)
 
+    def visit_forwardref_type(self, t: ForwardRef) -> Type:
+        return t
+
 
 class TypeStrVisitor(SyntheticTypeVisitor[str]):
     """Visitor for pretty-printing types into strings.
@@ -1443,7 +1606,7 @@ class TypeStrVisitor(SyntheticTypeVisitor[str]):
      - Represent the NoneTyp type as None.
     """
 
-    def __init__(self, id_mapper: IdMapper = None) -> None:
+    def __init__(self, id_mapper: Optional[IdMapper] = None) -> None:
         self.id_mapper = id_mapper
 
     def visit_unbound_type(self, t: UnboundType)-> str:
@@ -1585,6 +1748,9 @@ class TypeStrVisitor(SyntheticTypeVisitor[str]):
     def visit_type_type(self, t: TypeType) -> str:
         return 'Type[{}]'.format(t.item.accept(self))
 
+    def visit_forwardref_type(self, t: ForwardRef) -> str:
+        return '~{}'.format(t.link.accept(self))
+
     def list_str(self, a: List[Type]) -> str:
         """Convert items of an array to strings (pretty-print types)
         and join the results with commas.
@@ -1664,6 +1830,9 @@ class TypeQuery(SyntheticTypeVisitor[T]):
     def visit_type_type(self, t: TypeType) -> T:
         return t.item.accept(self)
 
+    def visit_forwardref_type(self, t: ForwardRef) -> T:
+        return t.link.accept(self)
+
     def visit_ellipsis_type(self, t: EllipsisType) -> T:
         return self.strategy([])
 
@@ -1762,20 +1931,20 @@ def function_type(func: mypy.nodes.FuncBase, fallback: Instance) -> FunctionLike
         # Implicit type signature with dynamic types.
         # Overloaded functions always have a signature, so func must be an ordinary function.
         assert isinstance(func, mypy.nodes.FuncItem), str(func)
-        return callable_type(cast(mypy.nodes.FuncItem, func), fallback)
+        return callable_type(func, fallback)
 
 
 def callable_type(fdef: mypy.nodes.FuncItem, fallback: Instance,
-                  ret_type: Type = None) -> CallableType:
+                  ret_type: Optional[Type] = None) -> CallableType:
     name = fdef.name()
     if name:
         name = '"{}"'.format(name)
 
     return CallableType(
-        [AnyType()] * len(fdef.arg_names),
+        [AnyType(TypeOfAny.unannotated)] * len(fdef.arg_names),
         fdef.arg_kinds,
         [None if argument_elide_name(n) else n for n in fdef.arg_names],
-        ret_type or AnyType(),
+        ret_type or AnyType(TypeOfAny.unannotated),
         fallback,
         name,
         implicit=True,
diff --git a/mypy/version.py b/mypy/version.py
index 7fe2620..85ad90a 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -1,7 +1,7 @@
 import os
 from mypy import git
 
-__version__ = '0.521'
+__version__ = '0.530'
 base_version = __version__
 
 mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
diff --git a/mypy/waiter.py b/mypy/waiter.py
index e8ba99d..7e475d3 100644
--- a/mypy/waiter.py
+++ b/mypy/waiter.py
@@ -3,7 +3,7 @@
 This is used for running mypy tests.
 """
 
-from typing import Dict, List, Optional, Set, Tuple, Any, Iterable
+from typing import Dict, List, Optional, Set, Tuple, Any, Iterable, IO
 
 import os
 from multiprocessing import cpu_count
@@ -24,21 +24,22 @@ class WaiterError(Exception):
 class LazySubprocess:
     """Wrapper around a subprocess that runs a test task."""
 
-    def __init__(self, name: str, args: List[str], *, cwd: str = None,
-                 env: Dict[str, str] = None, passthrough: Optional[int] = None) -> None:
+    def __init__(self, name: str, args: List[str], *, cwd: Optional[str] = None,
+                 env: Optional[Dict[str, str]] = None,
+                 passthrough: Optional[int] = None) -> None:
         self.name = name
         self.args = args
         self.cwd = cwd
         self.env = env
-        self.start_time = None  # type: float
-        self.end_time = None  # type: float
+        self.start_time = None  # type: Optional[float]
+        self.end_time = None  # type: Optional[float]
         # None means no passthrough
         # otherwise, it represents verbosity level
         self.passthrough = passthrough
 
     def start(self) -> None:
         if self.passthrough is None or self.passthrough < 0:
-            self.outfile = tempfile.TemporaryFile()
+            self.outfile = tempfile.TemporaryFile()  # type: Optional[IO[Any]]
         else:
             self.outfile = None
         self.start_time = time.perf_counter()
@@ -62,7 +63,10 @@ class LazySubprocess:
 
     @property
     def elapsed_time(self) -> float:
-        return self.end_time - self.start_time
+        if self.end_time is None or self.start_time is None:
+            return 0
+        else:
+            return self.end_time - self.start_time
 
 
 class Noter:
@@ -143,7 +147,7 @@ class Waiter:
         self.ff = ff
         assert limit > 0
         self.xfail = set(xfail)
-        self._note = None  # type: Noter
+        self._note = None  # type: Optional[Noter]
         self.times1 = {}  # type: Dict[str, float]
         self.times2 = {}  # type: Dict[str, float]
         self.new_log = defaultdict(dict)  # type: Dict[str, Dict[str, float]]
@@ -184,6 +188,7 @@ class Waiter:
                 print('%-8s #%d %s' % ('COMMAND', num, cmd_str))
             sys.stdout.flush()
         elif self.verbosity >= 0:
+            assert self._note is not None
             self._note.start(num)
         self.next += 1
 
@@ -203,6 +208,7 @@ class Waiter:
                 code = cmd.process.poll()
                 if code is not None:
                     cmd.end_time = time.perf_counter()
+                    assert cmd.start_time is not None
                     self.new_log['exit_code'][cmd.name] = code
                     self.new_log['runtime'][cmd.name] = cmd.end_time - cmd.start_time
                     return pid, code
@@ -227,6 +233,7 @@ class Waiter:
             print('%-8s #%d %s' % (msg, num, name))
             sys.stdout.flush()
         elif self.verbosity >= 0:
+            assert self._note is not None
             self._note.stop(num, bool(rc))
         elif self.verbosity >= -1:
             sys.stdout.write('.' if rc == 0 else msg[0])
@@ -239,7 +246,7 @@ class Waiter:
 
         if rc != 0:
             if name not in self.xfail:
-                fail_type = 'FAILURE'
+                fail_type = 'FAILURE'  # type: Optional[str]
             else:
                 fail_type = 'XFAIL'
         else:
@@ -300,6 +307,7 @@ class Waiter:
                 sequential = -(cmd.name in self.sequential_tasks)
                 if self.ff:
                     # failed tasks first with -ff
+                    assert logs is not None
                     exit_code = -logs[-1]['exit_code'].get(cmd.name, 0)
                     if not exit_code:
                         # avoid interrupting parallel tasks with sequential in between
@@ -343,11 +351,14 @@ class Waiter:
             total_tests += tests
             total_failed_tests += test_fails
         if self.verbosity == 0:
+            assert self._note is not None
             self._note.clear()
 
         if self.new_log:  # don't append empty log, it will corrupt the cache file
             # log only LOGSIZE most recent tests
-            test_log = (self.load_log_file() + [self.new_log])[-self.LOGSIZE:]
+            logs = self.load_log_file()
+            assert logs is not None
+            test_log = (logs + [self.new_log])[-self.LOGSIZE:]
             try:
                 with open(self.FULL_LOG_FILENAME, 'w') as fp:
                     json.dump(test_log, fp, sort_keys=True, indent=4)
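Many of the waiter.py changes above go hand in hand with the stricter settings added to mypy_self_check.ini below (strict_optional and no_implicit_optional): previously implicit Optional attributes are now annotated as Optional and narrowed with explicit checks or asserts before use. A standalone sketch of the same pattern, mirroring the elapsed_time change (names are illustrative):

    from typing import Optional

    def elapsed(start: Optional[float], end: Optional[float]) -> float:
        # Under strict_optional, arithmetic on possibly-None values is rejected,
        # so narrow explicitly before subtracting.
        if start is None or end is None:
            return 0
        return end - start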
diff --git a/mypy_self_check.ini b/mypy_self_check.ini
index de99649..6b97ed6 100644
--- a/mypy_self_check.ini
+++ b/mypy_self_check.ini
@@ -2,7 +2,12 @@
 disallow_untyped_defs = True
 disallow_subclassing_any = True
 warn_no_return = True
+strict_optional = True
+no_implicit_optional = True
+disallow_any = generics, unimported
+warn_redundant_casts = True
+warn_unused_ignores = True
 
-; historical exceptions
-[mypy-mypy.test.testextensions]
-disallow_untyped_defs = False
+# historical exception
+[mypy-mypy.semanal]
+strict_optional = False
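
The options added above tighten the global [mypy] settings (strict optional,
no implicit Optional, and several extra warnings), while the per-module
section at the end relaxes strict_optional for mypy.semanal only: settings
under a [mypy-<module>] header override the global [mypy] section for that
module.  Running a check against this configuration would look roughly like
the following (illustrative command, not part of the patch):

    mypy --config-file mypy_self_check.ini mypy
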
diff --git a/pinfer/.gitignore b/pinfer/.gitignore
deleted file mode 100644
index e1dace5..0000000
--- a/pinfer/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-__pycache__
-*~
-*.pyc
diff --git a/pinfer/LICENSE b/pinfer/LICENSE
deleted file mode 100644
index ecdce98..0000000
--- a/pinfer/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-pinfer is licensed under the terms of the MIT license, reproduced below.
-
-= = = = =
-
-The MIT License
-
-Copyright (c) 2013, 2014 Jukka Lehtosalo
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
-= = = = =
diff --git a/pinfer/README b/pinfer/README
deleted file mode 100644
index 1f4fe4c..0000000
--- a/pinfer/README
+++ /dev/null
@@ -1,47 +0,0 @@
-ReadMe for pinfer
-=================
-
-Introduction
-------------
-
-Pinfer is a tool for runtime type inference of variable types and
-function signatures in Python programs. The inferred types are mostly
-compatible with mypy types. It is intended for coming up with draft
-types when migrating Python code to static typing, but it can also be
-used as a code understanding or analysis tool.
-
-Pinfer is very experimental!
-
-Requirements
-------------
-
- * Python 3.2 or later
-
-Basic usage
------------
-
-To infer types of all functions and methods in a module:
-
-  import foo   # target module
-  import pinfer
-
-  # set up type inference and dumping
-  pinfer.infer_module(foo)
-  pinfer.dump_at_exit()
-
-  # now do something with the module, e.g. run tests
-
-To infer types for a Python module, add the above lines to its test suite.
-
-Handy wrapper
--------------
-
-The p.py script provides a handy wrapper for the above.
-
-
-Copyright
--------------
-
-This project includes files from the open source CPython project.  Those files are Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved.  The license can be found at https://github.com/python/cpython/blob/master/LICENSE.
-
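
Putting the README's snippet into a runnable form: the instrumentation is set
up first, the module is then exercised (typically by its tests), and the
inferred stubs are printed when the interpreter exits.  A sketch, with foo and
test_foo as placeholder module names:

    import unittest

    import foo      # the module whose types we want to infer
    import pinfer

    pinfer.infer_module(foo)   # wrap foo's functions and classes
    pinfer.dump_at_exit()      # print the inferred signatures at exit

    # exercise the module, e.g. by running its test suite
    unittest.main(module='test_foo', exit=False)
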
diff --git a/pinfer/inspect3.py b/pinfer/inspect3.py
deleted file mode 100644
index 4d74be1..0000000
--- a/pinfer/inspect3.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# from Python 3's inspect.py
-# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
-'''
-provide getfullargspec() and getcallargs() for Python 2
-'''
-
-import sys
-import inspect
-
-if sys.version_info.major == 2:
-
-    def getfullargspec(func):
-        (args, varargs, keywords, defaults) = inspect.getargspec(func)
-        return (args, varargs, keywords, defaults, [], [], {})
-
-
-    def getcallargs(*func_and_positional, **named):
-        """Get the mapping of arguments to values.
-
-        A dict is returned, with keys the function argument names (including the
-        names of the * and ** arguments, if any), and values the respective bound
-        values from 'positional' and 'named'."""
-        func = func_and_positional[0]
-        positional = func_and_positional[1:]
-        spec = getfullargspec(func)
-        args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, ann = spec
-        f_name = func.__name__
-        arg2value = {}
-
-
-        if inspect.ismethod(func) and func.__self__ is not None:
-            # implicit 'self' (or 'cls' for classmethods) argument
-            positional = (func.__self__,) + positional
-        num_pos = len(positional)
-        num_args = len(args)
-        num_defaults = len(defaults) if defaults else 0
-
-        n = min(num_pos, num_args)
-        for i in range(n):
-            arg2value[args[i]] = positional[i]
-        if varargs:
-            arg2value[varargs] = tuple(positional[n:])
-        possible_kwargs = set(args + kwonlyargs)
-        if varkw:
-            arg2value[varkw] = {}
-        for kw, value in named.items():
-            if kw not in possible_kwargs:
-                if not varkw:
-                    raise TypeError("%s() got an unexpected keyword argument %r" %
-                                    (f_name, kw))
-                arg2value[varkw][kw] = value
-                continue
-            if kw in arg2value:
-                raise TypeError("%s() got multiple values for argument %r" %
-                                (f_name, kw))
-            arg2value[kw] = value
-        if num_pos > num_args and not varargs:
-            _too_many(f_name, args, kwonlyargs, varargs, num_defaults,
-                       num_pos, arg2value)
-        if num_pos < num_args:
-            req = args[:num_args - num_defaults]
-            for arg in req:
-                if arg not in arg2value:
-                    _missing_arguments(f_name, req, True, arg2value)
-            for i, arg in enumerate(args[num_args - num_defaults:]):
-                if arg not in arg2value:
-                    arg2value[arg] = defaults[i]
-        missing = 0
-        for kwarg in kwonlyargs:
-            if kwarg not in arg2value:
-                if kwonlydefaults and kwarg in kwonlydefaults:
-                    arg2value[kwarg] = kwonlydefaults[kwarg]
-                else:
-                    missing += 1
-        if missing:
-            _missing_arguments(f_name, kwonlyargs, False, arg2value)
-        return arg2value
-
-
-    def _too_many(f_name, args, kwonly, varargs, defcount, given, values):
-        atleast = len(args) - defcount
-        kwonly_given = len([arg for arg in kwonly if arg in values])
-        if varargs:
-            plural = atleast != 1
-            sig = "at least %d" % (atleast,)
-        elif defcount:
-            plural = True
-            sig = "from %d to %d" % (atleast, len(args))
-        else:
-            plural = len(args) != 1
-            sig = str(len(args))
-        kwonly_sig = ""
-        if kwonly_given:
-            msg = " positional argument%s (and %d keyword-only argument%s)"
-            kwonly_sig = (msg % ("s" if given != 1 else "", kwonly_given,
-                                 "s" if kwonly_given != 1 else ""))
-        raise TypeError("%s() takes %s positional argument%s but %d%s %s given" %
-                (f_name, sig, "s" if plural else "", given, kwonly_sig,
-                 "was" if given == 1 and not kwonly_given else "were"))
-
-
-    def _missing_arguments(f_name, argnames, pos, values):
-        names = [repr(name) for name in argnames if name not in values]
-        missing = len(names)
-        if missing == 1:
-            s = names[0]
-        elif missing == 2:
-            s = "{} and {}".format(*names)
-        else:
-            tail = ", {} and {}".format(*names[-2:])
-            del names[-2:]
-            s = ", ".join(names) + tail
-        raise TypeError("%s() missing %i required %s argument%s: %s" %
-                        (f_name, missing,
-                          "positional" if pos else "keyword-only",
-                          "" if missing == 1 else "s", s))
-
-
-else:
-    getfullargspec = inspect.getfullargspec
-    getcallargs = inspect.getcallargs
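
inspect3 only backports what pinfer needs from Python 3's inspect module:
getfullargspec() to describe a function's parameters and getcallargs() to map
the arguments of a concrete call onto those parameter names.  A small example
of the behaviour being relied on (standard library call, shown for
illustration):

    import inspect

    def greet(name, punctuation='!'):
        return 'Hello, ' + name + punctuation

    # Maps each parameter name to the value it is bound to for this call.
    inspect.getcallargs(greet, 'world')
    # -> {'name': 'world', 'punctuation': '!'}
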
diff --git a/pinfer/p.py b/pinfer/p.py
deleted file mode 100644
index 451038d..0000000
--- a/pinfer/p.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-"""Stub to run pinfer on a module.
-
-Usage:
-
-  p.py targetmod testfile [outfile] [ -- testargs]
-
-Where:
-
-  targetmod:  the full target module (e.g. textwrap)
-  testfile: the full test module file (e.g. test/test_textwrap.py)
-  outfile:  where to write the annotated module.  If unspecified, will
-            write stubs at end of stdout.
-
-Example invocation:
-
-  python3 p.py textwrap test/test_textwrap.py
-"""
-
-
-import sys
-import imp
-import pinfer
-import os
-import atexit
-import inspect
-
-iport = __builtins__.__import__
-watched = set()
-
-
-def inferring_import(*args, **kwargs):
-    module = iport(*args, **kwargs)
-    if module not in watched:
-        watched.add(module)
-        pinfer.infer_module(module)
-    return module
-
-
-def main():
-    if '--' in sys.argv:
-        argslen = sys.argv.index('--')
-    else:
-        argslen = len(sys.argv)
-    args = sys.argv[1:argslen]
-    del sys.argv[1:argslen + 1]
-
-    if len(args) == 2:
-        targetpackage, testfile = args
-        outfile = None
-    elif len(args) == 3:
-        targetpackage, testfile, outfile = args
-    else:
-        sys.stderr.write('Usage: %s targetmodule testfile [outfile] [ -- testargs]\n' %
-                         sys.argv[0])
-        sys.exit(2)
-
-    # help us with local imports
-    filemodule = os.path.dirname(os.path.abspath(testfile))
-    sys.path.append(filemodule)
-
-    targetmod = __import__(targetpackage)
-    targetfile = inspect.getfile(targetmod)
-    pinfer.infer_module(targetmod)
-
-    if outfile:
-        @atexit.register
-        def rewrite_file(targetfile=targetfile, outfile=outfile, pinfer=pinfer):
-            if targetfile.endswith(".pyc"):
-                targetfile = targetfile[0:-1]
-            annotated = pinfer.annotate_file(targetfile)
-            open(outfile, "w").write(annotated)
-    else:
-        pinfer.dump_at_exit()
-
-    pinfer.ignore_files.add(os.path.abspath(testfile))
-
-    # run testfile as main
-    del sys.modules['__main__']
-    imp.load_source('__main__', testfile)
-
-if __name__ == '__main__':
-    main()
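
As the docstring above describes, p.py imports and instruments the target
module, then re-runs the test file as __main__; everything after a bare -- is
handed to the test script untouched.  An illustrative invocation (the output
file name is chosen here, not mandated by the script):

    python3 p.py textwrap test/test_textwrap.py textwrap_annotated.py -- -v
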
diff --git a/pinfer/pinfer.py b/pinfer/pinfer.py
deleted file mode 100644
index 3dd1445..0000000
--- a/pinfer/pinfer.py
+++ /dev/null
@@ -1,686 +0,0 @@
-"""Tools for runtime type inference"""
-
-import inspect
-from inspect3 import getfullargspec, getcallargs
-import types
-import codecs
-import os
-import tokenize
-try:
-    from StringIO import StringIO
-    from unparse import Unparser
-except:
-    from io import StringIO
-    from unparse3 import Unparser
-import ast
-
-
-MAX_INFERRED_TUPLE_LENGTH = 10
-PREFERRED_LINE_LENGTH = 79
-
-
-var_db = {}  # (location, variable) -> type
-func_argid_db = {}  # funcid -> argspec
-func_arg_db = {}  # (funcid, name) -> type
-func_return_db = {}  # funcname -> type
-func_source_db = {}  # funcid -> source string
-#func_info_db = {}  # funcid -> (class, name, argspec, file, line, source)
-ignore_files = set()
-
-# The type inferencing wrapper should not be reentrant.  It's not, in theory, calling
-# out to any external code which we would want to infer the types of.  However,
-# sometimes we do something like infer_type(arg.keys()) or infer_type(arg.values()) if
-# the arg is a collection, and we want to know about the types of its elements.  .keys(),
-# .values(), etc. can be overloaded, possibly to a method we've wrapped.  This can become
-# infinitely recursive, particularly because on something like arg.keys(), keys() gets passed
-# arg as the first parameter, so if we've wrapped keys() we'll try to infer_type(arg),
-# which will detect it's a dictionary, call infer_type(arg.keys()), recurse and so on.
-# We ran into this problem with collections.OrderedDict.
-# To prevent reentrancy, we set is_performing_inference = True iff we're in the middle of
-# inferring the types of a function.  If we try to run another function we've wrapped,
-# we skip type inferencing so we can't accidentally infinitely recurse.
-is_performing_inference = False
-
-
-def reset():
-    global var_db, func_argid_db, func_arg_db, func_return_db, func_source_db
-    global ignore_files, is_performing_inference
-    var_db = {}
-    func_arg_db = {}
-    func_return_db = {}
-    # we don't actually want to clear these on reset(), or we'll
-    # lose the functions we've already wrapped forever.
-    #func_source_db = {}
-    #func_argid_db = {}
-    is_performing_inference = False
-    ignore_files = set()
-
-
-def format_state(pretty=False):
-    lines = []
-    for loc, var in sorted(var_db.keys()):
-        lines.append('%s: %s' % (var, var_db[(loc, var)]))
-    funcnames = sorted(set(func_return_db.keys()))
-    prevclass = ''
-    indent = ''
-    for funcid in funcnames:
-        curclass, name, sourcefile, sourceline = funcid
-        if curclass != prevclass:
-            if curclass:
-                lines.append('class %s(...):' % curclass)
-                indent = ' ' * 4
-            else:
-                indent = ''
-            prevclass = curclass
-
-        lines.append(format_sig(funcid, name, indent, pretty))
-    return '\n'.join(lines)
-
-
-def unparse_ast(node):
-    buf = StringIO()
-    Unparser(node, buf)
-    return buf.getvalue().strip()
-
-
-def format_sig(funcid, fname, indent, pretty, defaults=[]):
-    (argnames, varargs, varkw, _, kwonlyargs, _, _) = func_argid_db[funcid]
-
-    # to get defaults, parse the function, get the nodes for the
-    # defaults, then unparse them
-    try:
-        fn_ast = ast.parse(func_source_db[funcid].strip()).body[0]
-
-        # override fname if we parsed a different one
-        fname = fn_ast.name
-
-        defaults = [unparse_ast(dn) for dn in fn_ast.args.defaults]
-
-        if hasattr(fn_ast.args, 'kw_defaults'):
-            kwonly_defaults = [unparse_ast(dn) for dn in fn_ast.args.kw_defaults]
-        else:
-            kwonly_defaults = []
-    except:
-        defaults, kwonly_defaults = [], []
-    finally:
-        # pad defaults to match the length of args
-        defaults = ([None] * (len(argnames) - len(defaults))) + defaults
-        kwonly_defaults = ([None] * (len(kwonlyargs) - len(kwonly_defaults))) + kwonly_defaults
-
-    args = [('', arg, default) for (arg, default) in zip(argnames, defaults)]
-
-    if varargs:
-        args += [('*', varargs, None)]
-    elif len(kwonlyargs) > 0:
-        args += [('*', '', None)]
-    if len(kwonlyargs) > 0:
-        args += [('', arg, default) for (arg, default) in zip(kwonlyargs, kwonly_defaults)]
-    if varkw:
-        args += [('**', varkw, None)]
-
-    argstrs = []
-    for i, (prefix, arg, default) in enumerate(args):
-        argstr = prefix + arg
-
-        # Omit type of self argument.
-        if (funcid, arg) in func_arg_db and not (i == 0 and arg == 'self'):
-            argstr += ': %s' % func_arg_db[(funcid, arg)]
-
-        if default:
-            argstr += ' = %s' % default
-
-        argstrs.append(argstr)
-
-    ret = str(func_return_db.get(funcid, Unknown()))
-
-    sig = 'def %s(%s) -> %s' % (fname, ', '.join(argstrs), ret)
-    if not pretty or len(sig) <= PREFERRED_LINE_LENGTH or not args:
-        return indent + sig
-
-    else:
-        # Format into multiple lines to conserve horizontal space.
-        first = indent + 'def %s(' % fname
-        extra_indent = first.index('(') + 1
-
-        decl = indent + first
-        decl += (',\n' + indent + ' ' * extra_indent).join(argstrs)
-        decl += ')\n%s -> %s' % (indent + ' ' * (extra_indent - 4), ret)
-        return decl
-
-
-def annotate_file(path):
-    # this should be documented somewhere...
-    INDENT_TOKEN = 5
-
-    with open(path, 'r') as targetfile:
-        source = targetfile.read()
-
-    line_offsets = []
-    source_length = 0
-    for line in source.split('\n'):
-        line_offsets.append(source_length)
-        source_length = source_length + len(line) + 1
-
-    funcids = set(funcid for funcid, arg in func_arg_db)
-
-    # list of (oldstart, oldend, replacement)
-    replacements = []  # type: List[Tuple[Int, Int, String]]
-
-    for funcid in funcids:
-        class_name, name, sourcefile, def_start_line = funcid
-        if sourcefile != path:
-            continue
-
-        func_source = func_source_db[funcid]
-        tokens = list(tokenize.generate_tokens(StringIO(func_source).readline))
-        assert len(tokens) > 0
-
-        # we're making the assumption that the def at least gets to start on
-        # its own line, which is fine for non-lambdas
-
-        if tokens[0][0] == INDENT_TOKEN:
-            indent = tokens[0][1]
-            del tokens[0]
-        else:
-            indent = ''
-
-        # Find the first indent, which should be between the end of the def
-        # and before the start of the body.  Then find the preceding colon,
-        # which should be at the end of the def.
-
-        for indent_loc in range(len(tokens)):
-            if tokens[indent_loc][0] == INDENT_TOKEN:
-                function_is_one_line = False
-                break
-            else:
-                function_is_one_line = True
-
-        if function_is_one_line:
-            # we're also making the assumption that the def has an indent on the
-            # line following the signature, which is true almost all of the time.
-            # If this is not the case, we should just leave a comment above the
-            # function, although I might not have time to do that now.
-            continue
-
-        for def_end_loc in range(indent_loc, -1, -1):
-            if tokens[def_end_loc][1] == ':':
-                break
-
-        assert def_end_loc > 0
-
-        def_end_line, def_end_col = tokens[def_end_loc][2]
-        def_end_line -= 1  # the tokenizer apparently 1-indexes lines
-        def_end_line += def_start_line
-
-        def_start_offset = line_offsets[def_start_line]
-        def_end_offset = line_offsets[def_end_line] + def_end_col
-
-        annotated_def = format_sig(funcid, name, indent, True)
-
-        replacements.append((def_start_offset, def_end_offset, annotated_def))
-
-    # ideally, we'd put this after the docstring
-    replacements.append((0, 0, "from typing import List, Dict, Set, Tuple, Callable, Pattern, Match, Union, Optional\n"))
-
-    # absurdly inefficient algorithm: replace with O(n) writer
-
-    for (start, end, replacement) in sorted(replacements, key=lambda r: r[0], reverse=True):
-        source = source[0:start] + replacement + source[end:]
-
-    return source
-
-
-def dump():
-    s = format_state(pretty=True)
-    if s:
-        print()
-        print('INFERRED TYPES:')
-        print(s)
-    reset()
-
-
-def dump_at_exit():
-    import atexit
-    atexit.register(dump)
-
-
-def get_defining_file(obj):
-    try:
-        path = os.path.abspath(inspect.getfile(obj))
-        if path.endswith('.pyc'):
-            path = path[0:-1]
-        return path
-    except:
-        return None
-
-
-def infer_var(name, value):
-    key = (None, name)
-    update_var_db(key, value)
-
-
-def infer_attrs(x):
-    if hasattr(x, '__class__'):
-        t = x.__class__
-    else:
-        t = type(x)
-    cls = t.__name__
-    typedict = t.__dict__
-    for dict in x.__dict__, typedict:
-        for attr, value in dict.items():
-            if attr in ('__dict__', '__doc__', '__module__', '__weakref__'):
-                continue
-            if type(value) is type(infer_attrs) and dict is typedict:
-                # Skip methods.
-                continue
-            key = (None, '%s.%s' % (cls, attr))
-            update_var_db(key, value)
-
-
-def infer_method_signature(class_name):
-    def decorator(func):
-        return infer_signature(func, class_name)
-    return decorator
-
-
-def infer_signature(func, class_name=''):
-    """Decorator that infers the signature of a function."""
-
-    # infer_method_signature should be idempotent
-    if hasattr(func, '__is_inferring_sig'):
-        return func
-
-    assert func.__module__ != infer_method_signature.__module__
-
-    try:
-        funcfile = get_defining_file(func)
-        funcsource, sourceline = inspect.getsourcelines(func)
-        sourceline -= 1  # getsourcelines is apparently 1-indexed
-    except:
-        return func
-
-    funcid = (class_name, func.__name__, funcfile, sourceline)
-    func_source_db[funcid] = ''.join(funcsource)
-
-    try:
-        func_argid_db[funcid] = getfullargspec(func)
-        vargs_name, kwargs_name = func_argid_db[funcid][1], func_argid_db[funcid][2]
-    except TypeError:
-        # Not supported.
-        return func
-
-    def wrapper(*args, **kwargs):
-        global is_performing_inference
-        # If we're already doing inference, we should be in our own code, not code we're checking.
-        # Not doing this check sometimes results in infinite recursion.
-
-        if is_performing_inference:
-            return func(*args, **kwargs)
-
-        expecting_type_error, got_type_error, got_exception = False, False, False
-
-        is_performing_inference = True
-        try:
-            callargs = getcallargs(func, *args, **kwargs)
-
-            # we have to handle *args and **kwargs separately
-            if vargs_name:
-                va = callargs.pop(vargs_name)
-            if kwargs_name:
-                kw = callargs.pop(kwargs_name)
-
-            arg_db = {arg: infer_value_type(value) for arg, value in callargs.items()}
-
-            # *args and **kwargs need to merge the types of all their values
-            if vargs_name:
-                arg_db[vargs_name] = union_many_types(*[infer_value_type(v) for v in va])
-            if kwargs_name:
-                arg_db[kwargs_name] = union_many_types(*[infer_value_type(v) for v in kw.values()])
-
-        except TypeError:
-            got_exception = expecting_type_error = True
-        except:
-            got_exception = True
-        finally:
-            is_performing_inference = False
-
-        try:
-            ret = func(*args, **kwargs)
-        except TypeError:
-            got_type_error = got_exception = True
-            raise
-        except:
-            got_exception = True
-            raise
-        finally:
-            if not got_exception:
-                assert not expecting_type_error
-
-                # if we didn't get a TypeError, update the actual database
-                for arg, t in arg_db.items():
-                    update_db(func_arg_db, (funcid, arg), t)
-
-                # if we got an exception, we don't have a ret
-                if not got_exception:
-                    is_performing_inference = True
-                    try:
-                        type = infer_value_type(ret)
-                        update_db(func_return_db, funcid, type)
-                    except:
-                        pass
-                    finally:
-                        is_performing_inference = False
-
-        return ret
-
-    if hasattr(func, '__name__'):
-        wrapper.__name__ = func.__name__
-    wrapper.__is_inferring_sig = True
-    return wrapper
-
-
-def infer_class(cls):
-    """Class decorator for inferring signatures of all methods of the class."""
-    for attr, value in cls.__dict__.items():
-        if type(value) is type(infer_class):
-            setattr(cls, attr, infer_method_signature(cls.__name__)(value))
-    return cls
-
-
-def infer_module(namespace):
-    if hasattr(namespace, '__dict__'):
-        namespace = namespace.__dict__
-    for name, value in list(namespace.items()):
-        if inspect.isfunction(value):
-            namespace[name] = infer_signature(value)
-        elif inspect.isclass(value):
-            namespace[name] = infer_class(value)
-
-
-def update_var_db(key, value):
-    type = infer_value_type(value)
-    update_db(var_db, key, type)
-
-
-def update_db(db, key, type):
-    if key not in db:
-        db[key] = type
-    else:
-        db[key] = combine_types(db[key], type)
-
-
-def merge_db(db, other):
-    assert id(db) != id(other)
-    for key in other.keys():
-        if key not in db:
-            db[key] = other[key]
-        else:
-            db[key] = combine_types(db[key], other[key])
-
-
-def infer_value_type(value, depth=0):
-    # Prevent infinite recursion
-    if depth > 5:
-        return Unknown()
-    depth += 1
-
-    if value is None:
-        return None
-    elif isinstance(value, list):
-        return Generic('List', [infer_value_types(value, depth)])
-    elif isinstance(value, dict):
-        keytype = infer_value_types(value.keys(), depth)
-        valuetype = infer_value_types(value.values(), depth)
-        return Generic('Dict', (keytype, valuetype))
-    elif isinstance(value, tuple):
-        if len(value) <= MAX_INFERRED_TUPLE_LENGTH:
-            return Tuple(infer_value_type(item, depth)
-                         for item in value)
-        else:
-            return Generic('TupleSequence', [infer_value_types(value, depth)])
-    elif isinstance(value, set):
-        return Generic('Set', [infer_value_types(value, depth)])
-    elif isinstance(value, types.MethodType) or isinstance(value, types.FunctionType):
-        return Instance(Callable)
-    else:
-        for t in type(value).mro():
-            if get_defining_file(t) in ignore_files:
-                continue
-            elif t is object:
-                return Any()
-            elif hasattr(types, 'InstanceType') and t is types.InstanceType:
-                return Any()
-            else:
-                return Instance(t)
-        else:
-            return Any()
-
-
-def infer_value_types(values, depth=0):
-    """Infer a single type for an iterable of values.
-
-    >>> infer_value_types((1, 'x'))
-    Union(int, str)
-    >>> infer_value_types([])
-    Unknown
-    """
-    inferred = Unknown()
-    for value in sample(values):
-        type = infer_value_type(value, depth)
-        inferred = combine_types(inferred, type)
-    return inferred
-
-
-def sample(values):
-    # TODO only return a sample of values
-    return list(values)
-
-
-def union_many_types(*types):
-    union = Unknown()
-    for t in types:
-        union = combine_types(union, t)
-    return union
-
-
-def combine_types(x, y):
-    """Perform a union of two types.
-
-    >>> combine_types(Instance(int), None)
-    Optional[int]
-    """
-    if isinstance(x, Unknown):
-        return y
-    if isinstance(y, Unknown):
-        return x
-    if isinstance(x, Any):
-        return x
-    if isinstance(y, Any):
-        return y
-    if isinstance(x, Union):
-        return combine_either(x, y)
-    if isinstance(y, Union):
-        return combine_either(y, x)
-    if x == y:
-        return x
-    return simplify_either([x], [y])
-
-
-def combine_either(either, x):
-    if isinstance(x, Union):
-        xtypes = x.types
-    else:
-        xtypes = [x]
-    return simplify_either(either.types, xtypes)
-
-
-def simplify_either(x, y):
-    numerics = [Instance(int), Instance(float), Instance(complex)]
-
-    # TODO this is O(n**2); use an O(n) algorithm instead
-    result = list(x)
-    for type in y:
-        if isinstance(type, Generic):
-            for i, rt in enumerate(result):
-                if isinstance(rt, Generic) and type.typename == rt.typename:
-                    result[i] = Generic(rt.typename,
-                                        (combine_types(t, s)
-                                         for t, s in zip(type.args, rt.args)))
-                    break
-            else:
-                result.append(type)
-        elif isinstance(type, Tuple):
-            for i, rt in enumerate(result):
-                if isinstance(rt, Tuple) and len(type) == len(rt):
-                    result[i] = Tuple(combine_types(t, s)
-                                      for t, s in zip(type.itemtypes,
-                                                      rt.itemtypes))
-                    break
-            else:
-                result.append(type)
-        elif type in numerics:
-            for i, rt in enumerate(result):
-                if rt in numerics:
-                    result[i] = numerics[max(numerics.index(rt), numerics.index(type))]
-                    break
-            else:
-                result.append(type)
-        elif isinstance(type, Instance):
-            for i, rt in enumerate(result):
-                if isinstance(rt, Instance):
-                    # Union[A, SubclassOfA] -> A
-                    # Union[A, A] -> A, because issubclass(A, A) == True,
-                    if issubclass(type.typeobj, rt.typeobj):
-                        break
-                    elif issubclass(rt.typeobj, type.typeobj):
-                        result[i] = type
-                        break
-            else:
-                result.append(type)
-        elif type not in result:
-            result.append(type)
-
-    if len(result) > 1:
-        return Union(result)
-    else:
-        return result[0]
-
-
-class TypeBase(object):
-    """Abstract base class of all type objects.
-
-    Type objects use isinstance tests liberally -- they don't support duck
-    typing well.
-    """
-
-    def __eq__(self, other):
-        if type(other) is not type(self):
-            return False
-        for attr in self.__dict__:
-            if getattr(other, attr) != getattr(self, attr):
-                return False
-        return True
-
-    def __ne__(self, other):
-        return not self == other
-
-    def __repr__(self):
-        return str(self)
-
-
-class Instance(TypeBase):
-    def __init__(self, typeobj):
-        assert not inspect.isclass(typeobj) or not issubclass(typeobj, TypeBase)
-        self.typeobj = typeobj
-
-    def __str__(self):
-        # cheat on regular expression objects which have weird class names
-        # to be consistent with typing.py
-        if self.typeobj == Pattern:
-            return "Pattern"
-        elif self.typeobj == Match:
-            return "Match"
-        else:
-            return self.typeobj.__name__
-
-    def __repr__(self):
-        return 'Instance(%s)' % self
-
-
-class Generic(TypeBase):
-    def __init__(self, typename, args):
-        self.typename = typename
-        self.args = tuple(args)
-
-    def __str__(self):
-        return '%s[%s]' % (self.typename, ', '.join(str(t)
-                                                    for t in self.args))
-
-
-class Tuple(TypeBase):
-    def __init__(self, itemtypes):
-        self.itemtypes = tuple(itemtypes)
-
-    def __len__(self):
-        return len(self.itemtypes)
-
-    def __str__(self):
-        return 'Tuple[%s]' % (', '.join(str(t) for t in self.itemtypes))
-
-
-class Union(TypeBase):
-    def __init__(self, types):
-        assert len(types) > 1
-        self.types = tuple(types)
-
-    def __eq__(self, other):
-        if type(other) is not Union:
-            return False
-        # TODO this is O(n**2); use an O(n) algorithm instead
-        for t in self.types:
-            if t not in other.types:
-                return False
-        for t in other.types:
-            if t not in self.types:
-                return False
-        return True
-
-    def __str__(self):
-        types = list(self.types)
-        if str != bytes:  # on Python 2 str == bytes
-            if Instance(bytes) in types and Instance(str) in types:
-                # we convert Union[bytes, str] -> AnyStr as late as possible so we avoid
-                # corner cases like subclasses of bytes or str
-                types.remove(Instance(bytes))
-                types.remove(Instance(str))
-                types.append(Instance(AnyStr))
-        if len(types) == 1:
-            return str(types[0])
-        elif len(types) == 2 and None in types:
-            type = [t for t in types if t is not None][0]
-            return 'Optional[%s]' % type
-        else:
-            return 'Union[%s]' % (', '.join(sorted(str(t) for t in types)))
-
-
-class Unknown(TypeBase):
-    def __str__(self):
-        return 'Unknown'
-
-    def __repr__(self):
-        return 'Unknown()'
-
-
-class Any(TypeBase):
-    def __str__(self):
-        return 'Any'
-
-    def __repr__(self):
-        return 'Any()'
-
-
-class AnyStr(object): pass
-class Callable(object): pass
-import re
-Pattern = type(re.compile(u''))
-Match = type(re.match(u'', u''))
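
Tying the pieces of pinfer.py together: infer_signature() wraps a function so
that every call records the observed argument and return types,
combine_types() merges those observations into unions and Optionals, and
format_state() renders the result as draft annotations.  A sketch of the
intended use, mirroring the unit tests below:

    import pinfer

    @pinfer.infer_signature
    def f(x, y):
        return x * y

    f(1, 2)
    f(1, 'x')
    print(pinfer.format_state())
    # roughly: def f(x: int, y: Union[int, str]) -> Union[int, str]
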
diff --git a/pinfer/test_pinfer.py b/pinfer/test_pinfer.py
deleted file mode 100644
index d6168db..0000000
--- a/pinfer/test_pinfer.py
+++ /dev/null
@@ -1,302 +0,0 @@
-"""Test cases for the infer module"""
-
-import unittest
-
-from pinfer import Instance, Generic, Tuple, Union, Unknown
-import pinfer
-
-
-class TestInfer(unittest.TestCase):
-    def setUp(self):
-        self.int = Instance(int)
-        self.float = Instance(float)
-
-    def tearDown(self):
-        pinfer.reset()
-
-    def test_instance(self):
-        i = self.int
-        self.assertEqual(i.typeobj, int)
-        self.assertEqual(str(i), 'int')
-        self.assertEqual(repr(i), 'Instance(int)')
-
-        self.assertTrue(i == Instance(int))
-        self.assertFalse(i != Instance(int))
-        self.assertTrue(i != self.float)
-        self.assertFalse(i == self.float)
-        self.assertNotEqual(i, None)
-
-    def test_generic_with_one_arg(self):
-        g = Generic('List', [self.int])
-        self.assertEqual(g.typename, 'List')
-        self.assertEqual(str(g.args), '(Instance(int),)')
-        self.assertEqual(str(g), 'List[int]')
-        self.assertEqual(repr(g), 'List[int]')
-
-        self.assertEqual(g, Generic('List', [self.int]))
-        self.assertNotEqual(g, Generic('Set', [self.int]))
-        self.assertNotEqual(g, Generic('List', [self.float]))
-        self.assertNotEqual(g, self.int)
-
-    def test_generic_with_two_args(self):
-        g = Generic('Dict', (self.int, self.float))
-        self.assertEqual(g.typename, 'Dict')
-        self.assertEqual(str(g), 'Dict[int, float]')
-
-    def test_tuple(self):
-        t0 = Tuple(())
-        t1 = Tuple([self.int])
-        t2 = Tuple((self.float, self.int))
-        self.assertEqual(t0.itemtypes, ())
-        self.assertEqual(str(t1.itemtypes[0]), 'int')
-        self.assertEqual(str(t2.itemtypes[0]), 'float')
-        self.assertEqual(str(t2.itemtypes[1]), 'int')
-        self.assertEqual(str(t0), 'Tuple[]')
-        self.assertEqual(str(t1), 'Tuple[int]')
-        self.assertEqual(str(t2), 'Tuple[float, int]')
-
-        self.assertEqual(t1, Tuple([self.int]))
-        self.assertNotEqual(t1, Tuple([self.float]))
-        self.assertNotEqual(t1, Tuple([self.int, self.int]))
-        self.assertNotEqual(t1, self.int)
-
-    def test_either(self):
-        i = self.int
-        f = self.float
-        s = Instance(str)
-
-        e2 = Union((i, f))
-        self.assertEqual(len(e2.types), 2)
-        self.assertEqual(str(e2), 'Union[float, int]')
-
-        self.assertEqual(e2, Union((i, f)))
-        self.assertEqual(e2, Union((f, i)))
-        self.assertNotEqual(e2, Union((i, s)))
-        self.assertNotEqual(e2, Union((i, f, s)))
-        self.assertNotEqual(Union((i, f, s)), e2)
-        self.assertNotEqual(e2, i)
-
-    def test_either_as_optional(self):
-        optint = Union((self.int, None))
-        self.assertEqual(str(optint), 'Optional[int]')
-        optfloat = Union((None, self.float))
-        self.assertEqual(str(optfloat), 'Optional[float]')
-        eithernone = Union((self.int, self.float, None))
-        self.assertEqual(str(eithernone), 'Union[None, float, int]')
-
-    def test_unknown(self):
-        unknown = Unknown()
-        self.assertEqual(str(unknown), 'Unknown')
-        self.assertEqual(repr(unknown), 'Unknown()')
-
-        self.assertEqual(unknown, Unknown())
-        self.assertNotEqual(unknown, self.int)
-
-    def test_combine_types(self):
-        i = self.int
-        f = self.float
-        s = Instance(str)
-        c = Instance(complex)
-        class Foo: pass
-        o = Instance(Foo)
-
-        # Simple types
-        self.assert_combine(i, i, i)
-        self.assert_combine(s, s, s)
-        self.assert_combine(i, s, Union((i, s)))
-        self.assert_combine(i, None, Union((i, None)))
-        # Unknowns
-        self.assert_combine(i, Unknown(), i)
-        self.assert_combine(Unknown(), Unknown(), Unknown())
-        # Union types
-        self.assert_combine(o, Union((f, s)), Union((o, f, s)))
-        self.assert_combine(i, Union((i, s)), Union((i, s)))
-        self.assert_combine(Union((o, f)), Union((o, s)), Union((o, f, s)))
-        # Tuple types
-        self.assert_combine(Tuple([i, i]), Tuple([i, i]), Tuple([i, i]))
-        self.assert_combine(Tuple([i, i]), Tuple([o, s]),
-                            Tuple([Union([o, i]), Union([s, i])]))
-        # Numeric types
-        self.assert_combine(i, f, f)
-        self.assert_combine(i, c, c)
-        self.assert_combine(c, f, c)
-        # Unions with numerics
-        self.assert_combine(i, Union((o, f)), Union((o, f)))
-        self.assert_combine(Union((o, f)), i, Union((o, f)))
-        self.assert_combine(Union((o, i)), f, Union((o, f)))
-        # Tuples with numerics
-        self.assert_combine(Tuple([i, i]), Tuple([f, i]), Tuple([f, i]))
-        self.assert_combine(Tuple([i, i]), Tuple([f, o]), Tuple([f, Union((i, o))]))
-        self.assert_combine(Tuple([f, i]), Tuple([i, o]), Tuple([f, Union((i, o))]))
-
-    def test_combine_special_cases(self):
-        i = self.int
-        f = self.float
-        u = Unknown()
-        def list_(x):
-            return Generic('List', [x])
-        # Simplify generic types.
-        self.assert_combine(list_(i), list_(u), list_(i))
-
-    def assert_combine(self, t, s, combined):
-        self.assertEqual(pinfer.combine_types(t, s), combined)
-        self.assertEqual(pinfer.combine_types(s, t), combined)
-
-    def test_sample(self):
-        sample = pinfer.sample
-        self.assertEqual(sample(()), [])
-        self.assertEqual(sample((1, 2)), [1, 2])
-        self.assertEqual(sample([]), [])
-        self.assertEqual(sample([1]), [1])
-        self.assertEqual(sample([1, 2]), [1, 2])
-        # TODO larger collections
-
-    def test_infer_simple_value_type(self):
-        self.assert_infer_type(1, 'int')
-        self.assert_infer_type('', 'str')
-        self.assert_infer_type(None, 'None')
-
-    def test_infer_collection_type(self):
-        # List
-        self.assert_infer_type([], 'List[Unknown]')
-        self.assert_infer_type([1], 'List[int]')
-        self.assert_infer_type([1, None], 'List[Optional[int]]')
-        # Dict
-        self.assert_infer_type({1: 'x', 2: None},
-                               'Dict[int, Optional[str]]')
-        # Set
-        self.assert_infer_type({1, None}, 'Set[Optional[int]]')
-        # Tuple
-        self.assert_infer_type((1, 'x'), 'Tuple[int, str]')
-        self.assert_infer_type((1, None) * 100, 'TupleSequence[Optional[int]]')
-
-    def assert_infer_type(self, value, type):
-        self.assertEqual(str(pinfer.infer_value_type(value)), type)
-
-    def test_infer_variables(self):
-        pinfer.infer_var('x', 1)
-        self.assert_infer_state('x: int')
-        pinfer.infer_var('x', 1)
-        pinfer.infer_var('x', None)
-        pinfer.infer_var('y', 1.1)
-        self.assert_infer_state('x: Optional[int]\n'
-                                'y: float')
-
-    def test_infer_instance_var(self):
-        class A: pass
-        a = A()
-        a.x = 1
-        a.y = 'x'
-        pinfer.infer_attrs(a)
-        self.assert_infer_state('A.x: int\n'
-                                'A.y: str')
-
-    def test_infer_class_var(self):
-        class A:
-            x = 1.1
-        pinfer.infer_attrs(A())
-        self.assert_infer_state('A.x: float')
-
-    def test_infer_function_attr(self):
-        class A:
-            def f(self): pass
-        a = A()
-        a.g = lambda x: 1
-        pinfer.infer_attrs(a)
-        self.assert_infer_state('A.g: Callable')
-
-    def test_infer_simple_function_signature(self):
-        @pinfer.infer_signature
-        def f(a):
-            return 'x'
-        f(1)
-        f(None)
-        self.assertEqual(f.__name__, 'f')
-        self.assert_infer_state('def f(a: Optional[int]) -> str')
-
-    def test_infer_function_with_two_args(self):
-        @pinfer.infer_signature
-        def f(x, y):
-            return x * y
-        f(1, 2)
-        f(1, 'x')
-        self.assert_infer_state(
-            'def f(x: int, y: Union[int, str]) -> Union[int, str]')
-
-    def test_infer_method(self):
-        class A:
-            @pinfer.infer_signature
-            def f(self, x): pass
-        A().f('x')
-        self.assert_infer_state('def f(self, x: str) -> None')
-
-    def test_infer_default_arg_values(self):
-        @pinfer.infer_signature
-        def f(x=1, y=None): pass
-        f()
-        self.assert_infer_state('def f(x: int, y: None) -> None')
-        f('x')
-        f('x', 1.1)
-        f()
-        self.assert_infer_state(
-            'def f(x: Union[int, str], y: Optional[float]) -> None')
-
-    def test_infer_varargs(self):
-        @pinfer.infer_signature
-        def f(x, *y): pass
-        f(1)
-        f(1, 'x', None)
-        self.assert_infer_state('def f(x: int, *y: Optional[str]) -> None')
-        f(1)
-        self.assert_infer_state('def f(x: int, *y: Unknown) -> None')
-
-    def test_infer_keyword_args(self):
-        @pinfer.infer_signature
-        def f(x): pass
-        f(x=1)
-        self.assert_infer_state('def f(x: int) -> None')
-
-        @pinfer.infer_signature
-        def f(x='x'): pass
-        f(x=1)
-        self.assert_infer_state('def f(x: int) -> None')
-
-    def test_infer_keyword_varargs(self):
-        @pinfer.infer_signature
-        def f(a, **kwargs): pass
-        f(None, x=1, y='x')
-        self.assert_infer_state(
-            'def f(a: None, **kwargs: Union[int, str]) -> None')
-
-    def test_infer_class(self):
-        @pinfer.infer_class
-        class A:
-            def f(self, x): return 0
-        A().f('x')
-        self.assert_infer_state('class A(...):\n'
-                                '    def f(self, x: str) -> int')
-
-        @pinfer.infer_class
-        class A:
-            def f(self, x): return 0
-        @pinfer.infer_class
-        class B:
-            def f(self): pass
-            def g(self): pass
-        A().f('')
-        B().f()
-        B().g()
-        self.assert_infer_state('class A(...):\n'
-                                '    def f(self, x: str) -> int\n'
-                                'class B(...):\n'
-                                '    def f(self) -> None\n'
-                                '    def g(self) -> None')
-
-    def assert_infer_state(self, expected):
-        state = pinfer.format_state()
-        self.assertEqual(state, expected)
-        pinfer.reset()
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/pinfer/test_pinfer3.py b/pinfer/test_pinfer3.py
deleted file mode 100644
index 688e8c0..0000000
--- a/pinfer/test_pinfer3.py
+++ /dev/null
@@ -1,31 +0,0 @@
-""" tests cases that require python3 syntax """
-
-import unittest
-import pinfer
-
-# Include all of the shared unit tests
-from test_pinfer import TestInfer
-
-
-class TestInfer3(unittest.TestCase):
-    def test_infer_keyword_only_args(self):
-        # decorators break the parsing
-        def f(x, *, y=0): pass
-        f = pinfer.infer_signature(f)
-        f(1, y='x')
-        self.assert_infer_state(
-            'def f(x: int, *, y: str = 0) -> None')
-
-        def f(*, x=None, y=None): pass
-        f = pinfer.infer_signature(f)
-        f(y='x')
-        self.assert_infer_state(
-            'def f(*, x: None = None, y: str = None) -> None')
-
-    def assert_infer_state(self, expected):
-        state = pinfer.format_state()
-        self.assertEqual(state, expected)
-        pinfer.reset()
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/pinfer/unparse.py b/pinfer/unparse.py
deleted file mode 100644
index 6e1e493..0000000
--- a/pinfer/unparse.py
+++ /dev/null
@@ -1,610 +0,0 @@
-# From Python 2's Demo/parser/unparse.py
-# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
-
-"Usage: unparse.py <path to source file>"
-import sys
-import ast
-import cStringIO
-import os
-
-# Large float and imaginary literals get turned into infinities in the AST.
-# We unparse those infinities to INFSTR.
-INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
-
-def interleave(inter, f, seq):
-    """Call f on each item in seq, calling inter() in between.
-    """
-    seq = iter(seq)
-    try:
-        f(next(seq))
-    except StopIteration:
-        pass
-    else:
-        for x in seq:
-            inter()
-            f(x)
-
-class Unparser:
-    """Methods in this class recursively traverse an AST and
-    output source code for the abstract syntax; original formatting
-    is disregarded. """
-
-    def __init__(self, tree, file = sys.stdout):
-        """Unparser(tree, file=sys.stdout) -> None.
-         Print the source for tree to file."""
-        self.f = file
-        self.future_imports = []
-        self._indent = 0
-        self.dispatch(tree)
-        self.f.write("")
-        self.f.flush()
-
-    def fill(self, text = ""):
-        "Indent a piece of text, according to the current indentation level"
-        self.f.write("\n"+"    "*self._indent + text)
-
-    def write(self, text):
-        "Append a piece of text to the current line."
-        self.f.write(text)
-
-    def enter(self):
-        "Print ':', and increase the indentation."
-        self.write(":")
-        self._indent += 1
-
-    def leave(self):
-        "Decrease the indentation level."
-        self._indent -= 1
-
-    def dispatch(self, tree):
-        "Dispatcher function, dispatching tree type T to method _T."
-        if isinstance(tree, list):
-            for t in tree:
-                self.dispatch(t)
-            return
-        meth = getattr(self, "_"+tree.__class__.__name__)
-        meth(tree)
-
-
-    ############### Unparsing methods ######################
-    # There should be one method per concrete grammar type #
-    # Constructors should be grouped by sum type. Ideally, #
-    # this would follow the order in the grammar, but      #
-    # currently doesn't.                                   #
-    ########################################################
-
-    def _Module(self, tree):
-        for stmt in tree.body:
-            self.dispatch(stmt)
-
-    # stmt
-    def _Expr(self, tree):
-        self.fill()
-        self.dispatch(tree.value)
-
-    def _Import(self, t):
-        self.fill("import ")
-        interleave(lambda: self.write(", "), self.dispatch, t.names)
-
-    def _ImportFrom(self, t):
-        # A from __future__ import may affect unparsing, so record it.
-        if t.module and t.module == '__future__':
-            self.future_imports.extend(n.name for n in t.names)
-
-        self.fill("from ")
-        self.write("." * t.level)
-        if t.module:
-            self.write(t.module)
-        self.write(" import ")
-        interleave(lambda: self.write(", "), self.dispatch, t.names)
-
-    def _Assign(self, t):
-        self.fill()
-        for target in t.targets:
-            self.dispatch(target)
-            self.write(" = ")
-        self.dispatch(t.value)
-
-    def _AugAssign(self, t):
-        self.fill()
-        self.dispatch(t.target)
-        self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
-        self.dispatch(t.value)
-
-    def _Return(self, t):
-        self.fill("return")
-        if t.value:
-            self.write(" ")
-            self.dispatch(t.value)
-
-    def _Pass(self, t):
-        self.fill("pass")
-
-    def _Break(self, t):
-        self.fill("break")
-
-    def _Continue(self, t):
-        self.fill("continue")
-
-    def _Delete(self, t):
-        self.fill("del ")
-        interleave(lambda: self.write(", "), self.dispatch, t.targets)
-
-    def _Assert(self, t):
-        self.fill("assert ")
-        self.dispatch(t.test)
-        if t.msg:
-            self.write(", ")
-            self.dispatch(t.msg)
-
-    def _Exec(self, t):
-        self.fill("exec ")
-        self.dispatch(t.body)
-        if t.globals:
-            self.write(" in ")
-            self.dispatch(t.globals)
-        if t.locals:
-            self.write(", ")
-            self.dispatch(t.locals)
-
-    def _Print(self, t):
-        self.fill("print ")
-        do_comma = False
-        if t.dest:
-            self.write(">>")
-            self.dispatch(t.dest)
-            do_comma = True
-        for e in t.values:
-            if do_comma:self.write(", ")
-            else:do_comma=True
-            self.dispatch(e)
-        if not t.nl:
-            self.write(",")
-
-    def _Global(self, t):
-        self.fill("global ")
-        interleave(lambda: self.write(", "), self.write, t.names)
-
-    def _Yield(self, t):
-        self.write("(")
-        self.write("yield")
-        if t.value:
-            self.write(" ")
-            self.dispatch(t.value)
-        self.write(")")
-
-    def _Raise(self, t):
-        self.fill('raise ')
-        if t.type:
-            self.dispatch(t.type)
-        if t.inst:
-            self.write(", ")
-            self.dispatch(t.inst)
-        if t.tback:
-            self.write(", ")
-            self.dispatch(t.tback)
-
-    def _TryExcept(self, t):
-        self.fill("try")
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-
-        for ex in t.handlers:
-            self.dispatch(ex)
-        if t.orelse:
-            self.fill("else")
-            self.enter()
-            self.dispatch(t.orelse)
-            self.leave()
-
-    def _TryFinally(self, t):
-        if len(t.body) == 1 and isinstance(t.body[0], ast.TryExcept):
-            # try-except-finally
-            self.dispatch(t.body)
-        else:
-            self.fill("try")
-            self.enter()
-            self.dispatch(t.body)
-            self.leave()
-
-        self.fill("finally")
-        self.enter()
-        self.dispatch(t.finalbody)
-        self.leave()
-
-    def _ExceptHandler(self, t):
-        self.fill("except")
-        if t.type:
-            self.write(" ")
-            self.dispatch(t.type)
-        if t.name:
-            self.write(" as ")
-            self.dispatch(t.name)
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-
-    def _ClassDef(self, t):
-        self.write("\n")
-        for deco in t.decorator_list:
-            self.fill("@")
-            self.dispatch(deco)
-        self.fill("class "+t.name)
-        if t.bases:
-            self.write("(")
-            for a in t.bases:
-                self.dispatch(a)
-                self.write(", ")
-            self.write(")")
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-
-    def _FunctionDef(self, t):
-        self.write("\n")
-        for deco in t.decorator_list:
-            self.fill("@")
-            self.dispatch(deco)
-        self.fill("def "+t.name + "(")
-        self.dispatch(t.args)
-        self.write(")")
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-
-    def _For(self, t):
-        self.fill("for ")
-        self.dispatch(t.target)
-        self.write(" in ")
-        self.dispatch(t.iter)
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-        if t.orelse:
-            self.fill("else")
-            self.enter()
-            self.dispatch(t.orelse)
-            self.leave()
-
-    def _If(self, t):
-        self.fill("if ")
-        self.dispatch(t.test)
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-        # collapse nested ifs into equivalent elifs.
-        while (t.orelse and len(t.orelse) == 1 and
-               isinstance(t.orelse[0], ast.If)):
-            t = t.orelse[0]
-            self.fill("elif ")
-            self.dispatch(t.test)
-            self.enter()
-            self.dispatch(t.body)
-            self.leave()
-        # final else
-        if t.orelse:
-            self.fill("else")
-            self.enter()
-            self.dispatch(t.orelse)
-            self.leave()
-
-    def _While(self, t):
-        self.fill("while ")
-        self.dispatch(t.test)
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-        if t.orelse:
-            self.fill("else")
-            self.enter()
-            self.dispatch(t.orelse)
-            self.leave()
-
-    def _With(self, t):
-        self.fill("with ")
-        self.dispatch(t.context_expr)
-        if t.optional_vars:
-            self.write(" as ")
-            self.dispatch(t.optional_vars)
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-
-    # expr
-    def _Str(self, tree):
-        # if from __future__ import unicode_literals is in effect,
-        # then we want to output string literals using a 'b' prefix
-        # and unicode literals with no prefix.
-        if "unicode_literals" not in self.future_imports:
-            self.write(repr(tree.s))
-        elif isinstance(tree.s, str):
-            self.write("b" + repr(tree.s))
-        elif isinstance(tree.s, unicode):
-            self.write(repr(tree.s).lstrip("u"))
-        else:
-            assert False, "shouldn't get here"
-
-    def _Name(self, t):
-        self.write(t.id)
-
-    def _Repr(self, t):
-        self.write("`")
-        self.dispatch(t.value)
-        self.write("`")
-
-    def _Num(self, t):
-        repr_n = repr(t.n)
-        # Parenthesize negative numbers, to avoid turning (-1)**2 into -1**2.
-        if repr_n.startswith("-"):
-            self.write("(")
-        # Substitute overflowing decimal literal for AST infinities.
-        self.write(repr_n.replace("inf", INFSTR))
-        if repr_n.startswith("-"):
-            self.write(")")
-
-    def _List(self, t):
-        self.write("[")
-        interleave(lambda: self.write(", "), self.dispatch, t.elts)
-        self.write("]")
-
-    def _ListComp(self, t):
-        self.write("[")
-        self.dispatch(t.elt)
-        for gen in t.generators:
-            self.dispatch(gen)
-        self.write("]")
-
-    def _GeneratorExp(self, t):
-        self.write("(")
-        self.dispatch(t.elt)
-        for gen in t.generators:
-            self.dispatch(gen)
-        self.write(")")
-
-    def _SetComp(self, t):
-        self.write("{")
-        self.dispatch(t.elt)
-        for gen in t.generators:
-            self.dispatch(gen)
-        self.write("}")
-
-    def _DictComp(self, t):
-        self.write("{")
-        self.dispatch(t.key)
-        self.write(": ")
-        self.dispatch(t.value)
-        for gen in t.generators:
-            self.dispatch(gen)
-        self.write("}")
-
-    def _comprehension(self, t):
-        self.write(" for ")
-        self.dispatch(t.target)
-        self.write(" in ")
-        self.dispatch(t.iter)
-        for if_clause in t.ifs:
-            self.write(" if ")
-            self.dispatch(if_clause)
-
-    def _IfExp(self, t):
-        self.write("(")
-        self.dispatch(t.body)
-        self.write(" if ")
-        self.dispatch(t.test)
-        self.write(" else ")
-        self.dispatch(t.orelse)
-        self.write(")")
-
-    def _Set(self, t):
-        assert(t.elts) # should be at least one element
-        self.write("{")
-        interleave(lambda: self.write(", "), self.dispatch, t.elts)
-        self.write("}")
-
-    def _Dict(self, t):
-        self.write("{")
-        def write_pair(pair):
-            (k, v) = pair
-            self.dispatch(k)
-            self.write(": ")
-            self.dispatch(v)
-        interleave(lambda: self.write(", "), write_pair, zip(t.keys, t.values))
-        self.write("}")
-
-    def _Tuple(self, t):
-        self.write("(")
-        if len(t.elts) == 1:
-            (elt,) = t.elts
-            self.dispatch(elt)
-            self.write(",")
-        else:
-            interleave(lambda: self.write(", "), self.dispatch, t.elts)
-        self.write(")")
-
-    unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"}
-    def _UnaryOp(self, t):
-        self.write("(")
-        self.write(self.unop[t.op.__class__.__name__])
-        self.write(" ")
-        # If we're applying unary minus to a number, parenthesize the number.
-        # This is necessary: -2147483648 is different from -(2147483648) on
-        # a 32-bit machine (the first is an int, the second a long), and
-        # -7j is different from -(7j).  (The first has real part 0.0, the second
-        # has real part -0.0.)
-        if isinstance(t.op, ast.USub) and isinstance(t.operand, ast.Num):
-            self.write("(")
-            self.dispatch(t.operand)
-            self.write(")")
-        else:
-            self.dispatch(t.operand)
-        self.write(")")
-
-    binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%",
-                    "LShift":"<<", "RShift":">>", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
-                    "FloorDiv":"//", "Pow": "**"}
-    def _BinOp(self, t):
-        self.write("(")
-        self.dispatch(t.left)
-        self.write(" " + self.binop[t.op.__class__.__name__] + " ")
-        self.dispatch(t.right)
-        self.write(")")
-
-    cmpops = {"Eq":"==", "NotEq":"!=", "Lt":"<", "LtE":"<=", "Gt":">", "GtE":">=",
-                        "Is":"is", "IsNot":"is not", "In":"in", "NotIn":"not in"}
-    def _Compare(self, t):
-        self.write("(")
-        self.dispatch(t.left)
-        for o, e in zip(t.ops, t.comparators):
-            self.write(" " + self.cmpops[o.__class__.__name__] + " ")
-            self.dispatch(e)
-        self.write(")")
-
-    boolops = {ast.And: 'and', ast.Or: 'or'}
-    def _BoolOp(self, t):
-        self.write("(")
-        s = " %s " % self.boolops[t.op.__class__]
-        interleave(lambda: self.write(s), self.dispatch, t.values)
-        self.write(")")
-
-    def _Attribute(self,t):
-        self.dispatch(t.value)
-        # Special case: 3.__abs__() is a syntax error, so if t.value
-        # is an integer literal then we need to either parenthesize
-        # it or add an extra space to get 3 .__abs__().
-        if isinstance(t.value, ast.Num) and isinstance(t.value.n, int):
-            self.write(" ")
-        self.write(".")
-        self.write(t.attr)
-
-    def _Call(self, t):
-        self.dispatch(t.func)
-        self.write("(")
-        comma = False
-        for e in t.args:
-            if comma: self.write(", ")
-            else: comma = True
-            self.dispatch(e)
-        for e in t.keywords:
-            if comma: self.write(", ")
-            else: comma = True
-            self.dispatch(e)
-        if t.starargs:
-            if comma: self.write(", ")
-            else: comma = True
-            self.write("*")
-            self.dispatch(t.starargs)
-        if t.kwargs:
-            if comma: self.write(", ")
-            else: comma = True
-            self.write("**")
-            self.dispatch(t.kwargs)
-        self.write(")")
-
-    def _Subscript(self, t):
-        self.dispatch(t.value)
-        self.write("[")
-        self.dispatch(t.slice)
-        self.write("]")
-
-    # slice
-    def _Ellipsis(self, t):
-        self.write("...")
-
-    def _Index(self, t):
-        self.dispatch(t.value)
-
-    def _Slice(self, t):
-        if t.lower:
-            self.dispatch(t.lower)
-        self.write(":")
-        if t.upper:
-            self.dispatch(t.upper)
-        if t.step:
-            self.write(":")
-            self.dispatch(t.step)
-
-    def _ExtSlice(self, t):
-        interleave(lambda: self.write(', '), self.dispatch, t.dims)
-
-    # others
-    def _arguments(self, t):
-        first = True
-        # normal arguments
-        defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
-        for a,d in zip(t.args, defaults):
-            if first:first = False
-            else: self.write(", ")
-            self.dispatch(a),
-            if d:
-                self.write("=")
-                self.dispatch(d)
-
-        # varargs
-        if t.vararg:
-            if first:first = False
-            else: self.write(", ")
-            self.write("*")
-            self.write(t.vararg)
-
-        # kwargs
-        if t.kwarg:
-            if first:first = False
-            else: self.write(", ")
-            self.write("**"+t.kwarg)
-
-    def _keyword(self, t):
-        self.write(t.arg)
-        self.write("=")
-        self.dispatch(t.value)
-
-    def _Lambda(self, t):
-        self.write("(")
-        self.write("lambda ")
-        self.dispatch(t.args)
-        self.write(": ")
-        self.dispatch(t.body)
-        self.write(")")
-
-    def _alias(self, t):
-        self.write(t.name)
-        if t.asname:
-            self.write(" as "+t.asname)
-
-def roundtrip(filename, output=sys.stdout):
-    with open(filename, "r") as pyfile:
-        source = pyfile.read()
-    tree = compile(source, filename, "exec", ast.PyCF_ONLY_AST)
-    Unparser(tree, output)
-
-
-
-def testdir(a):
-    try:
-        names = [n for n in os.listdir(a) if n.endswith('.py')]
-    except OSError:
-        sys.stderr.write("Directory not readable: %s" % a)
-    else:
-        for n in names:
-            fullname = os.path.join(a, n)
-            if os.path.isfile(fullname):
-                output = cStringIO.StringIO()
-                print 'Testing %s' % fullname
-                try:
-                    roundtrip(fullname, output)
-                except Exception as e:
-                    print '  Failed to compile, exception is %s' % repr(e)
-            elif os.path.isdir(fullname):
-                testdir(fullname)
-
-def main(args):
-    if args[0] == '--testdir':
-        for a in args[1:]:
-            testdir(a)
-    else:
-        for a in args:
-            roundtrip(a)
-
-if __name__=='__main__':
-    main(sys.argv[1:])
diff --git a/pinfer/unparse3.py b/pinfer/unparse3.py
deleted file mode 100644
index 0936cb2..0000000
--- a/pinfer/unparse3.py
+++ /dev/null
@@ -1,610 +0,0 @@
-# From Python 3's Tools/parser/unparse.py
-# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
-
-"Usage: unparse.py <path to source file>"
-import sys
-import ast
-import tokenize
-import io
-import os
-
-# Large float and imaginary literals get turned into infinities in the AST.
-# We unparse those infinities to INFSTR.
-INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
-
-def interleave(inter, f, seq):
-    """Call f on each item in seq, calling inter() in between.
-    """
-    seq = iter(seq)
-    try:
-        f(next(seq))
-    except StopIteration:
-        pass
-    else:
-        for x in seq:
-            inter()
-            f(x)
-
-class Unparser:
-    """Methods in this class recursively traverse an AST and
-    output source code for the abstract syntax; original formatting
-    is disregarded. """
-
-    def __init__(self, tree, file = sys.stdout):
-        """Unparser(tree, file=sys.stdout) -> None.
-         Print the source for tree to file."""
-        self.f = file
-        self._indent = 0
-        self.dispatch(tree)
-        print("", file=self.f)
-        self.f.flush()
-
-    def fill(self, text = ""):
-        "Indent a piece of text, according to the current indentation level"
-        self.f.write("\n"+"    "*self._indent + text)
-
-    def write(self, text):
-        "Append a piece of text to the current line."
-        self.f.write(text)
-
-    def enter(self):
-        "Print ':', and increase the indentation."
-        self.write(":")
-        self._indent += 1
-
-    def leave(self):
-        "Decrease the indentation level."
-        self._indent -= 1
-
-    def dispatch(self, tree):
-        "Dispatcher function, dispatching tree type T to method _T."
-        if isinstance(tree, list):
-            for t in tree:
-                self.dispatch(t)
-            return
-        meth = getattr(self, "_"+tree.__class__.__name__)
-        meth(tree)
-
-
-    ############### Unparsing methods ######################
-    # There should be one method per concrete grammar type #
-    # Constructors should be grouped by sum type. Ideally, #
-    # this would follow the order in the grammar, but      #
-    # currently doesn't.                                   #
-    ########################################################
-
-    def _Module(self, tree):
-        for stmt in tree.body:
-            self.dispatch(stmt)
-
-    # stmt
-    def _Expr(self, tree):
-        self.fill()
-        self.dispatch(tree.value)
-
-    def _Import(self, t):
-        self.fill("import ")
-        interleave(lambda: self.write(", "), self.dispatch, t.names)
-
-    def _ImportFrom(self, t):
-        self.fill("from ")
-        self.write("." * t.level)
-        if t.module:
-            self.write(t.module)
-        self.write(" import ")
-        interleave(lambda: self.write(", "), self.dispatch, t.names)
-
-    def _Assign(self, t):
-        self.fill()
-        for target in t.targets:
-            self.dispatch(target)
-            self.write(" = ")
-        self.dispatch(t.value)
-
-    def _AugAssign(self, t):
-        self.fill()
-        self.dispatch(t.target)
-        self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
-        self.dispatch(t.value)
-
-    def _Return(self, t):
-        self.fill("return")
-        if t.value:
-            self.write(" ")
-            self.dispatch(t.value)
-
-    def _Pass(self, t):
-        self.fill("pass")
-
-    def _Break(self, t):
-        self.fill("break")
-
-    def _Continue(self, t):
-        self.fill("continue")
-
-    def _Delete(self, t):
-        self.fill("del ")
-        interleave(lambda: self.write(", "), self.dispatch, t.targets)
-
-    def _Assert(self, t):
-        self.fill("assert ")
-        self.dispatch(t.test)
-        if t.msg:
-            self.write(", ")
-            self.dispatch(t.msg)
-
-    def _Global(self, t):
-        self.fill("global ")
-        interleave(lambda: self.write(", "), self.write, t.names)
-
-    def _Nonlocal(self, t):
-        self.fill("nonlocal ")
-        interleave(lambda: self.write(", "), self.write, t.names)
-
-    def _Yield(self, t):
-        self.write("(")
-        self.write("yield")
-        if t.value:
-            self.write(" ")
-            self.dispatch(t.value)
-        self.write(")")
-
-    def _YieldFrom(self, t):
-        self.write("(")
-        self.write("yield from")
-        if t.value:
-            self.write(" ")
-            self.dispatch(t.value)
-        self.write(")")
-
-    def _Raise(self, t):
-        self.fill("raise")
-        if not t.exc:
-            assert not t.cause
-            return
-        self.write(" ")
-        self.dispatch(t.exc)
-        if t.cause:
-            self.write(" from ")
-            self.dispatch(t.cause)
-
-    def _Try(self, t):
-        self.fill("try")
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-        for ex in t.handlers:
-            self.dispatch(ex)
-        if t.orelse:
-            self.fill("else")
-            self.enter()
-            self.dispatch(t.orelse)
-            self.leave()
-        if t.finalbody:
-            self.fill("finally")
-            self.enter()
-            self.dispatch(t.finalbody)
-            self.leave()
-
-    def _ExceptHandler(self, t):
-        self.fill("except")
-        if t.type:
-            self.write(" ")
-            self.dispatch(t.type)
-        if t.name:
-            self.write(" as ")
-            self.write(t.name)
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-
-    def _ClassDef(self, t):
-        self.write("\n")
-        for deco in t.decorator_list:
-            self.fill("@")
-            self.dispatch(deco)
-        self.fill("class "+t.name)
-        self.write("(")
-        comma = False
-        for e in t.bases:
-            if comma: self.write(", ")
-            else: comma = True
-            self.dispatch(e)
-        for e in t.keywords:
-            if comma: self.write(", ")
-            else: comma = True
-            self.dispatch(e)
-        if t.starargs:
-            if comma: self.write(", ")
-            else: comma = True
-            self.write("*")
-            self.dispatch(t.starargs)
-        if t.kwargs:
-            if comma: self.write(", ")
-            else: comma = True
-            self.write("**")
-            self.dispatch(t.kwargs)
-        self.write(")")
-
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-
-    def _FunctionDef(self, t):
-        self.write("\n")
-        for deco in t.decorator_list:
-            self.fill("@")
-            self.dispatch(deco)
-        self.fill("def "+t.name + "(")
-        self.dispatch(t.args)
-        self.write(")")
-        if t.returns:
-            self.write(" -> ")
-            self.dispatch(t.returns)
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-
-    def _For(self, t):
-        self.fill("for ")
-        self.dispatch(t.target)
-        self.write(" in ")
-        self.dispatch(t.iter)
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-        if t.orelse:
-            self.fill("else")
-            self.enter()
-            self.dispatch(t.orelse)
-            self.leave()
-
-    def _If(self, t):
-        self.fill("if ")
-        self.dispatch(t.test)
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-        # collapse nested ifs into equivalent elifs.
-        while (t.orelse and len(t.orelse) == 1 and
-               isinstance(t.orelse[0], ast.If)):
-            t = t.orelse[0]
-            self.fill("elif ")
-            self.dispatch(t.test)
-            self.enter()
-            self.dispatch(t.body)
-            self.leave()
-        # final else
-        if t.orelse:
-            self.fill("else")
-            self.enter()
-            self.dispatch(t.orelse)
-            self.leave()
-
-    def _While(self, t):
-        self.fill("while ")
-        self.dispatch(t.test)
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-        if t.orelse:
-            self.fill("else")
-            self.enter()
-            self.dispatch(t.orelse)
-            self.leave()
-
-    def _With(self, t):
-        self.fill("with ")
-        interleave(lambda: self.write(", "), self.dispatch, t.items)
-        self.enter()
-        self.dispatch(t.body)
-        self.leave()
-
-    # expr
-    def _Bytes(self, t):
-        self.write(repr(t.s))
-
-    def _Str(self, tree):
-        self.write(repr(tree.s))
-
-    def _Name(self, t):
-        self.write(t.id)
-
-    def _NameConstant(self, t):
-        self.write(repr(t.value))
-
-    def _Num(self, t):
-        # Substitute overflowing decimal literal for AST infinities.
-        self.write(repr(t.n).replace("inf", INFSTR))
-
-    def _List(self, t):
-        self.write("[")
-        interleave(lambda: self.write(", "), self.dispatch, t.elts)
-        self.write("]")
-
-    def _ListComp(self, t):
-        self.write("[")
-        self.dispatch(t.elt)
-        for gen in t.generators:
-            self.dispatch(gen)
-        self.write("]")
-
-    def _GeneratorExp(self, t):
-        self.write("(")
-        self.dispatch(t.elt)
-        for gen in t.generators:
-            self.dispatch(gen)
-        self.write(")")
-
-    def _SetComp(self, t):
-        self.write("{")
-        self.dispatch(t.elt)
-        for gen in t.generators:
-            self.dispatch(gen)
-        self.write("}")
-
-    def _DictComp(self, t):
-        self.write("{")
-        self.dispatch(t.key)
-        self.write(": ")
-        self.dispatch(t.value)
-        for gen in t.generators:
-            self.dispatch(gen)
-        self.write("}")
-
-    def _comprehension(self, t):
-        self.write(" for ")
-        self.dispatch(t.target)
-        self.write(" in ")
-        self.dispatch(t.iter)
-        for if_clause in t.ifs:
-            self.write(" if ")
-            self.dispatch(if_clause)
-
-    def _IfExp(self, t):
-        self.write("(")
-        self.dispatch(t.body)
-        self.write(" if ")
-        self.dispatch(t.test)
-        self.write(" else ")
-        self.dispatch(t.orelse)
-        self.write(")")
-
-    def _Set(self, t):
-        assert(t.elts) # should be at least one element
-        self.write("{")
-        interleave(lambda: self.write(", "), self.dispatch, t.elts)
-        self.write("}")
-
-    def _Dict(self, t):
-        self.write("{")
-        def write_pair(pair):
-            (k, v) = pair
-            self.dispatch(k)
-            self.write(": ")
-            self.dispatch(v)
-        interleave(lambda: self.write(", "), write_pair, zip(t.keys, t.values))
-        self.write("}")
-
-    def _Tuple(self, t):
-        self.write("(")
-        if len(t.elts) == 1:
-            (elt,) = t.elts
-            self.dispatch(elt)
-            self.write(",")
-        else:
-            interleave(lambda: self.write(", "), self.dispatch, t.elts)
-        self.write(")")
-
-    unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"}
-    def _UnaryOp(self, t):
-        self.write("(")
-        self.write(self.unop[t.op.__class__.__name__])
-        self.write(" ")
-        self.dispatch(t.operand)
-        self.write(")")
-
-    binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%",
-                    "LShift":"<<", "RShift":">>", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
-                    "FloorDiv":"//", "Pow": "**"}
-    def _BinOp(self, t):
-        self.write("(")
-        self.dispatch(t.left)
-        self.write(" " + self.binop[t.op.__class__.__name__] + " ")
-        self.dispatch(t.right)
-        self.write(")")
-
-    cmpops = {"Eq":"==", "NotEq":"!=", "Lt":"<", "LtE":"<=", "Gt":">", "GtE":">=",
-                        "Is":"is", "IsNot":"is not", "In":"in", "NotIn":"not in"}
-    def _Compare(self, t):
-        self.write("(")
-        self.dispatch(t.left)
-        for o, e in zip(t.ops, t.comparators):
-            self.write(" " + self.cmpops[o.__class__.__name__] + " ")
-            self.dispatch(e)
-        self.write(")")
-
-    boolops = {ast.And: 'and', ast.Or: 'or'}
-    def _BoolOp(self, t):
-        self.write("(")
-        s = " %s " % self.boolops[t.op.__class__]
-        interleave(lambda: self.write(s), self.dispatch, t.values)
-        self.write(")")
-
-    def _Attribute(self,t):
-        self.dispatch(t.value)
-        # Special case: 3.__abs__() is a syntax error, so if t.value
-        # is an integer literal then we need to either parenthesize
-        # it or add an extra space to get 3 .__abs__().
-        if isinstance(t.value, ast.Num) and isinstance(t.value.n, int):
-            self.write(" ")
-        self.write(".")
-        self.write(t.attr)
-
-    def _Call(self, t):
-        self.dispatch(t.func)
-        self.write("(")
-        comma = False
-        for e in t.args:
-            if comma: self.write(", ")
-            else: comma = True
-            self.dispatch(e)
-        for e in t.keywords:
-            if comma: self.write(", ")
-            else: comma = True
-            self.dispatch(e)
-        if t.starargs:
-            if comma: self.write(", ")
-            else: comma = True
-            self.write("*")
-            self.dispatch(t.starargs)
-        if t.kwargs:
-            if comma: self.write(", ")
-            else: comma = True
-            self.write("**")
-            self.dispatch(t.kwargs)
-        self.write(")")
-
-    def _Subscript(self, t):
-        self.dispatch(t.value)
-        self.write("[")
-        self.dispatch(t.slice)
-        self.write("]")
-
-    def _Starred(self, t):
-        self.write("*")
-        self.dispatch(t.value)
-
-    # slice
-    def _Ellipsis(self, t):
-        self.write("...")
-
-    def _Index(self, t):
-        self.dispatch(t.value)
-
-    def _Slice(self, t):
-        if t.lower:
-            self.dispatch(t.lower)
-        self.write(":")
-        if t.upper:
-            self.dispatch(t.upper)
-        if t.step:
-            self.write(":")
-            self.dispatch(t.step)
-
-    def _ExtSlice(self, t):
-        interleave(lambda: self.write(', '), self.dispatch, t.dims)
-
-    # argument
-    def _arg(self, t):
-        self.write(t.arg)
-        if t.annotation:
-            self.write(": ")
-            self.dispatch(t.annotation)
-
-    # others
-    def _arguments(self, t):
-        first = True
-        # normal arguments
-        defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
-        for a, d in zip(t.args, defaults):
-            if first:first = False
-            else: self.write(", ")
-            self.dispatch(a)
-            if d:
-                self.write("=")
-                self.dispatch(d)
-
-        # varargs, or bare '*' if no varargs but keyword-only arguments present
-        if t.vararg or t.kwonlyargs:
-            if first:first = False
-            else: self.write(", ")
-            self.write("*")
-            if t.vararg:
-                self.write(t.vararg.arg)
-                if t.vararg.annotation:
-                    self.write(": ")
-                    self.dispatch(t.vararg.annotation)
-
-        # keyword-only arguments
-        if t.kwonlyargs:
-            for a, d in zip(t.kwonlyargs, t.kw_defaults):
-                if first:first = False
-                else: self.write(", ")
-                self.dispatch(a),
-                if d:
-                    self.write("=")
-                    self.dispatch(d)
-
-        # kwargs
-        if t.kwarg:
-            if first:first = False
-            else: self.write(", ")
-            self.write("**"+t.kwarg.arg)
-            if t.kwarg.annotation:
-                self.write(": ")
-                self.dispatch(t.kwarg.annotation)
-
-    def _keyword(self, t):
-        self.write(t.arg)
-        self.write("=")
-        self.dispatch(t.value)
-
-    def _Lambda(self, t):
-        self.write("(")
-        self.write("lambda ")
-        self.dispatch(t.args)
-        self.write(": ")
-        self.dispatch(t.body)
-        self.write(")")
-
-    def _alias(self, t):
-        self.write(t.name)
-        if t.asname:
-            self.write(" as "+t.asname)
-
-    def _withitem(self, t):
-        self.dispatch(t.context_expr)
-        if t.optional_vars:
-            self.write(" as ")
-            self.dispatch(t.optional_vars)
-
-def roundtrip(filename, output=sys.stdout):
-    with open(filename, "rb") as pyfile:
-        encoding = tokenize.detect_encoding(pyfile.readline)[0]
-    with open(filename, "r", encoding=encoding) as pyfile:
-        source = pyfile.read()
-    tree = compile(source, filename, "exec", ast.PyCF_ONLY_AST)
-    Unparser(tree, output)
-
-
-
-def testdir(a):
-    try:
-        names = [n for n in os.listdir(a) if n.endswith('.py')]
-    except OSError:
-        print("Directory not readable: %s" % a, file=sys.stderr)
-    else:
-        for n in names:
-            fullname = os.path.join(a, n)
-            if os.path.isfile(fullname):
-                output = io.StringIO()
-                print('Testing %s' % fullname)
-                try:
-                    roundtrip(fullname, output)
-                except Exception as e:
-                    print('  Failed to compile, exception is %s' % repr(e))
-            elif os.path.isdir(fullname):
-                testdir(fullname)
-
-def main(args):
-    if args[0] == '--testdir':
-        for a in args[1:]:
-            testdir(a)
-    else:
-        for a in args:
-            roundtrip(a)
-
-if __name__=='__main__':
-    main(sys.argv[1:])
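
(Editor's note on the two removed unparsers above: both route AST nodes with the same name-based visitor trick — `dispatch` looks up a method named `_<NodeClassName>` via `getattr` and calls it, recursing over plain lists of statements, one method per concrete grammar node. Below is a minimal, self-contained sketch of that dispatch pattern; the `TinyUnparser` class and its printed output are made up for illustration and are not part of the removed files.)

    import ast

    class TinyUnparser:
        """Toy visitor using the same getattr-based dispatch as the removed unparsers."""

        def dispatch(self, node):
            if isinstance(node, list):
                for item in node:
                    self.dispatch(item)
                return
            # ast.Name goes to _Name, ast.BinOp to _BinOp, and so on.
            getattr(self, "_" + node.__class__.__name__)(node)

        def _Module(self, node):
            self.dispatch(node.body)

        def _Expr(self, node):
            self.dispatch(node.value)

        def _BinOp(self, node):
            self.dispatch(node.left)
            print(type(node.op).__name__)   # e.g. "Add"
            self.dispatch(node.right)

        def _Name(self, node):
            print(node.id)

    # Prints the pieces of "x + y" in source order: x, Add, y.
    TinyUnparser().dispatch(ast.parse("x + y"))
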
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index cfbac25..0000000
--- a/pytest.ini
+++ /dev/null
@@ -1,21 +0,0 @@
-[pytest]
-# testpaths is new in 2.8
-minversion = 2.8
-
-testpaths = mypy/test
-
-python_files = test*.py
-
-# Where do the test cases come from?  We provide our own collection
-# logic by implementing `pytest_pycollect_makeitem` in mypy.test.data;
-# the test files import that module, and pytest sees the magic name
-# and invokes it at the relevant moment.  See
-# http://doc.pytest.org/en/latest/writing_plugins.html#collection-hooks
-
-# Because we provide our own collection logic, disable the default
-# python collector by giving it empty patterns to search for.
-python_classes =
-python_functions =
-
-# always run in parallel (requires pytest-xdist, see test-requirements.txt)
-addopts = -nauto --cov-append --cov-report=
diff --git a/runtests.py b/runtests.py
index 634b4ce..2f1bc7a 100755
--- a/runtests.py
+++ b/runtests.py
@@ -1,17 +1,14 @@
 #!/usr/bin/env python3
 """Mypy test runner."""
 
-from typing import Dict, List, Optional, Set, Iterable
+from typing import Dict, List, Optional, Set, Iterable, Tuple
 
 from mypy.waiter import Waiter, LazySubprocess
 from mypy import util
-from mypy.test.config import test_data_prefix
-from mypy.test.testpythoneval import python_eval_files, python_34_eval_files
 
 import itertools
 import os
 from os.path import join, isdir
-import re
 import sys
 
 
@@ -92,16 +89,19 @@ class Driver:
     def add_mypy_string(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
         self.add_mypy_cmd(name, ['-c'] + list(args), cwd=cwd)
 
-    def add_pytest(self, name: str, pytest_args: List[str], coverage: bool = False) -> None:
-        full_name = 'pytest %s' % name
-        if not self.allow(full_name):
+    def add_pytest(self, files: List[Tuple[str, str]], coverage: bool = True) -> None:
+        pytest_files = [name for kind, name in files
+                        if self.allow('pytest {} {}'.format(kind, name))]
+        if not pytest_files:
             return
+        pytest_args = pytest_files + self.arglist + self.pyt_arglist
         if coverage and self.coverage:
             args = [sys.executable, '-m', 'pytest', '--cov=mypy'] + pytest_args
         else:
             args = [sys.executable, '-m', 'pytest'] + pytest_args
 
-        self.waiter.add(LazySubprocess(full_name, args, env=self.env, passthrough=self.verbosity),
+        self.waiter.add(LazySubprocess('pytest', args, env=self.env,
+                                       passthrough=self.verbosity),
                         sequential=True)
 
     def add_python(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
@@ -170,9 +170,7 @@ def add_basic(driver: Driver) -> None:
 
 
 def add_selftypecheck(driver: Driver) -> None:
-    driver.add_mypy_package('package mypy nonstrict optional', 'mypy', '--config-file',
-                            'mypy_self_check.ini')
-    driver.add_mypy_package('package mypy', 'mypy', '--config-file', 'mypy_strict_optional.ini')
+    driver.add_mypy_package('package mypy', 'mypy', '--config-file', 'mypy_self_check.ini')
 
 
 def find_files(base: str, prefix: str = '', suffix: str = '') -> List[str]:
@@ -199,64 +197,56 @@ def add_imports(driver: Driver) -> None:
             driver.add_python_string('import %s' % mod, 'import %s' % mod)
 
 
-PYTEST_FILES = [os.path.join('mypy', 'test', '{}.py'.format(name)) for name in [
+def test_path(*names: str):
+    return [os.path.join('mypy', 'test', '{}.py'.format(name))
+            for name in names]
+
+
+PYTEST_FILES = test_path(
     'testcheck',
     'testextensions',
     'testdeps',
     'testdiff',
     'testfinegrained',
     'testmerge',
-]]
+    'testtransform',
+    'testtypegen',
+    'testparse',
+    'testsemanal'
+)
+
+SLOW_FILES = test_path(
+    'testpythoneval',
+    'testcmdline',
+    'teststubgen',
+)
+
+MYUNIT_FILES = test_path(
+    'teststubgen',
+    'testargs',
+    'testgraph',
+    'testinfer',
+    'testmoduleinfo',
+    'testreports',
+    'testsolve',
+    'testsubtypes',
+    'testtypes',
+)
+
+for f in find_files('mypy', prefix='test', suffix='.py'):
+    assert f in PYTEST_FILES + SLOW_FILES + MYUNIT_FILES, f
 
 
 def add_pytest(driver: Driver) -> None:
-    driver.add_pytest('pytest', PYTEST_FILES + driver.arglist + driver.pyt_arglist, True)
+    driver.add_pytest([('unit-test', name) for name in PYTEST_FILES] +
+                      [('integration', name) for name in SLOW_FILES])
 
 
 def add_myunit(driver: Driver) -> None:
-    for f in find_files('mypy', prefix='test', suffix='.py'):
+    for f in MYUNIT_FILES:
         mod = file_to_module(f)
-        if mod in ('mypy.test.testpythoneval', 'mypy.test.testcmdline'):
-            # Run Python evaluation integration tests and command-line
-            # parsing tests separately since they are much slower than
-            # proper unit tests.
-            pass
-        elif f in PYTEST_FILES:
-            # This module has been converted to pytest; don't try to use myunit.
-            pass
-        else:
-            driver.add_python_mod('unit-test %s' % mod, 'mypy.myunit', '-m', mod,
-                                  *driver.arglist, coverage=True)
-
-
-def add_pythoneval(driver: Driver) -> None:
-    cases = set()
-    case_re = re.compile(r'^\[case ([^\]]+)\]$')
-    for file in python_eval_files + python_34_eval_files:
-        with open(os.path.join(test_data_prefix, file), 'r') as f:
-            for line in f:
-                m = case_re.match(line)
-                if m:
-                    case_name = m.group(1)
-                    assert case_name[:4] == 'test'
-                    cases.add(case_name[4:5])
-
-    for prefix in sorted(cases):
-        driver.add_python_mod(
-            'eval-test-' + prefix,
-            'mypy.myunit',
-            '-m',
-            'mypy.test.testpythoneval',
-            'test_testpythoneval_PythonEvaluationSuite.test' + prefix + '*',
-            *driver.arglist,
-            coverage=True
-        )
-
-
-def add_cmdline(driver: Driver) -> None:
-    driver.add_python_mod('cmdline-test', 'mypy.myunit',
-                          '-m', 'mypy.test.testcmdline', *driver.arglist,
-                         coverage=True)
+        driver.add_python_mod('myunit unit-test %s' % mod, 'mypy.myunit', '-m', mod,
+                              *driver.arglist, coverage=True)
 
 
 def add_stubs(driver: Driver) -> None:
@@ -305,8 +295,12 @@ def usage(status: int) -> None:
     print()
     print('Examples:')
     print('  %s unit-test  (run unit tests only)' % sys.argv[0])
-    print('  %s unit-test -a "*tuple*"' % sys.argv[0])
-    print('       (run all unit tests with "tuple" in test name)')
+    print('  %s testcheck  (run type checking unit tests only)' % sys.argv[0])
+    print('  %s "pytest unit-test" -a -k -a Tuple' % sys.argv[0])
+    print('       (run all pytest unit tests with "Tuple" in test name)')
+    print()
+    print('You can also run pytest directly without using %s:' % sys.argv[0])
+    print('  pytest mypy/test/testcheck.py -k Tuple')
     print()
     print('Options:')
     print('  -h, --help             show this help')
@@ -430,8 +424,6 @@ def main() -> None:
 
     driver.add_flake8()
     add_pytest(driver)
-    add_pythoneval(driver)
-    add_cmdline(driver)
     add_basic(driver)
     add_selftypecheck(driver)
     add_myunit(driver)
diff --git a/scripts/__pycache__/dumpmodule.cpython-36.pyc b/scripts/__pycache__/dumpmodule.cpython-36.pyc
deleted file mode 100644
index 52e88ed..0000000
Binary files a/scripts/__pycache__/dumpmodule.cpython-36.pyc and /dev/null differ
diff --git a/scripts/myunit b/scripts/myunit
new file mode 100755
index 0000000..43fce06
--- /dev/null
+++ b/scripts/myunit
@@ -0,0 +1,9 @@
+#!/usr/bin/env python3
+"""Myunit test runner command line tool.
+
+Usually used as a slave by runtests.py, but can be used directly.
+"""
+
+from mypy.myunit import main
+
+main()
diff --git a/setup.cfg b/setup.cfg
index c3b5f76..38dde48 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -26,11 +26,6 @@ parallel = true
 [coverage:report]
 show_missing = true
 
-[metadata]
-requires-dist = 
-	typed-ast >= 1.0.4, < 1.1.0
-	typing >= 3.5.3; python_version < "3.5"
-
 [egg_info]
 tag_build = 
 tag_date = 0
diff --git a/setup.py b/setup.py
index 77c3a3b..64d5fac 100644
--- a/setup.py
+++ b/setup.py
@@ -78,7 +78,7 @@ data_files += find_data_files('typeshed', ['*.py', '*.pyi'])
 data_files += find_data_files('xml', ['*.xsd', '*.xslt', '*.css'])
 
 classifiers = [
-    'Development Status :: 2 - Pre-Alpha',
+    'Development Status :: 3 - Alpha',
     'Environment :: Console',
     'Intended Audience :: Developers',
     'License :: OSI Approved :: MIT License',
@@ -91,19 +91,6 @@ classifiers = [
     'Topic :: Software Development',
 ]
 
-
-package_dir = {'mypy': 'mypy'}
-
-
-# These requirements are used when installing by other means than bdist_wheel.
-# E.g. "pip3 install ." or
-# "pip3 install git+git://github.com/python/mypy.git"
-# (as suggested by README.md).
-install_requires = []
-install_requires.append('typed-ast >= 1.0.4, < 1.1.0')
-if sys.version_info < (3, 5):
-    install_requires.append('typing >= 3.5.3')
-
 setup(name='mypy',
       version=version,
       description=description,
@@ -113,13 +100,15 @@ setup(name='mypy',
       url='http://www.mypy-lang.org/',
       license='MIT License',
       platforms=['POSIX'],
-      package_dir=package_dir,
       py_modules=[],
-      packages=['mypy'],
+      packages=['mypy', 'mypy.test', 'mypy.myunit', 'mypy.server'],
       entry_points={'console_scripts': ['mypy=mypy.__main__:console_entry',
                                         'stubgen=mypy.stubgen:main']},
       data_files=data_files,
       classifiers=classifiers,
       cmdclass={'build_py': CustomPythonBuild},
-      install_requires=install_requires,
+      install_requires = ['typed-ast >= 1.1.0, < 1.2.0'],
+      extras_require = {
+          ':python_version < "3.5"': 'typing >= 3.5.3',
+      },
       )
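
(Editor's note on the setup.py hunk just above: the hard-coded `install_requires` list and the `sys.version_info` check are replaced by an inline requirement plus an environment-marker extra, so the `typing` backport is pulled in only on interpreters older than 3.5. A minimal sketch of that setuptools pattern follows; it uses a hypothetical package name and is an illustration, not the upstream setup.py.)

    from setuptools import setup

    setup(
        name='example-package',  # hypothetical name, for illustration only
        version='0.0.0',
        # Unconditional runtime dependency, pinned to a compatible range.
        install_requires=['typed-ast >= 1.1.0, < 1.2.0'],
        extras_require={
            # A key made of ':' plus an environment marker declares a
            # conditional dependency: installed automatically, but only
            # when the target interpreter is older than Python 3.5.
            ':python_version < "3.5"': ['typing >= 3.5.3'],
        },
    )
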
diff --git a/test-data/unit/README.md b/test-data/unit/README.md
index 693e7f4..c464453 100644
--- a/test-data/unit/README.md
+++ b/test-data/unit/README.md
@@ -93,7 +93,9 @@ To run all tests, run the script `runtests.py` in the mypy repository:
 Note that some tests will be disabled for older python versions.
 
 This will run all tests, including integration and regression tests,
-and will type check mypy and verify that all stubs are valid.
+and will type check mypy and verify that all stubs are valid. This may
+take several minutes to run, so you don't want to use this all the time
+while doing development.
 
 You can run a subset of test suites by passing positive or negative
 filters:
@@ -102,20 +104,25 @@ filters:
 
 For example, to run unit tests only, which run pretty quickly:
 
-    $ ./runtests.py unit-test pytest
+    $ ./runtests.py unit-test
 
-The unit test suites are driven by a mixture of test frameworks: mypy's own
-`myunit` framework, and `pytest`, which we're in the process of migrating to.
-Test suites for individual components are in the files `mypy/test/test*.py`.
-You can run many of these individually by doing `runtests.py testfoobar`. For
-finer control over which unit tests are run and how, you can run `py.test` or
-`scripts/myunit` directly, or pass inferior arguments via `-a`:
+You can get a list of available test suites through the `-l` option
+(though this doesn't show all available subtasks):
+
+    $ ./runtests.py -l
+
+The unit test suites are driven by a mixture of test frameworks: `pytest` and
+mypy's own `myunit` framework, which we're in the process of migrating away
+from. Test suites for individual components are in the files
+`mypy/test/test*.py`. You can run many of these individually by doing
+`runtests.py testfoobar`. For finer control over which unit tests are run and
+how, you can run `pytest` directly:
 
     $ py.test mypy/test/testcheck.py -v -k MethodCall
-    $ ./runtests.py -v 'pytest mypy/test/testcheck' -a -v -a -k -a MethodCall
 
-    $ PYTHONPATH=$PWD scripts/myunit -m mypy.test.testlex -v '*backslash*'
-    $ ./runtests.py mypy.test.testlex -a -v -a '*backslash*'
+You can pass inferior arguments to pytest via `-a` when using `runtests.py`:
+
+    $ ./runtests.py pytest -a -v -a -k -a MethodCall
 
 You can also run the type checker for manual testing without
 installing it by setting up the Python module search path suitably:
@@ -163,8 +170,7 @@ the number of processes to use. The default (set in `./pytest.ini`) is the
 number of logical cores; this can be overridden using `-n` option.
 
 Note that running more processes than logical cores is likely to
-significantly decrease performance; the relevant count is the number of
-processes used by `runtests.py` plus those used by `pytest`.
+significantly decrease performance.
 
 
 Coverage reports
diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test
index 9a0d4af..0d0925d 100644
--- a/test-data/unit/check-abstract.test
+++ b/test-data/unit/check-abstract.test
@@ -174,8 +174,8 @@ def f(cls: Type[A]) -> A:
 def g() -> A:
     return A()  # E: Cannot instantiate abstract class 'A' with abstract attribute 'm'
 
-f(A)  # E: Only non-abstract class can be given where 'Type[__main__.A]' is expected
-f(B)  # E: Only non-abstract class can be given where 'Type[__main__.A]' is expected
+f(A)  # E: Only concrete class can be given where "Type[A]" is expected
+f(B)  # E: Only concrete class can be given where "Type[A]" is expected
 f(C)  # OK
 x: Type[B]
 f(x)  # OK
@@ -200,7 +200,7 @@ Alias = A
 GoodAlias = C
 Alias()  # E: Cannot instantiate abstract class 'A' with abstract attribute 'm'
 GoodAlias()
-f(Alias)  # E: Only non-abstract class can be given where 'Type[__main__.A]' is expected
+f(Alias)  # E: Only concrete class can be given where "Type[A]" is expected
 f(GoodAlias)
 [out]
 
@@ -218,14 +218,14 @@ class C(B):
 
 var: Type[A]
 var()
-var = A # E: Can only assign non-abstract classes to a variable of type 'Type[__main__.A]'
-var = B # E: Can only assign non-abstract classes to a variable of type 'Type[__main__.A]'
+var = A # E: Can only assign concrete classes to a variable of type "Type[A]"
+var = B # E: Can only assign concrete classes to a variable of type "Type[A]"
 var = C # OK
 
 var_old = None # type: Type[A] # Old syntax for variable annotations
 var_old()
-var_old = A # E: Can only assign non-abstract classes to a variable of type 'Type[__main__.A]'
-var_old = B # E: Can only assign non-abstract classes to a variable of type 'Type[__main__.A]'
+var_old = A # E: Can only assign concrete classes to a variable of type "Type[A]"
+var_old = B # E: Can only assign concrete classes to a variable of type "Type[A]"
 var_old = C # OK
 [out]
 
@@ -728,13 +728,10 @@ class A(metaclass=ABCMeta):
     def x(self) -> int: pass
 class B(A):
     @property
-    def x(self) -> str: pass # E
+    def x(self) -> str: pass # E: Return type of "x" incompatible with supertype "A"
 b = B()
-b.x() # E
+b.x() # E: "str" not callable
 [builtins fixtures/property.pyi]
-[out]
-main:7: error: Return type of "x" incompatible with supertype "A"
-main:9: error: "str" not callable
 
 [case testCantImplementAbstractPropertyViaInstanceVariable]
 from abc import abstractproperty, ABCMeta
diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test
index f8ac01d..e243bd5 100644
--- a/test-data/unit/check-async-await.test
+++ b/test-data/unit/check-async-await.test
@@ -93,7 +93,7 @@ async def f() -> int:
     return x
 [typing fixtures/typing-full.pyi]
 [out]
-main:7: error: Incompatible types in await (actual type Generator[int, None, str], expected type Awaitable[Any])
+main:7: error: Incompatible types in "await" (actual type "Generator[int, None, str]", expected type "Awaitable[Any]")
 
 [case testAwaitIteratorError]
 
@@ -105,7 +105,7 @@ async def f() -> int:
     return x
 [typing fixtures/typing-full.pyi]
 [out]
-main:6: error: Incompatible types in await (actual type Iterator[Any], expected type Awaitable[Any])
+main:6: error: Incompatible types in "await" (actual type "Iterator[Any]", expected type "Awaitable[Any]")
 
 [case testAwaitArgumentError]
 
@@ -117,7 +117,7 @@ async def f() -> int:
 [builtins fixtures/async_await.pyi]
 [typing fixtures/typing-full.pyi]
 [out]
-main:5: error: Incompatible types in await (actual type "int", expected type Awaitable[Any])
+main:5: error: Incompatible types in "await" (actual type "int", expected type "Awaitable[Any]")
 
 [case testAwaitResultError]
 
@@ -164,7 +164,7 @@ async def f() -> None:
 [typing fixtures/typing-full.pyi]
 [out]
 main:4: error: AsyncIterable expected
-main:4: error: List[int] has no attribute "__aiter__"
+main:4: error: "List[int]" has no attribute "__aiter__"
 
 [case testAsyncForTypeComments]
 
@@ -248,13 +248,13 @@ async def wrong_iterable(obj: Iterable[int]):
 
 [out]
 main:18: error: AsyncIterable expected
-main:18: error: Iterable[int] has no attribute "__aiter__"; maybe "__iter__"?
+main:18: error: "Iterable[int]" has no attribute "__aiter__"; maybe "__iter__"?
 main:19: error: Iterable expected
-main:19: error: asyncify[int] has no attribute "__iter__"; maybe "__aiter__"?
+main:19: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"?
 main:20: error: AsyncIterable expected
-main:20: error: Iterable[int] has no attribute "__aiter__"; maybe "__iter__"?
+main:20: error: "Iterable[int]" has no attribute "__aiter__"; maybe "__iter__"?
 main:21: error: Iterable expected
-main:21: error: asyncify[int] has no attribute "__iter__"; maybe "__aiter__"?
+main:21: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"?
 [builtins fixtures/async_await.pyi]
 [typing fixtures/typing-full.pyi]
 
@@ -290,7 +290,7 @@ class C:
     def __aenter__(self) -> int: pass
     async def __aexit__(self, x, y, z) -> None: pass
 async def f() -> None:
-    async with C() as x:  # E: Incompatible types in "async with" for __aenter__ (actual type "int", expected type Awaitable[Any])
+    async with C() as x:  # E: Incompatible types in "async with" for "__aenter__" (actual type "int", expected type "Awaitable[Any]")
         pass
 [builtins fixtures/async_await.pyi]
 [typing fixtures/typing-full.pyi]
@@ -301,7 +301,7 @@ class C:
     def __aenter__(self) -> None: pass
     async def __aexit__(self, x, y, z) -> None: pass
 async def f() -> None:
-    async with C() as x:  # E: None has no attribute "__await__"
+    async with C() as x:  # E: "None" has no attribute "__await__"
         pass
 [builtins fixtures/async_await.pyi]
 [typing fixtures/typing-full.pyi]
@@ -312,7 +312,7 @@ class C:
     async def __aenter__(self) -> int: pass
     def __aexit__(self, x, y, z) -> int: pass
 async def f() -> None:
-    async with C() as x: # E: Incompatible types in "async with" for __aexit__ (actual type "int", expected type Awaitable[Any])
+    async with C() as x: # E: Incompatible types in "async with" for "__aexit__" (actual type "int", expected type "Awaitable[Any]")
         pass
 [builtins fixtures/async_await.pyi]
 [typing fixtures/typing-full.pyi]
@@ -323,7 +323,7 @@ class C:
     async def __aenter__(self) -> int: pass
     def __aexit__(self, x, y, z) -> None: pass
 async def f() -> None:
-    async with C() as x: # E: None has no attribute "__await__"
+    async with C() as x: # E: "None" has no attribute "__await__"
         pass
 [builtins fixtures/async_await.pyi]
 [typing fixtures/typing-full.pyi]
@@ -383,7 +383,7 @@ def g() -> Generator[Any, None, str]:
 [builtins fixtures/async_await.pyi]
 [typing fixtures/typing-full.pyi]
 [out]
-main:6: error: "yield from" can't be applied to Awaitable[str]
+main:6: error: "yield from" can't be applied to "Awaitable[str]"
 
 [case testAwaitableSubclass]
 
@@ -419,7 +419,7 @@ from types import coroutine
 @coroutine
 def f() -> Generator[int, str, int]:
     x = yield 0
-    x = yield ''  # E: Incompatible types in yield (actual type "str", expected type "int")
+    x = yield ''  # E: Incompatible types in "yield" (actual type "str", expected type "int")
     reveal_type(x)  # E: Revealed type is 'builtins.str'
     if x:
         return 0
@@ -443,7 +443,7 @@ async def g() -> AsyncGenerator[int, None]:
     reveal_type(value)  # E: Revealed type is 'builtins.int*'
     yield value
 
-    yield 'not an int'  # E: Incompatible types in yield (actual type "str", expected type "int")
+    yield 'not an int'  # E: Incompatible types in "yield" (actual type "str", expected type "int")
     # return without a value is fine
     return
 reveal_type(g)  # E: Revealed type is 'def () -> typing.AsyncGenerator[builtins.int, builtins.None]'
@@ -466,7 +466,7 @@ from typing import AsyncIterator
 async def gen() -> AsyncIterator[int]:
     yield 3
 
-    yield 'not an int'  # E: Incompatible types in yield (actual type "str", expected type "int")
+    yield 'not an int'  # E: Incompatible types in "yield" (actual type "str", expected type "int")
 
 async def use_gen() -> None:
     async for item in gen():
@@ -550,7 +550,7 @@ def h() -> None:
 
 [out]
 main:9: error: Iterable expected
-main:9: error: AsyncGenerator[int, None] has no attribute "__iter__"; maybe "__aiter__"?
+main:9: error: "AsyncGenerator[int, None]" has no attribute "__iter__"; maybe "__aiter__"?
 
 [case testAsyncGeneratorNoYieldFrom]
 # flags: --fast-parser --python-version 3.6
@@ -636,19 +636,19 @@ def plain_host_generator() -> Generator[str, None, None]:
     yield 'a'
     x = 0
     x = yield from plain_generator()
-    x = yield from plain_coroutine()  # E: "yield from" can't be applied to Awaitable[int]
+    x = yield from plain_coroutine()  # E: "yield from" can't be applied to "Awaitable[int]"
     x = yield from decorated_generator()
-    x = yield from decorated_coroutine()  # E: "yield from" can't be applied to AwaitableGenerator[Any, Any, int, Awaitable[int]]
+    x = yield from decorated_coroutine()  # E: "yield from" can't be applied to "AwaitableGenerator[Any, Any, int, Awaitable[int]]"
     x = yield from other_iterator()
     x = yield from other_coroutine()  # E: "yield from" can't be applied to "Aw"
 
 async def plain_host_coroutine() -> None:
     x = 0
-    x = await plain_generator()  # E: Incompatible types in await (actual type Generator[str, None, int], expected type Awaitable[Any])
+    x = await plain_generator()  # E: Incompatible types in "await" (actual type "Generator[str, None, int]", expected type "Awaitable[Any]")
     x = await plain_coroutine()
     x = await decorated_generator()
     x = await decorated_coroutine()
-    x = await other_iterator()  # E: Incompatible types in await (actual type "It", expected type Awaitable[Any])
+    x = await other_iterator()  # E: Incompatible types in "await" (actual type "It", expected type "Awaitable[Any]")
     x = await other_coroutine()
 
 @coroutine
@@ -665,11 +665,11 @@ def decorated_host_generator() -> Generator[str, None, None]:
 @coroutine
 async def decorated_host_coroutine() -> None:
     x = 0
-    x = await plain_generator()  # E: Incompatible types in await (actual type Generator[str, None, int], expected type Awaitable[Any])
+    x = await plain_generator()  # E: Incompatible types in "await" (actual type "Generator[str, None, int]", expected type "Awaitable[Any]")
     x = await plain_coroutine()
     x = await decorated_generator()
     x = await decorated_coroutine()
-    x = await other_iterator()  # E: Incompatible types in await (actual type "It", expected type Awaitable[Any])
+    x = await other_iterator()  # E: Incompatible types in "await" (actual type "It", expected type "Awaitable[Any]")
     x = await other_coroutine()
 
 [builtins fixtures/async_await.pyi]
diff --git a/test-data/unit/check-bound.test b/test-data/unit/check-bound.test
index 4d6ede2..043148f 100644
--- a/test-data/unit/check-bound.test
+++ b/test-data/unit/check-bound.test
@@ -24,8 +24,8 @@ b = B()
 b = f(b)
 b = f(C()) # Fail
 [out]
-main:12: error: Type argument 1 of "f" has incompatible value "U"
-main:16: error: Type argument 1 of "f" has incompatible value "D"
+main:12: error: Value of type variable "T" of "f" cannot be "U"
+main:16: error: Value of type variable "T" of "f" cannot be "D"
 main:20: error: Incompatible types in assignment (expression has type "C", variable has type "B")
 
 
@@ -42,7 +42,7 @@ class G(Generic[T]):
 v = None # type: G[A]
 w = None # type: G[B]
 x = None # type: G[str] # E: Type argument "builtins.str" of "G" must be a subtype of "__main__.A"
-y = G('a') # E: Type argument 1 of "G" has incompatible value "str"
+y = G('a') # E: Value of type variable "T" of "G" cannot be "str"
 z = G(A())
 z = G(B())
 
@@ -125,7 +125,7 @@ def j(x: TA) -> A:
 def k(x: TA) -> B:
     return x # Fail
 [out]
-main:16: error: Type argument 1 of "h" has incompatible value "TA"
+main:16: error: Value of type variable "TB" of "h" cannot be "TA"
 main:21: error: Incompatible return value type (got "TA", expected "B")
 
 
@@ -199,5 +199,5 @@ def foo(x: int) -> int:
 a = 1
 b = foo(a)
 b = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-twice(a) # E: Type argument 1 of "twice" has incompatible value "int"
+twice(a) # E: Value of type variable "T" of "twice" cannot be "int"
 [builtins fixtures/args.pyi]
diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test
index 710f750..c40ceb5 100644
--- a/test-data/unit/check-class-namedtuple.test
+++ b/test-data/unit/check-class-namedtuple.test
@@ -221,7 +221,7 @@ class MyNamedTuple(NamedTuple):
     a: int
     b: str
 
-MyNamedTuple.x # E: Type[MyNamedTuple] has no attribute "x"
+MyNamedTuple.x # E: "Type[MyNamedTuple]" has no attribute "x"
 
 [case testNewNamedTupleEmptyItems]
 # flags: --python-version 3.6
@@ -416,7 +416,7 @@ class Parameterized(NamedTuple):
     z: List[int] = []
 
 reveal_type(Parameterized(1))  # E: Revealed type is 'Tuple[builtins.int, builtins.list[builtins.int], builtins.list[builtins.int], fallback=__main__.Parameterized]'
-Parameterized(1, ['not an int'])  # E: List item 0 has incompatible type "str"
+Parameterized(1, ['not an int'])  # E: List item 0 has incompatible type "str"; expected "int"
 
 class Default:
     pass
@@ -439,13 +439,13 @@ class HasNone(NamedTuple):
     y: Optional[int] = None
 
 reveal_type(HasNone(1))  # E: Revealed type is 'Tuple[builtins.int, Union[builtins.int, builtins.None], fallback=__main__.HasNone]'
-HasNone(None)  # E: Argument 1 to "HasNone" has incompatible type None; expected "int"
+HasNone(None)  # E: Argument 1 to "HasNone" has incompatible type "None"; expected "int"
 HasNone(1, y=None)
 HasNone(1, y=2)
 
 class CannotBeNone(NamedTuple):
     x: int
-    y: int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+    y: int = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
 
 [builtins fixtures/list.pyi]
 
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test
index 0b4b0f5..7243de9 100644
--- a/test-data/unit/check-classes.test
+++ b/test-data/unit/check-classes.test
@@ -345,6 +345,136 @@ class A:
 class B(A):
     def __init_subclass__(cls) -> None: pass
 
+[case testOverrideWithDecorator]
+from typing import Callable
+
+def int_to_none(f: Callable[..., int]) -> Callable[..., None]: ...
+def str_to_int(f: Callable[..., str]) -> Callable[..., int]: ...
+
+class A:
+    def f(self) -> None: pass
+    def g(self) -> str: pass
+    def h(self) -> None: pass
+
+class B(A):
+    @int_to_none
+    def f(self) -> int: pass
+    @str_to_int
+    def g(self) -> str: pass # E: Signature of "g" incompatible with supertype "A"
+    @int_to_none
+    @str_to_int
+    def h(self) -> str: pass
+
+[case testOverrideDecorated]
+from typing import Callable
+
+def str_to_int(f: Callable[..., str]) -> Callable[..., int]: ...
+
+class A:
+    @str_to_int
+    def f(self) -> str: pass
+    @str_to_int
+    def g(self) -> str: pass
+    @str_to_int
+    def h(self) -> str: pass
+
+class B(A):
+    def f(self) -> int: pass
+    def g(self) -> str: pass # E: Signature of "g" incompatible with supertype "A"
+    @str_to_int
+    def h(self) -> str: pass
+
+[case testOverrideWithDecoratorReturningAny]
+def dec(f): pass
+
+class A:
+    def f(self) -> str: pass
+
+class B(A):
+    @dec
+    def f(self) -> int: pass
+
+[case testOverrideWithDecoratorReturningInstance]
+def dec(f) -> str: pass
+
+class A:
+    def f(self) -> str: pass
+    @dec
+    def g(self) -> int: pass
+    @dec
+    def h(self) -> int: pass
+
+class B(A):
+    @dec
+    def f(self) -> int: pass # E: Signature of "f" incompatible with supertype "A"
+    def g(self) -> int: pass # E: Signature of "g" incompatible with supertype "A"
+    @dec
+    def h(self) -> str: pass
+
+[case testOverrideStaticMethodWithStaticMethod]
+class A:
+    @staticmethod
+    def f(x: int, y: str) -> None: pass
+    @staticmethod
+    def g(x: int, y: str) -> None: pass
+
+class B(A):
+    @staticmethod
+    def f(x: int, y: str) -> None: pass
+    @staticmethod
+    def g(x: str, y: str) -> None: pass # E: Argument 1 of "g" incompatible with supertype "A"
+[builtins fixtures/classmethod.pyi]
+
+[case testOverrideClassMethodWithClassMethod]
+class A:
+    @classmethod
+    def f(cls, x: int, y: str) -> None: pass
+    @classmethod
+    def g(cls, x: int, y: str) -> None: pass
+
+class B(A):
+    @classmethod
+    def f(cls, x: int, y: str) -> None: pass
+    @classmethod
+    def g(cls, x: str, y: str) -> None: pass # E: Argument 1 of "g" incompatible with supertype "A"
+[builtins fixtures/classmethod.pyi]
+
+[case testOverrideClassMethodWithStaticMethod]
+class A:
+    @classmethod
+    def f(cls, x: int) -> None: pass
+    @classmethod
+    def g(cls, x: int) -> int: pass
+    @classmethod
+    def h(cls) -> int: pass
+
+class B(A):
+    @staticmethod
+    def f(x: int) -> None: pass
+    @staticmethod
+    def g(x: str) -> int: pass # E: Argument 1 of "g" incompatible with supertype "A"
+    @staticmethod
+    def h() -> int: pass
+[builtins fixtures/classmethod.pyi]
+
+[case testOverrideStaticMethodWithClassMethod]
+class A:
+    @staticmethod
+    def f(x: int) -> None: pass
+    @staticmethod
+    def g(x: str) -> int: pass
+    @staticmethod
+    def h() -> int: pass
+
+class B(A):
+    @classmethod
+    def f(cls, x: int) -> None: pass
+    @classmethod
+    def g(cls, x: int) -> int: pass # E: Argument 1 of "g" incompatible with supertype "A"
+    @classmethod
+    def h(cls) -> int: pass
+[builtins fixtures/classmethod.pyi]
+
 
 -- Constructors
 -- ------------
@@ -504,8 +634,8 @@ class A:
     h = f # type: Callable[[A], None]
     h = f
     g = h
-    ff = f # type: Callable[[B], None]  # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[B], None])
-    g = ff                # E: Incompatible types in assignment (expression has type Callable[[B], None], variable has type Callable[[A], None])
+    ff = f # type: Callable[[B], None]  # E: Incompatible types in assignment (expression has type "Callable[[A], None]", variable has type "Callable[[B], None]")
+    g = ff                # E: Incompatible types in assignment (expression has type "Callable[[B], None]", variable has type "Callable[[A], None]")
 [out]
 
 
@@ -551,7 +681,7 @@ b = A.x # type: B # E: Incompatible types in assignment (expression has type "A"
 [case testAccessingUndefinedAttributeViaClass]
 import typing
 class A: pass
-A.x # E: Type[A] has no attribute "x"
+A.x # E: "Type[A]" has no attribute "x"
 
 [case testAccessingUndefinedAttributeViaClassWithOverloadedInit]
 from foo import *
@@ -562,7 +692,7 @@ class A:
     def __init__(self): pass
     @overload
     def __init__(self, x): pass
-A.x # E: Type[A] has no attribute "x"
+A.x # E: "Type[A]" has no attribute "x"
 
 [case testAccessMethodOfClassWithOverloadedInit]
 from foo import *
@@ -864,7 +994,7 @@ class C:
     cls(1)      # E: Too many arguments for "C"
     cls.bar()
     cls.bar(1)  # E: Too many arguments for "bar" of "C"
-    cls.bozo()  # E: Type[C] has no attribute "bozo"
+    cls.bozo()  # E: "Type[C]" has no attribute "bozo"
 [builtins fixtures/classmethod.pyi]
 [out]
 
@@ -875,7 +1005,7 @@ class C:
   def foo(cls) -> None: pass
 C.foo()
 C.foo(1)  # E: Too many arguments for "foo" of "C"
-C.bozo()  # E: Type[C] has no attribute "bozo"
+C.bozo()  # E: "Type[C]" has no attribute "bozo"
 [builtins fixtures/classmethod.pyi]
 
 [case testClassMethodCalledOnInstance]
@@ -885,7 +1015,7 @@ class C:
   def foo(cls) -> None: pass
 C().foo()
 C().foo(1)  # E: Too many arguments for "foo" of "C"
-C.bozo()    # E: Type[C] has no attribute "bozo"
+C.bozo()    # E: "Type[C]" has no attribute "bozo"
 [builtins fixtures/classmethod.pyi]
 
 [case testClassMethodMayCallAbstractMethod]
@@ -1330,7 +1460,7 @@ class D:
     def __get__(self, inst: Any, own: str) -> Any: pass
 class A:
     f = D()
-A().f  # E: Argument 2 to "__get__" of "D" has incompatible type Type[A]; expected "str"
+A().f  # E: Argument 2 to "__get__" of "D" has incompatible type "Type[A]"; expected "str"
 
 [case testDescriptorGetSetDifferentTypes]
 from typing import Any
@@ -1853,7 +1983,7 @@ class C:
 def f(x: type) -> None: pass
 def g(x: int) -> None: pass
 f(C)
-g(C) # E: Argument 1 to "g" has incompatible type Type[C]; expected "int"
+g(C) # E: Argument 1 to "g" has incompatible type "Type[C]"; expected "int"
 [builtins fixtures/__new__.pyi]
 
 [case testClassWith__new__AndCompatibilityWithType2]
@@ -1864,7 +1994,7 @@ class C:
 def f(x: type) -> None: pass
 def g(x: int) -> None: pass
 f(C)
-g(C) # E: Argument 1 to "g" has incompatible type Type[C]; expected "int"
+g(C) # E: Argument 1 to "g" has incompatible type "Type[C]"; expected "int"
 [builtins fixtures/__new__.pyi]
 
 [case testGenericClassWith__new__]
@@ -1944,7 +2074,7 @@ class B:
 [case testClassVsInstanceDisambiguation]
 class A: pass
 def f(x: A) -> None: pass
-f(A) # E: Argument 1 to "f" has incompatible type Type[A]; expected "A"
+f(A) # E: Argument 1 to "f" has incompatible type "Type[A]"; expected "A"
 [out]
 
 -- TODO
@@ -2118,7 +2248,7 @@ def error(u_c: Type[U]) -> P:
 [out]
 main:11: error: Revealed type is '__main__.WizUser*'
 main:13: error: Incompatible return value type (got "U", expected "P")
-main:13: error: Type argument 1 of "new_pro" has incompatible value "U"
+main:13: error: Value of type variable "P" of "new_pro" cannot be "U"
 
 [case testTypeUsingTypeCCovariance]
 from typing import Type, TypeVar
@@ -2151,7 +2281,7 @@ class User: pass
 def new_user(user_class: Type[User]):
     return user_class()
 def foo(arg: Type[int]):
-    new_user(arg)  # E: Argument 1 to "new_user" has incompatible type Type[int]; expected Type[User]
+    new_user(arg)  # E: Argument 1 to "new_user" has incompatible type "Type[int]"; expected "Type[User]"
 [out]
 
 [case testTypeUsingTypeCUnionOverload]
@@ -2190,7 +2320,7 @@ def foo(arg: Type[Any]):
     # Member access is ok and types as Any
     reveal_type(x)  # E: Revealed type is 'Any'
     # But Type[Any] is distinct from Any
-    y: int = arg  # E: Incompatible types in assignment (expression has type Type[Any], variable has type "int")
+    y: int = arg  # E: Incompatible types in assignment (expression has type "Type[Any]", variable has type "int")
 [out]
 
 [case testTypeUsingTypeCTypeAnyMemberFallback]
@@ -2231,7 +2361,7 @@ def process(cls: Type[User]):
     obj = cls()
     reveal_type(cls.bar(obj))  # E: Revealed type is 'builtins.int'
     cls.mro()  # Defined in class type
-    cls.error  # E: Type[User] has no attribute "error"
+    cls.error  # E: "Type[User]" has no attribute "error"
 [builtins fixtures/classmethod.pyi]
 [out]
 
@@ -2264,7 +2394,7 @@ def process(cls: Type[U]):
     obj = cls()
     reveal_type(cls.bar(obj))  # E: Revealed type is 'builtins.int'
     cls.mro()  # Defined in class type
-    cls.error  # E: Type[U] has no attribute "error"
+    cls.error  # E: "Type[U]" has no attribute "error"
 [builtins fixtures/classmethod.pyi]
 [out]
 
@@ -2279,11 +2409,11 @@ class ProUser(User): pass
 class BasicUser(User): pass
 U = TypeVar('U', bound=Union[ProUser, BasicUser])
 def process(cls: Type[U]):
-    cls.foo()  # E: Type[U] has no attribute "foo"
+    cls.foo()  # E: "Type[U]" has no attribute "foo"
     obj = cls()
-    cls.bar(obj)  # E: Type[U] has no attribute "bar"
+    cls.bar(obj)  # E: "Type[U]" has no attribute "bar"
     cls.mro()  # Defined in class type
-    cls.error  # E: Type[U] has no attribute "error"
+    cls.error  # E: "Type[U]" has no attribute "error"
 [builtins fixtures/classmethod.pyi]
 [out]
 
@@ -2751,7 +2881,7 @@ int.__eq__(3, 4)
 [builtins fixtures/args.pyi]
 [out]
 main:33: error: Too few arguments for "__eq__" of "int"
-main:33: error: Unsupported operand types for == ("int" and Type[int])
+main:33: error: Unsupported operand types for == ("int" and "Type[int]")
 
 [case testMroSetAfterError]
 class C(str, str):
@@ -2884,7 +3014,7 @@ class B(A[int]):
     b = ['']
 [builtins fixtures/list.pyi]
 [out]
-main:8: error: List item 0 has incompatible type "str"
+main:8: error: List item 0 has incompatible type "str"; expected "int"
 
 [case testVariableMethod]
 class A:
@@ -2894,7 +3024,7 @@ class B(A):
     a = 1
     def b(self) -> None: pass
 [out]
-main:5: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as Callable[[A], None])
+main:5: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as "Callable[[A], None]")
 main:6: error: Signature of "b" incompatible with supertype "A"
 
 [case testVariableProperty]
@@ -2934,7 +3064,7 @@ class C(B):
     def m(self, a: str) -> None: pass
     n = m
 [out]
-main:5: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "B" defined the type as Callable[[int], None])
+main:5: error: Incompatible types in assignment (expression has type "Callable[[str], None]", base class "B" defined the type as "Callable[[int], None]")
 
 [case testInstanceMethodOverwriteTypevar]
 from typing import Generic, TypeVar
@@ -2978,7 +3108,7 @@ class C(B):
     n = m
 [builtins fixtures/classmethod.pyi]
 [out]
-main:7: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "B" defined the type as Callable[[int], None])
+main:7: error: Incompatible types in assignment (expression has type "Callable[[str], None]", base class "B" defined the type as "Callable[[int], None]")
 
 [case testClassSpec]
 from typing import Callable
@@ -2996,7 +3126,7 @@ class B(A):
     def c(self, a: str) -> int: pass
     b = c
 [out]
-main:6: error: Incompatible types in assignment (expression has type Callable[[str], int], base class "A" defined the type as Callable[[int], int])
+main:6: error: Incompatible types in assignment (expression has type "Callable[[str], int]", base class "A" defined the type as "Callable[[int], int]")
 
 [case testClassStaticMethod]
 class A():
@@ -3008,7 +3138,7 @@ class B(A):
     a = b
 [builtins fixtures/staticmethod.pyi]
 [out]
-main:7: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "A" defined the type as Callable[[int], None])
+main:7: error: Incompatible types in assignment (expression has type "Callable[[str], None]", base class "A" defined the type as "Callable[[int], None]")
 
 [case testClassStaticMethodIndirect]
 class A():
@@ -3021,7 +3151,7 @@ class B(A):
     c = b
 [builtins fixtures/staticmethod.pyi]
 [out]
-main:8: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "A" defined the type as Callable[[int], None])
+main:8: error: Incompatible types in assignment (expression has type "Callable[[str], None]", base class "A" defined the type as "Callable[[int], None]")
 
 [case testClassStaticMethodSubclassing]
 class A:
@@ -3116,7 +3246,7 @@ class M:
 
 class A(metaclass=M): pass  # E: Metaclasses not inheriting from 'type' are not supported
 
-A.x  # E: Type[A] has no attribute "x"
+A.x  # E: "Type[A]" has no attribute "x"
 
 [case testMetaclassTypeReveal]
 from typing import Type
@@ -3152,19 +3282,20 @@ def f(TB: Type[B]):
 [case testMetaclassIterable]
 from typing import Iterable, Iterator
 
-class BadMeta(type):
+class ImplicitMeta(type):
     def __iter__(self) -> Iterator[int]: yield 1
 
-class Bad(metaclass=BadMeta): pass
+class Implicit(metaclass=ImplicitMeta): pass
 
-for _ in Bad: pass  # E: Iterable expected
+for _ in Implicit: pass
+reveal_type(list(Implicit))  # E: Revealed type is 'builtins.list[builtins.int*]'
 
-class GoodMeta(type, Iterable[int]):
+class ExplicitMeta(type, Iterable[int]):
     def __iter__(self) -> Iterator[int]: yield 1
 
-class Good(metaclass=GoodMeta): pass
-for _ in Good: pass
-reveal_type(list(Good))  # E: Revealed type is 'builtins.list[builtins.int*]'
+class Explicit(metaclass=ExplicitMeta): pass
+for _ in Explicit: pass
+reveal_type(list(Explicit))  # E: Revealed type is 'builtins.list[builtins.int*]'
 
 [builtins fixtures/list.pyi]
 
@@ -3187,7 +3318,7 @@ class Concrete(metaclass=Meta):
     pass
 
 reveal_type(Concrete + X())  # E: Revealed type is 'builtins.str'
-Concrete + "hello"  # E: Unsupported operand types for + (Type[Concrete] and "str")
+Concrete + "hello"  # E: Unsupported operand types for + ("Type[Concrete]" and "str")
 
 [case testMetaclassGetitem]
 class M(type):
@@ -3215,7 +3346,7 @@ from missing import M
 class A(metaclass=M):
     y = 0
 reveal_type(A.y) # E: Revealed type is 'builtins.int'
-A.x # E: Type[A] has no attribute "x"
+A.x # E: "Type[A]" has no attribute "x"
 
 [case testAnyMetaclass]
 from typing import Any
@@ -3223,7 +3354,7 @@ M = None  # type: Any
 class A(metaclass=M):
     y = 0
 reveal_type(A.y) # E: Revealed type is 'builtins.int'
-A.x # E: Type[A] has no attribute "x"
+A.x # E: "Type[A]" has no attribute "x"
 
 [case testInvalidVariableAsMetaclass]
 from typing import Any
@@ -3234,7 +3365,7 @@ class A(metaclass=M): # E: Invalid metaclass 'M'
 class B(metaclass=MM): # E: Invalid metaclass 'MM'
     y = 0
 reveal_type(A.y) # E: Revealed type is 'builtins.int'
-A.x # E: Type[A] has no attribute "x"
+A.x # E: "Type[A]" has no attribute "x"
 
 [case testAnyAsBaseOfMetaclass]
 from typing import Any, Type
@@ -3249,7 +3380,7 @@ class A(metaclass=MM):
 
 def h(a: Type[A], b: Type[object]) -> None:
     h(a, a)
-    h(b, a) # E: Argument 1 to "h" has incompatible type Type[object]; expected Type[A]
+    h(b, a) # E: Argument 1 to "h" has incompatible type "Type[object]"; expected "Type[A]"
     a.f(1) # E: Too many arguments for "f" of "A"
     reveal_type(a.y) # E: Revealed type is 'builtins.int'
 
@@ -3291,14 +3422,14 @@ reveal_type(A.g4)  # E: Revealed type is 'def () -> def () -> __main__.A'
 class B(metaclass=M):
     def foo(self): pass
 
-B.g1  # Should be error: Argument 0 to "g1" of "M" has incompatible type "B"; expected Type[A]
-B.g2  # Should be error: Argument 0 to "g2" of "M" has incompatible type "B"; expected Type[TA]
+B.g1  # Should be error: Argument 0 to "g1" of "M" has incompatible type "B"; expected "Type[A]"
+B.g2  # Should be error: Argument 0 to "g2" of "M" has incompatible type "B"; expected "Type[TA]"
 B.g3  # Should be error: Argument 0 to "g3" of "M" has incompatible type "B"; expected "TTA"
 reveal_type(B.g4)  # E: Revealed type is 'def () -> def () -> __main__.B'
 
 # 4 examples of unsoundness - instantiation, classmethod, staticmethod and ClassVar:
 
-ta: Type[A] = m  # E: Incompatible types in assignment (expression has type "M", variable has type Type[A])
+ta: Type[A] = m  # E: Incompatible types in assignment (expression has type "M", variable has type "Type[A]")
 a: A = ta()
 reveal_type(ta.g1)  # E: Revealed type is 'def () -> __main__.A'
 reveal_type(ta.g2)  # E: Revealed type is 'def () -> __main__.A*'
@@ -3306,8 +3437,8 @@ reveal_type(ta.g3)  # E: Revealed type is 'def () -> Type[__main__.A]'
 reveal_type(ta.g4)  # E: Revealed type is 'def () -> Type[__main__.A]'
 
 x: M = ta
-x.g1  # should be error: Argument 0 to "g1" of "M" has incompatible type "M"; expected Type[A]
-x.g2  # should be error: Argument 0 to "g2" of "M" has incompatible type "M"; expected Type[TA]
+x.g1  # should be error: Argument 0 to "g1" of "M" has incompatible type "M"; expected "Type[A]"
+x.g2  # should be error: Argument 0 to "g2" of "M" has incompatible type "M"; expected "Type[TA]"
 x.g3  # should be error: Argument 0 to "g3" of "M" has incompatible type "M"; expected "TTA"
 reveal_type(x.g4)  # E: Revealed type is 'def () -> __main__.M*'
 
@@ -3320,7 +3451,7 @@ class Class(metaclass=M):
     def f1(cls: Type[Class]) -> None: pass
     @classmethod
     def f2(cls: M) -> None: pass
-cl: Type[Class] = m  # E: Incompatible types in assignment (expression has type "M", variable has type Type[Class])
+cl: Type[Class] = m  # E: Incompatible types in assignment (expression has type "M", variable has type "Type[Class]")
 reveal_type(cl.f1)  # E: Revealed type is 'def ()'
 reveal_type(cl.f2)  # E: Revealed type is 'def ()'
 x1: M = cl
@@ -3328,14 +3459,14 @@ x1: M = cl
 class Static(metaclass=M):
     @staticmethod
     def f() -> None: pass
-s: Type[Static] = m  # E: Incompatible types in assignment (expression has type "M", variable has type Type[Static])
+s: Type[Static] = m  # E: Incompatible types in assignment (expression has type "M", variable has type "Type[Static]")
 reveal_type(s.f)  # E: Revealed type is 'def ()'
 x2: M = s
 
 from typing import ClassVar
 class Cvar(metaclass=M):
     x = 1  # type: ClassVar[int]
-cv: Type[Cvar] = m  # E: Incompatible types in assignment (expression has type "M", variable has type Type[Cvar])
+cv: Type[Cvar] = m  # E: Incompatible types in assignment (expression has type "M", variable has type "Type[Cvar]")
 cv.x
 x3: M = cv
 
@@ -3373,9 +3504,267 @@ reveal_type(f(e1t))  # E: Revealed type is '__main__.A'
 
 reveal_type(f(''))  # E: Revealed type is 'builtins.str'
 
+[case testTypeCErasesGenericsFromC]
+from typing import Generic, Type, TypeVar
+
+K = TypeVar('K')
+V = TypeVar('V')
+class ExampleDict(Generic[K, V]): ...
+
+D = TypeVar('D')
+def mkdict(dict_type: Type[D]) -> D: ...
+reveal_type(mkdict(ExampleDict))  # E: Revealed type is '__main__.ExampleDict*[Any, Any]'
+
+[case testTupleForwardBase]
+from m import a
+a[0]()  # E: "int" not callable
+
+[file m.py]
+from typing import Tuple
+a = None # type: A
+class A(Tuple[int, str]): pass
+[builtins fixtures/tuple.pyi]
+
 -- Synthetic types crashes
 -- -----------------------
 
+[case testCrashOnSelfRecursiveNamedTupleVar]
+from typing import NamedTuple
+
+N = NamedTuple('N', [('x', N)]) # E: Recursive types not fully supported yet, nested types replaced with "Any"
+n: N
+[out]
+
+[case testCrashOnSelfRecursiveTypedDictVar]
+from mypy_extensions import TypedDict
+
+A = TypedDict('A', {'a': 'A'})  # type: ignore
+a: A
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testCrashInJoinOfSelfRecursiveNamedTuples]
+from typing import NamedTuple
+
+class N(NamedTuple): # type: ignore
+    x: N
+class M(NamedTuple): # type: ignore
+    x: M
+
+n: N
+m: M
+lst = [n, m]
+[builtins fixtures/isinstancelist.pyi]
+
+[case testCorrectJoinOfSelfRecursiveTypedDicts]
+from mypy_extensions import TypedDict
+
+class N(TypedDict):
+    x: N
+class M(TypedDict):
+    x: M
+
+n: N
+m: M
+lst = [n, m]
+reveal_type(lst[0]['x'])  # E: Revealed type is 'TypedDict('__main__.N', {'x': Any})'
+[builtins fixtures/isinstancelist.pyi]
+[out]
+main:3: error: Recursive types not fully supported yet, nested types replaced with "Any"
+main:5: error: Recursive types not fully supported yet, nested types replaced with "Any"
+
+[case testCrashInForwardRefToNamedTupleWithIsinstance]
+from typing import Dict, NamedTuple
+
+NameDict = Dict[str, 'NameInfo']
+class NameInfo(NamedTuple):
+    ast: bool
+
+def parse_ast(name_dict: NameDict) -> None:
+    if isinstance(name_dict[''], int):
+        pass
+    reveal_type(name_dict['test']) # E: Revealed type is 'Tuple[builtins.bool, fallback=__main__.NameInfo]'
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testCrashInForwardRefToTypedDictWithIsinstance]
+from mypy_extensions import TypedDict
+from typing import Dict
+
+NameDict = Dict[str, 'NameInfo']
+class NameInfo(TypedDict):
+    ast: bool
+
+def parse_ast(name_dict: NameDict) -> None:
+    if isinstance(name_dict[''], int):
+        pass
+    reveal_type(name_dict['']['ast'])  # E: Revealed type is 'builtins.bool'
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testCorrectIsinstanceInForwardRefToNewType]
+from typing import Dict, NewType
+
+NameDict = Dict[str, 'NameInfo']
+class Base:
+    ast: bool
+NameInfo = NewType('NameInfo', Base)
+
+def parse_ast(name_dict: NameDict) -> None:
+    if isinstance(name_dict[''], int):
+        pass
+    x = name_dict['']
+    reveal_type(x) # E: Revealed type is '__main__.NameInfo*'
+    x = NameInfo(Base()) # OK
+    x = Base() # E: Incompatible types in assignment (expression has type "Base", variable has type "NameInfo")
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testCorrectAttributeInForwardRefToNamedTuple]
+from typing import NamedTuple
+proc: Process
+reveal_type(proc.state)  # E: Revealed type is 'builtins.int'
+
+def get_state(proc: 'Process') -> int:
+    return proc.state
+class Process(NamedTuple):
+     state: int
+[out]
+
+[case testCorrectItemTypeInForwardRefToTypedDict]
+from mypy_extensions import TypedDict
+proc: Process
+reveal_type(proc['state'])  # E: Revealed type is 'builtins.int'
+
+def get_state(proc: 'Process') -> int:
+    return proc['state']
+class Process(TypedDict):
+     state: int
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testCorrectDoubleForwardNamedTuple]
+from typing import NamedTuple
+
+x: A
+class A(NamedTuple):
+    one: 'B'
+    other: int
+class B(NamedTuple):
+    attr: str
+y: A
+y = x
+reveal_type(x.one.attr)  # E: Revealed type is 'builtins.str'
+[out]
+
+[case testCrashOnDoubleForwardTypedDict]
+from mypy_extensions import TypedDict
+
+x: A
+class A(TypedDict):
+    one: 'B'
+    other: int
+class B(TypedDict):
+    attr: str
+
+reveal_type(x['one']['attr'])  # E: Revealed type is 'builtins.str'
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testCrashOnForwardUnionOfNamedTuples]
+from typing import Union, NamedTuple
+
+Node = Union['Foo', 'Bar']
+class Foo(NamedTuple):
+    x: int
+class Bar(NamedTuple):
+    x: int
+
+def foo(node: Node) -> int:
+    x = node
+    reveal_type(node) # E: Revealed type is 'Union[Tuple[builtins.int, fallback=__main__.Foo], Tuple[builtins.int, fallback=__main__.Bar]]'
+    return x.x
+[out]
+
+[case testCrashOnForwardUnionOfTypedDicts]
+from mypy_extensions import TypedDict
+from typing import Union
+
+NodeType = Union['Foo', 'Bar']
+class Foo(TypedDict):
+    x: int
+class Bar(TypedDict):
+    x: int
+
+def foo(node: NodeType) -> int:
+    x = node
+    return x['x']
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testSupportForwardUnionOfNewTypes]
+from typing import Union, NewType
+x: Node
+reveal_type(x.x) # E: Revealed type is 'builtins.int'
+
+class A:
+    x: int
+class B:
+    x: int
+
+Node = Union['Foo', 'Bar']
+Foo = NewType('Foo', A)
+Bar = NewType('Bar', B)
+
+def foo(node: Node) -> Node:
+    x = node
+    return Foo(A())
+[out]
+
+[case testForwardReferencesInNewTypeMRORecomputed]
+from typing import NewType
+x: Foo
+Foo = NewType('Foo', B)
+class A:
+    x: int
+class B(A):
+    pass
+
+reveal_type(x.x) # E: Revealed type is 'builtins.int'
+[out]
+
+[case testCrashOnComplexNamedTupleUnionProperty]
+from typing import NamedTuple, Union
+
+x: AOrB
+AOrB = Union['A', 'B']
+class A(NamedTuple):
+    x: int
+
+class B(object):
+    def __init__(self, a: AOrB) -> None:
+        self.a = a
+    @property
+    def x(self) -> int:
+        return self.a.x
+
+reveal_type(x.x) # E: Revealed type is 'builtins.int'
+[builtins fixtures/property.pyi]
+[out]
+
+[case testCorrectIsinstanceWithForwardUnion]
+from typing import Union, NamedTuple
+
+ForwardUnion = Union['TP', int]
+class TP(NamedTuple('TP', [('x', int)])): pass
+
+def f(x: ForwardUnion) -> None:
+  reveal_type(x)  # E: Revealed type is 'Union[Tuple[builtins.int, fallback=__main__.TP], builtins.int]'
+  if isinstance(x, TP):
+    reveal_type(x)  # E: Revealed type is 'Tuple[builtins.int, fallback=__main__.TP]'
+[builtins fixtures/isinstance.pyi]
+[out]
+
 [case testCrashInvalidArgsSyntheticClassSyntax]
 from typing import List, NamedTuple
 from mypy_extensions import TypedDict
@@ -3458,64 +3847,94 @@ reveal_type(y['b']) # E: Revealed type is '__main__.B'
 -- Special support for six
 -- -----------------------
 
-[case testSixWithMetaclass]
+[case testSixMetaclass]
 import six
 class M(type):
     x = 5
 class A(six.with_metaclass(M)): pass
+@six.add_metaclass(M)
+class B: pass
 reveal_type(type(A).x)  # E: Revealed type is 'builtins.int'
+reveal_type(type(B).x)  # E: Revealed type is 'builtins.int'
 
-[case testSixWithMetaclass_python2]
+[case testSixMetaclass_python2]
 import six
 class M(type):
     x = 5
 class A(six.with_metaclass(M)): pass
+@six.add_metaclass(M)
+class B: pass
 reveal_type(type(A).x)  # E: Revealed type is 'builtins.int'
+reveal_type(type(B).x)  # E: Revealed type is 'builtins.int'
 
-[case testFromSixWithMetaclass]
-from six import with_metaclass
+[case testFromSixMetaclass]
+from six import with_metaclass, add_metaclass
 class M(type):
     x = 5
 class A(with_metaclass(M)): pass
+@add_metaclass(M)
+class B: pass
 reveal_type(type(A).x)  # E: Revealed type is 'builtins.int'
+reveal_type(type(B).x)  # E: Revealed type is 'builtins.int'
 
-[case testSixWithMetaclassImportFrom]
+[case testSixMetaclassImportFrom]
 import six
 from metadefs import M
 class A(six.with_metaclass(M)): pass
+@six.add_metaclass(M)
+class B: pass
 reveal_type(type(A).x)  # E: Revealed type is 'builtins.int'
+reveal_type(type(B).x)  # E: Revealed type is 'builtins.int'
 [file metadefs.py]
 class M(type):
     x = 5
 
-[case testSixWithMetaclassImport]
+[case testSixMetaclassImport]
 import six
 import metadefs
 class A(six.with_metaclass(metadefs.M)): pass
+@six.add_metaclass(metadefs.M)
+class B: pass
 reveal_type(type(A).x)  # E: Revealed type is 'builtins.int'
+reveal_type(type(B).x)  # E: Revealed type is 'builtins.int'
 [file metadefs.py]
 class M(type):
     x = 5
 
-[case testSixWithMetaclassAndBase]
+[case testSixMetaclassAndBase]
+from typing import Iterable, Iterator
 import six
-class M(type):
+class M(type, Iterable[int]):
     x = 5
+    def __iter__(self) -> Iterator[int]: ...
 class A:
     def foo(self): pass
 class B:
     def bar(self): pass
 class C1(six.with_metaclass(M, A)): pass
+@six.add_metaclass(M)
+class D1(A): pass
 class C2(six.with_metaclass(M, A, B)): pass
+@six.add_metaclass(M)
+class D2(A, B): pass
 reveal_type(type(C1).x)  # E: Revealed type is 'builtins.int'
+reveal_type(type(D1).x)  # E: Revealed type is 'builtins.int'
 reveal_type(type(C2).x)  # E: Revealed type is 'builtins.int'
+reveal_type(type(D2).x)  # E: Revealed type is 'builtins.int'
 C1().foo()
+D1().foo()
 C1().bar()  # E: "C1" has no attribute "bar"
+D1().bar()  # E: "D1" has no attribute "bar"
+for x in C1: reveal_type(x)  # E: Revealed type is 'builtins.int*'
+for x in C2: reveal_type(x)  # E: Revealed type is 'builtins.int*'
 C2().foo()
+D2().foo()
 C2().bar()
+D2().bar()
 C2().baz()  # E: "C2" has no attribute "baz"
+D2().baz()  # E: "D2" has no attribute "baz"
 
-[case testSixWithMetaclassGenerics]
+[case testSixMetaclassGenerics]
 from typing import Generic, GenericMeta, TypeVar
 import six
 class DestroyableMeta(type):
@@ -3527,12 +3946,16 @@ class ArcMeta(GenericMeta, DestroyableMeta):
     pass
 class Arc(six.with_metaclass(ArcMeta, Generic[T_co], Destroyable)):
     pass
+@six.add_metaclass(ArcMeta)
+class Arc1(Generic[T_co], Destroyable):
+    pass
 class MyDestr(Destroyable):
     pass
 reveal_type(Arc[MyDestr]())  # E: Revealed type is '__main__.Arc[__main__.MyDestr*]'
+reveal_type(Arc1[MyDestr]())  # E: Revealed type is '__main__.Arc1[__main__.MyDestr*]'
 [builtins fixtures/bool.pyi]
 
-[case testSixWithMetaclassErrors]
+[case testSixMetaclassErrors]
 import six
 class M(type): pass
 class A(object): pass
@@ -3540,12 +3963,35 @@ def f() -> type: return M
 class C1(six.with_metaclass(M), object): pass  # E: Invalid base class
 class C2(C1, six.with_metaclass(M)): pass  # E: Invalid base class
 class C3(six.with_metaclass(A)): pass  # E: Metaclasses not inheriting from 'type' are not supported
-class C4(six.with_metaclass(M), metaclass=M): pass  # E: Invalid base class
+@six.add_metaclass(A)  # E: Metaclasses not inheriting from 'type' are not supported
+class D3(A): pass
+class C4(six.with_metaclass(M), metaclass=M): pass  # E: Multiple metaclass definitions
+@six.add_metaclass(M)  # E: Multiple metaclass definitions
+class D4(metaclass=M): pass
 class C5(six.with_metaclass(f())): pass  # E: Dynamic metaclass not supported for 'C5'
+@six.add_metaclass(f())  # E: Dynamic metaclass not supported for 'D5'
+class D5: pass
+
+@six.add_metaclass(M)  # E: Multiple metaclass definitions
+class CD(six.with_metaclass(M)): pass
+
+class M1(type): pass
+class Q1(metaclass=M1): pass
+@six.add_metaclass(M)  # E: Inconsistent metaclass structure for 'CQA'
+class CQA(Q1): pass
+class CQW(six.with_metaclass(M, Q1)): pass  # E: Inconsistent metaclass structure for 'CQW'
 
-[case testSixWithMetaclassErrors_python2-skip]
-# No error here yet
+[case testSixMetaclassErrors_python2]
+# flags: --python-version 2.7
 import six
 class M(type): pass
-class C4(six.with_metaclass(M)):
+class C4(six.with_metaclass(M)):  # E: Multiple metaclass definitions
     __metaclass__ = M
+
+[case testSixMetaclassAny]
+import t  # type: ignore
+import six
+class E(metaclass=t.M): pass
+class F(six.with_metaclass(t.M)): pass
+@six.add_metaclass(t.M)
+class G: pass
diff --git a/test-data/unit/check-classvar.test b/test-data/unit/check-classvar.test
index 02ba8f0..c20f4f4 100644
--- a/test-data/unit/check-classvar.test
+++ b/test-data/unit/check-classvar.test
@@ -151,7 +151,7 @@ A().x.append(1)
 A().x.append('')
 [builtins fixtures/list.pyi]
 [out]
-main:4: error: List item 0 has incompatible type "str"
+main:4: error: List item 0 has incompatible type "str"; expected "int"
 main:6: error: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
 
 [case testClassVarWithUnion]
diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test
index b8e1346..6ea4e3e 100644
--- a/test-data/unit/check-custom-plugin.test
+++ b/test-data/unit/check-custom-plugin.test
@@ -118,7 +118,7 @@ from mypy_extensions import DefaultArg
 from m import Signal
 s: Signal[[int, DefaultArg(str, 'x')]] = Signal()
 reveal_type(s) # E: Revealed type is 'm.Signal[def (builtins.int, x: builtins.str =)]'
-s.x # E: Signal[Callable[[int, str], None]] has no attribute "x"
+s.x # E: "Signal[Callable[[int, str], None]]" has no attribute "x"
 ss: Signal[int, str] # E: Invalid "Signal" type (expected "Signal[[t, ...]]")
 [file m.py]
 from typing import TypeVar, Generic, Callable
diff --git a/test-data/unit/check-default-plugin.test b/test-data/unit/check-default-plugin.test
new file mode 100644
index 0000000..1aae55f
--- /dev/null
+++ b/test-data/unit/check-default-plugin.test
@@ -0,0 +1,33 @@
+-- Test cases for the default plugin
+--
+-- Note that we have additional test cases in pythoneval.test (that use real typeshed stubs).
+
+
+[case testContextManagerWithGenericFunction]
+from contextlib import contextmanager
+from typing import TypeVar, Iterator
+
+T = TypeVar('T')
+
+@contextmanager
+def yield_id(item: T) -> Iterator[T]:
+    yield item
+
+reveal_type(yield_id) # E: Revealed type is 'def [T] (item: T`-1) -> contextlib.GeneratorContextManager[T`-1]'
+
+with yield_id(1) as x:
+    reveal_type(x) # E: Revealed type is 'builtins.int*'
+
+f = yield_id
+def g(x, y): pass
+f = g # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[T], GeneratorContextManager[T]]")
+[typing fixtures/typing-full.pyi]
+
+[case testContextManagerWithUnspecifiedArguments]
+from contextlib import contextmanager
+from typing import Callable, Iterator
+
+c: Callable[..., Iterator[int]]
+reveal_type(c) # E: Revealed type is 'def (*Any, **Any) -> typing.Iterator[builtins.int]'
+reveal_type(contextmanager(c)) # E: Revealed type is 'def (*Any, **Any) -> contextlib.GeneratorContextManager[builtins.int*]'
+[typing fixtures/typing-full.pyi]
diff --git a/test-data/unit/check-dynamic-typing.test b/test-data/unit/check-dynamic-typing.test
index 68a174e..21a6217 100644
--- a/test-data/unit/check-dynamic-typing.test
+++ b/test-data/unit/check-dynamic-typing.test
@@ -302,7 +302,7 @@ h = None # type: Callable[[A], None]
 
 f()     # E: Too few arguments for "f"
 f(x, x) # E: Too many arguments for "f"
-g = f   # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
+g = f   # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[], None]")
 f(a)
 f(x)
 a = f(a)
@@ -319,10 +319,10 @@ g1 = None # type: Callable[[A], None]
 g2 = None # type: Callable[[A, A], None]
 a = None # type: A
 
-g1 = f0 # E: Incompatible types in assignment (expression has type Callable[[], Any], variable has type Callable[[A], None])
-g2 = f0 # E: Incompatible types in assignment (expression has type Callable[[], Any], variable has type Callable[[A, A], None])
-g0 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[], None])
-g1 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[A], None])
+g1 = f0 # E: Incompatible types in assignment (expression has type "Callable[[], Any]", variable has type "Callable[[A], None]")
+g2 = f0 # E: Incompatible types in assignment (expression has type "Callable[[], Any]", variable has type "Callable[[A, A], None]")
+g0 = f2 # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[], None]")
+g1 = f2 # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[A], None]")
 
 g0 = g0
 g2 = f2
@@ -373,9 +373,9 @@ class B: pass
 main:10: error: Too many arguments for "f01"
 main:11: error: Too few arguments for "f13"
 main:12: error: Too many arguments for "f13"
-main:13: error: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[A, A], None])
-main:14: error: Incompatible types in assignment (expression has type Callable[[Any, Any, Any], Any], variable has type Callable[[], None])
-main:15: error: Incompatible types in assignment (expression has type Callable[[Any, Any, Any], Any], variable has type Callable[[A, A, A, A], None])
+main:13: error: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[A, A], None]")
+main:14: error: Incompatible types in assignment (expression has type "Callable[[Any, Any, Any], Any]", variable has type "Callable[[], None]")
+main:15: error: Incompatible types in assignment (expression has type "Callable[[Any, Any, Any], Any]", variable has type "Callable[[A, A, A, A], None]")
 
 [case testSkipTypeCheckingWithImplicitSignature]
 
@@ -407,9 +407,9 @@ g1 = None # type: Callable[[A], None]
 g2 = None # type: Callable[[A, A], None]
 a = None # type: A
 
-g0 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
-g2 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[A, A], None])
-a = a.f  # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type "A")
+g0 = a.f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[], None]")
+g2 = a.f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[A, A], None]")
+a = a.f  # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "A")
 
 class A:
     def g(self) -> None:
@@ -434,7 +434,7 @@ g0 = None # type: Callable[[], None]
 g1 = None # type: Callable[[A], None]
 a = None # type: A
 
-g0 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
+g0 = a.f # E: Incompatible types in assignment (expression has type "Callable[[Any], Any]", variable has type "Callable[[], None]")
 
 g1 = a.f
 a = a.f(a)
@@ -485,7 +485,7 @@ class A:
   def __init__(self, a, b): pass
 [out]
 main:6: error: Too few arguments for "A"
-main:7: error: Incompatible types in assignment (expression has type Type[A], variable has type Callable[[A], A])
+main:7: error: Incompatible types in assignment (expression has type "Type[A]", variable has type "Callable[[A], A]")
 
 [case testUsingImplicitTypeObjectWithIs]
 
@@ -571,7 +571,7 @@ from typing import Any, Callable
 f1 = None # type: Callable[[Any], None]
 f2 = None # type: Callable[[Any, Any], None]
 
-f1 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], None], variable has type Callable[[Any], None])
+f1 = f2 # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], None]", variable has type "Callable[[Any], None]")
 
 
 -- Overriding
@@ -653,6 +653,42 @@ class A(B):
         x()
 [out]
 
+[case testInvalidOverrideWithImplicitSignatureAndClassMethod1]
+class B:
+    @classmethod
+    def f(cls, x, y): pass
+class A(B):
+    @classmethod
+    def f(cls, x, y, z): pass # No error since no annotations
+[builtins fixtures/classmethod.pyi]
+
+[case testInvalidOverrideWithImplicitSignatureAndClassMethod2]
+class B:
+    @classmethod
+    def f(cls, x: int, y): pass
+class A(B):
+    @classmethod
+    def f(cls, x, y, z): pass # No error since no annotations
+[builtins fixtures/classmethod.pyi]
+
+[case testInvalidOverrideWithImplicitSignatureAndStaticMethod1]
+class B:
+    @staticmethod
+    def f(x, y): pass
+class A(B):
+    @staticmethod
+    def f(x, y, z): pass # No error since no annotations
+[builtins fixtures/classmethod.pyi]
+
+[case testInvalidOverrideWithImplicitSignatureAndStaticMethod2]
+class B:
+    @staticmethod
+    def f(self, x: int, y): pass
+class A(B):
+    @staticmethod
+    def f(self, x, y, z): pass # No error since no annotations
+[builtins fixtures/classmethod.pyi]
+
 
 -- Don't complain about too few/many arguments in dynamic functions
 -- ----------------------------------------------------------------
diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test
index 011580e..30984ad 100644
--- a/test-data/unit/check-enum.test
+++ b/test-data/unit/check-enum.test
@@ -325,7 +325,7 @@ main:17: error: Enum() with dict literal requires string literals
 main:18: error: Unexpected arguments to Enum()
 main:19: error: Unexpected arguments to Enum()
 main:20: error: Unexpected arguments to Enum()
-main:22: error: Type[W] has no attribute "c"
+main:22: error: "Type[W]" has no attribute "c"
 
 [case testFunctionalEnumFlag]
 from enum import Flag, IntFlag
@@ -360,6 +360,24 @@ x = y
 [out]
 main:8: error: Incompatible types in assignment (expression has type "__main__.B.E", variable has type "__main__.A.E")
 
+[case testEnumWorkWithForward]
+from enum import Enum
+a: E = E.x
+class E(Enum):
+    x = 1
+    y = 2
+[out]
+
+[case testEnumWorkWithForward2]
+from enum import Enum
+b: F
+F = Enum('F', {'x': 1, 'y': 2})
+
+def fn(x: F) -> None:
+    pass
+fn(b)
+[out]
+
 [case testFunctionalEnum_python2]
 from enum import Enum
 Eu = Enum(u'Eu', u'a b')
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test
index 03bf5bd..8b862d0 100644
--- a/test-data/unit/check-expressions.test
+++ b/test-data/unit/check-expressions.test
@@ -1135,7 +1135,7 @@ b'%a' % 3
 from typing import Any, Dict
 a = None # type: Any
 ds, do, di = None, None, None # type: Dict[str, int], Dict[object, int], Dict[int, int]
-'%(a)' % 1  # E: Format requires a mapping (expression has type "int", expected type for mapping is Dict[Any, Any])
+'%(a)' % 1  # E: Format requires a mapping (expression has type "int", expected type for mapping is "Dict[Any, Any]")
 '%()d' % a
 '%()d' % ds
 '%()d' % do
@@ -1199,7 +1199,7 @@ f = lambda: ''.x
 f = lambda: ''
 [out]
 main:3: error: "str" has no attribute "x"
-main:4: error: Incompatible types in assignment (expression has type Callable[[], str], variable has type Callable[[], int])
+main:4: error: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "Callable[[], int]")
 main:4: error: Incompatible return value type (got "str", expected "int")
 
 [case testVoidLambda]
@@ -1215,7 +1215,7 @@ from typing import Iterator, Callable
 lambda: (yield)
 
 gen: Callable[[], Iterator[str]]
-gen = (lambda: (yield 1))  # E: Incompatible types in yield (actual type "int", expected type "str")
+gen = (lambda: (yield 1))  # E: Incompatible types in "yield" (actual type "int", expected type "str")
 
 def fun(cb: Callable[[], Iterator[str]]) -> None:
     pass
@@ -1231,7 +1231,7 @@ fun(lambda: (yield from [1]))  # E: Incompatible types in "yield from" (actual t
 from typing import List
 a = None # type: List[A]
 a = [x for x in a]
-b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]
+b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]; expected List[B]
 class A: pass
 class B: pass
 [builtins fixtures/for.pyi]
@@ -1240,7 +1240,7 @@ class B: pass
 from typing import List, Tuple
 l = None # type: List[Tuple[A, Tuple[A, B]]]
 a = [a2 for a1, (a2, b1) in l] # type: List[A]
-b = [a2 for a1, (a2, b1) in l] # type: List[B] # E: List comprehension has incompatible type List[A]
+b = [a2 for a1, (a2, b1) in l] # type: List[B] # E: List comprehension has incompatible type List[A]; expected List[B]
 class A: pass
 class B: pass
 [builtins fixtures/for.pyi]
@@ -1259,7 +1259,7 @@ from typing import List
 a = None # type: List[A]
 b = None # type: List[B]
 b = [f(x) for x in a]
-a = [f(x) for x in a] # E: List comprehension has incompatible type List[B]
+a = [f(x) for x in a] # E: List comprehension has incompatible type List[B]; expected List[A]
 ([f(x) for x in b])   # E: Argument 1 to "f" has incompatible type "B"; expected "A"
 class A: pass
 class B: pass
@@ -1285,7 +1285,7 @@ from typing import List
 class A:
     a = None # type: List[A]
     a = [x for x in a]
-    b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]
+    b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]; expected List[B]
 class B: pass
 [builtins fixtures/for.pyi]
 [out]
@@ -1299,7 +1299,7 @@ class B: pass
 from typing import Set
 a = None # type: Set[A]
 a = {x for x in a}
-b = {x for x in a} # type: Set[B] # E: Set comprehension has incompatible type Set[A]
+b = {x for x in a} # type: Set[B] # E: Set comprehension has incompatible type Set[A]; expected Set[B]
 class A: pass
 class B: pass
 [builtins fixtures/set.pyi]
@@ -1322,7 +1322,7 @@ class B: pass
 [out]
 main:5: error: Key expression in dictionary comprehension has incompatible type "A"; expected type "B"
 main:5: error: Value expression in dictionary comprehension has incompatible type "B"; expected type "A"
-main:6: error: Incompatible types in assignment (expression has type Dict[A, B], variable has type "A")
+main:6: error: Incompatible types in assignment (expression has type "Dict[A, B]", variable has type "A")
 
 
 [case testDictionaryComprehensionWithNonDirectMapping]
@@ -1351,7 +1351,7 @@ from typing import Iterator
 a = None # type: Iterator[int]
 a = (x for x in a)
 b = None # type: Iterator[str]
-b = (x for x in a) # E: Generator has incompatible item type "int"
+b = (x for x in a) # E: Generator has incompatible item type "int"; expected "str"
 [builtins fixtures/for.pyi]
 
 [case testGeneratorIncompatibleErrorMessage]
@@ -1359,7 +1359,7 @@ from typing import Callable, Iterator, List
 
 a = []  # type: List[Callable[[], str]]
 b = None  # type: Iterator[Callable[[], int]]
-b = (x for x in a)  # E: Generator has incompatible item type Callable[[], str]
+b = (x for x in a)  # E: Generator has incompatible item type "Callable[[], str]"; expected "Callable[[], int]"
 [builtins fixtures/list.pyi]
 
 -- Conditional expressions
@@ -1394,7 +1394,7 @@ y = '' # E: Incompatible types in assignment (expression has type "str", variabl
 import typing
 x = [1] if bool() else []
 x = [1]
-x = ['x'] # E: List item 0 has incompatible type "str"
+x = ['x'] # E: List item 0 has incompatible type "str"; expected "int"
 [builtins fixtures/list.pyi]
 
 
@@ -1415,9 +1415,9 @@ cast(A, f)
 def f() -> None:
     pass
 [out]
-main:5: error: Unsupported left operand type for + (None)
-main:6: error: Unsupported left operand type for + (Callable[[], None])
-main:7: error: Unsupported operand types for + ("A" and Callable[[], None])
+main:5: error: Unsupported left operand type for + ("None")
+main:6: error: Unsupported left operand type for + ("Callable[[], None]")
+main:7: error: Unsupported operand types for + ("A" and "Callable[[], None]")
 
 [case testOperatorMethodWithInvalidArgCount]
 
@@ -1550,10 +1550,10 @@ def g() -> Iterator[int]:
 [case testDictWithKeywordArgsOnly]
 from typing import Dict, Any
 d1 = dict(a=1, b=2) # type: Dict[str, int]
-d2 = dict(a=1, b='') # type: Dict[str, int] # E: Dict entry 1 has incompatible type "str": "str"
-d3 = dict(a=1) # type: Dict[int, int] # E: Dict entry 0 has incompatible type "str": "int"
+d2 = dict(a=1, b='') # type: Dict[str, int] # E: Dict entry 1 has incompatible type "str": "str"; expected "str": "int"
+d3 = dict(a=1) # type: Dict[int, int] # E: Dict entry 0 has incompatible type "str": "int"; expected "int": "int"
 d4 = dict(a=1, b=1)
-d4.xyz # E: Dict[str, int] has no attribute "xyz"
+d4.xyz # E: "Dict[str, int]" has no attribute "xyz"
 d5 = dict(a=1, b='') # type: Dict[str, Any]
 [builtins fixtures/dict.pyi]
 
@@ -1567,8 +1567,8 @@ dict(undefined) # E: Name 'undefined' is not defined
 [case testDictFromList]
 from typing import Dict
 d = dict([(1, 'x'), (2, 'y')])
-d() # E: Dict[int, str] not callable
-d2 = dict([(1, 'x')]) # type: Dict[str, str] # E: List item 0 has incompatible type "Tuple[int, str]"
+d() # E: "Dict[int, str]" not callable
+d2 = dict([(1, 'x')]) # type: Dict[str, str] # E: List item 0 has incompatible type "Tuple[int, str]"; expected "Tuple[str, str]"
 [builtins fixtures/dict.pyi]
 
 [case testDictFromIterableAndKeywordArg]
@@ -1576,10 +1576,10 @@ from typing import Dict
 it = [('x', 1)]
 
 d = dict(it, x=1)
-d() # E: Dict[str, int] not callable
+d() # E: "Dict[str, int]" not callable
 
 d2 = dict(it, x='') # E: Cannot infer type argument 2 of "dict"
-d2() # E: Dict[Any, Any] not callable
+d2() # E: "Dict[Any, Any]" not callable
 
 d3 = dict(it, x='') # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type "str"; expected "int"
 [builtins fixtures/dict.pyi]
@@ -1591,7 +1591,7 @@ dict(it, x='y') # E: Keyword argument only valid with "str" key type in call to
 
 [case testDictFromIterableAndKeywordArg3]
 d = dict([], x=1)
-d() # E: Dict[str, int] not callable
+d() # E: "Dict[str, int]" not callable
 [builtins fixtures/dict.pyi]
 
 [case testDictFromIterableAndStarStarArgs]
@@ -1600,20 +1600,20 @@ it = [('x', 1)]
 
 kw = {'x': 1}
 d = dict(it, **kw)
-d() # E: Dict[str, int] not callable
+d() # E: "Dict[str, int]" not callable
 
 kw2 = {'x': ''}
 d2 = dict(it, **kw2) # E: Cannot infer type argument 2 of "dict"
-d2() # E: Dict[Any, Any] not callable
+d2() # E: "Dict[Any, Any]" not callable
 
-d3 = dict(it, **kw2) # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type **Dict[str, str]; expected "int"
+d3 = dict(it, **kw2) # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type "**Dict[str, str]"; expected "int"
 [builtins fixtures/dict.pyi]
 
 [case testDictFromIterableAndStarStarArgs2]
 it = [(1, 'x')]
 kw = {'x': 'y'}
 d = dict(it, **kw) # E: Keyword argument only valid with "str" key type in call to "dict"
-d() # E: Dict[int, str] not callable
+d() # E: "Dict[int, str]" not callable
 [builtins fixtures/dict.pyi]
 
 [case testUserDefinedClassNamedDict]
@@ -1633,10 +1633,10 @@ class D1(dict): pass # Implicit base class Dict[Any, Any]
 D1([(1, 2)], x=1)
 class D2(Dict[T, S], Generic[T, S]): pass
 da = D2([('x', 2)], x=1)
-da() # E: D2[str, int] not callable
+da() # E: "D2[str, int]" not callable
 D2([(1, 2)], x=1) # E: Keyword argument only valid with "str" key type in call to "dict"
 db = D2(x=1)
-db() # E: D2[str, int] not callable
+db() # E: "D2[str, int]" not callable
 [builtins fixtures/dict.pyi]
 
 [case testSpecialSignatureForSubclassOfDict2]
@@ -1653,7 +1653,7 @@ S = TypeVar('S')
 class D(Dict[T, S], Generic[T, S]):
     def __init__(self, x: S, y: T) -> None: pass
 d = D(1, y='')
-d() # E: D[str, int] not callable
+d() # E: "D[str, int]" not callable
 [builtins fixtures/dict.pyi]
 
 [case testRevealType]
@@ -1680,12 +1680,35 @@ x = 1 + 1
 [out]
 main:2: error: Revealed type is 'builtins.int'
 
+[case testRevealUncheckedFunction]
+def f():
+    x = 42
+    reveal_type(x)
+[out]
+main:3: error: Revealed type is 'Any'
+main:3: note: 'reveal_type' always outputs 'Any' in unchecked functions
+
+[case testRevealCheckUntypedDefs]
+# flags: --check-untyped-defs
+def f():
+    x = 42
+    reveal_type(x)
+[out]
+main:4: error: Revealed type is 'builtins.int'
+
+[case testRevealTypedDef]
+def f() -> None:
+    x = 42
+    reveal_type(x)
+[out]
+main:3: error: Revealed type is 'builtins.int'
+
 [case testEqNone]
 None == None
 [builtins fixtures/ops.pyi]
 
 [case testLtNone]
-None < None  # E: Unsupported left operand type for < (None)
+None < None  # E: Unsupported left operand type for < ("None")
 [builtins fixtures/ops.pyi]
 
 [case testDictWithStarExpr]
@@ -1700,8 +1723,8 @@ a = {'a': 1}
 b = {'z': 26, **a}
 c = {**b}
 d = {**a, **b, 'c': 3}
-e = {1: 'a', **a}  # E: Argument 1 to "update" of "dict" has incompatible type Dict[str, int]; expected Mapping[int, str]
-f = {**b}  # type: Dict[int, int]  # E: List item 0 has incompatible type Dict[str, int]
+e = {1: 'a', **a}  # E: Argument 1 to "update" of "dict" has incompatible type "Dict[str, int]"; expected "Mapping[int, str]"
+f = {**b}  # type: Dict[int, int]  # E: List item 0 has incompatible type "Dict[str, int]"; expected "Mapping[int, int]"
 [builtins fixtures/dict.pyi]
 
 [case testDictIncompatibleTypeErrorMessage]
@@ -1710,11 +1733,39 @@ from typing import Dict, Callable
 def things() -> int:
     return 42
 
-stuff: Dict[int, Callable[[], str]] = {  # E: Dict entry 0 has incompatible type "int": Callable[[], int]
+stuff: Dict[int, Callable[[], str]] = {  # E: Dict entry 0 has incompatible type "int": "Callable[[], int]"; expected "int": "Callable[[], str]"
     1: things
 }
 [builtins fixtures/dict.pyi]
 
+[case testDictIncompatibleKeyVerbosity]
+from typing import Dict
+import mod
+
+class A: ...
+class B(A): ...
+
+d: Dict[A, B] = {A(): mod.B()}  # E: Dict entry 0 has incompatible type "A": "mod.B"; expected "A": "__main__.B"
+
+[file mod.py]
+class B: ...
+
+[builtins fixtures/dict.pyi]
+
+[case testDictIncompatibleValueVerbosity]
+from typing import Dict
+import mod
+
+class A: ...
+class B(A): ...
+
+d: Dict[B, A] = {mod.B(): A()}  # E: Dict entry 0 has incompatible type "mod.B": "A"; expected "__main__.B": "A"
+
+[file mod.py]
+class B: ...
+
+[builtins fixtures/dict.pyi]
+
 -- Type checker default plugin
 -- ---------------------------
 
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test
index dc856f6..37c2bbe 100644
--- a/test-data/unit/check-flags.test
+++ b/test-data/unit/check-flags.test
@@ -44,6 +44,75 @@ def f():
     1 + "str"
 [out]
 main:2: error: Function is missing a type annotation
+
+[case testDisallowUntypedDefsUntypedDecorator]
+# flags: --disallow-untyped-decorators
+def d(p):
+    return p
+
+@d  # E: Untyped decorator makes function "f" untyped
+def f(i: int) -> int:
+    return i
+
+[case testDisallowUntypedDecoratorsUnresolvedDecorator]
+# flags: --disallow-untyped-decorators --ignore-missing-imports
+from nonexistent import d
+
+@d  # E: Untyped decorator makes function "f" untyped
+def f(i: int) -> int:
+    return i
+
+[case testDisallowUntypedDecoratorUntypedDef]
+# flags: --disallow-untyped-decorators
+
+def d(p):
+    return p
+
+@d  # no error
+def f(): pass
+
+[case testDisallowUntypedDecoratorsPartialFunction]
+# flags: --disallow-untyped-decorators
+
+def d(p):
+    return p
+
+@d  # E: Untyped decorator makes function "f" untyped
+def f(x) -> None: pass
+
+@d  # E: Untyped decorator makes function "g" untyped
+def g(x, y: int): pass
+
+@d  # E: Untyped decorator makes function "h" untyped
+def h(x: int): pass
+
+[case testDisallowUntypedDecoratorsImpreciseDecorator]
+# flags: --disallow-untyped-decorators
+from typing import Any
+
+def d(p) -> Any:
+    return p
+
+@d  # no error
+def f() -> None: pass
+
+[case testDisallowUntypedDecoratorsMultipleDecorators]
+# flags: --disallow-untyped-decorators
+from typing import Any
+
+def d1(p):
+    return p
+def d2(p):
+    return p
+def d3(p) -> Any:
+    return p
+
+@d1  # E: Untyped decorator makes function "f" untyped
+@d2  # E: Untyped decorator makes function "f" untyped
+@d3  # no error
+@d1  # E: Untyped decorator makes function "f" untyped
+def f() -> None: pass
+
 [case testUntypedDefDisallowUnannotated]
 # flags: --disallow-any=unannotated
 def f():
@@ -196,7 +265,7 @@ reveal_type(f() or no_return())  # E: Revealed type is 'builtins.int'
 # flags: --warn-no-return
 from mypy_extensions import NoReturn
 
-x = 0  # type: NoReturn  # E: Incompatible types in assignment (expression has type "int", variable has type NoReturn)
+x = 0  # type: NoReturn  # E: Incompatible types in assignment (expression has type "int", variable has type "NoReturn")
 [builtins fixtures/dict.pyi]
 
 [case testNoReturnImportFromTyping]
@@ -212,7 +281,7 @@ def no_return() -> NoReturn: pass
 def f() -> NoReturn:
   no_return()
 
-x: NoReturn = 0 # E: Incompatible types in assignment (expression has type "int", variable has type NoReturn)
+x: NoReturn = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "NoReturn")
 [builtins fixtures/dict.pyi]
 
 [case testShowErrorContextFunction]
@@ -393,7 +462,7 @@ x = 0
 x = None
 [file optional.py]
 x = 0
-x = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+x = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
 
 [file mypy.ini]
 [[mypy]
@@ -417,10 +486,10 @@ f(optional_int)  # optional ints can be used as ints in this file
 [file optional.py]
 import standard
 def f(x: int) -> None: pass
-standard.an_int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+standard.an_int = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
 standard.optional_int = None  # OK -- explicitly declared as optional
 f(standard.an_int)  # ints can be used as ints
-f(standard.optional_int)  # E: Argument 1 to "f" has incompatible type None; expected "int"
+f(standard.optional_int)  # E: Argument 1 to "f" has incompatible type "None"; expected "int"
 
 [file mypy.ini]
 [[mypy]
@@ -534,9 +603,9 @@ def foo(l: List[Unchecked]) -> List[Unchecked]:
     return l
 [builtins fixtures/list.pyi]
 [out]
-main:5: error: Return type becomes List[Any] due to an unfollowed import
-main:5: error: Argument 1 to "foo" becomes List[Any] due to an unfollowed import
-main:6: error: Type of variable becomes List[Any] due to an unfollowed import
+main:5: error: Return type becomes "List[Any]" due to an unfollowed import
+main:5: error: Argument 1 to "foo" becomes "List[Any]" due to an unfollowed import
+main:6: error: Type of variable becomes "List[Any]" due to an unfollowed import
 
 [case testDisallowImplicitAnyInherit]
 # flags: --ignore-missing-imports --disallow-any=unimported
@@ -546,7 +615,7 @@ from typing import List
 class C(Unchecked): # E: Base type Unchecked becomes "Any" due to an unfollowed import
     pass
 
-class A(List[Unchecked]): # E: Base type becomes List[Any] due to an unfollowed import
+class A(List[Unchecked]): # E: Base type becomes "List[Any]" due to an unfollowed import
     pass
 [builtins fixtures/list.pyi]
 
@@ -557,7 +626,7 @@ from typing import List
 
 X = List[Unchecked]
 
-def f(x: X) -> None:  # E: Argument 1 to "f" becomes List[Any] due to an unfollowed import
+def f(x: X) -> None:  # E: Argument 1 to "f" becomes "List[Any]" due to an unfollowed import
     pass
 [builtins fixtures/list.pyi]
 
@@ -568,7 +637,7 @@ from typing import List, cast
 
 
 foo = [1, 2, 3]
-cast(List[Unchecked], foo)  # E: Target type of cast becomes List[Any] due to an unfollowed import
+cast(List[Unchecked], foo)  # E: Target type of cast becomes "List[Any]" due to an unfollowed import
 cast(Unchecked, foo)  # E: Target type of cast becomes "Any" due to an unfollowed import
 [builtins fixtures/list.pyi]
 
@@ -592,15 +661,15 @@ T = TypeVar('T', Unchecked, List[Unchecked], str)
 [builtins fixtures/list.pyi]
 [out]
 main:5: error: Constraint 1 becomes "Any" due to an unfollowed import
-main:5: error: Constraint 2 becomes List[Any] due to an unfollowed import
+main:5: error: Constraint 2 becomes "List[Any]" due to an unfollowed import
 
 [case testDisallowImplicitAnyNewType]
 # flags: --ignore-missing-imports --disallow-any=unimported
 from typing import NewType, List
 from missing import Unchecked
 
-Baz = NewType('Baz', Unchecked)  # E: Argument 2 to NewType(...) must be subclassable (got Any)
-Bar = NewType('Bar', List[Unchecked])  # E: Argument 2 to NewType(...) becomes List[Any] due to an unfollowed import
+Baz = NewType('Baz', Unchecked)  # E: Argument 2 to NewType(...) must be subclassable (got "Any")
+Bar = NewType('Bar', List[Unchecked])  # E: Argument 2 to NewType(...) becomes "List[Any]" due to an unfollowed import
 
 [builtins fixtures/list.pyi]
 
@@ -614,7 +683,7 @@ def foo(f: Callable[[], Unchecked]) -> Tuple[Unchecked]:
 [builtins fixtures/list.pyi]
 [out]
 main:5: error: Return type becomes "Tuple[Any]" due to an unfollowed import
-main:5: error: Argument 1 to "foo" becomes Callable[[], Any] due to an unfollowed import
+main:5: error: Argument 1 to "foo" becomes "Callable[[], Any]" due to an unfollowed import
 
 [case testDisallowImplicitAnySubclassingExplicitAny]
 # flags: --ignore-missing-imports --disallow-any=unimported --disallow-subclassing-any
@@ -651,7 +720,7 @@ from mypy_extensions import TypedDict
 from typing import List
 from x import Unchecked
 
-M = TypedDict('M', {'x': str, 'y': List[Unchecked]})  # E: Type of a TypedDict key becomes List[Any] due to an unfollowed import
+M = TypedDict('M', {'x': str, 'y': List[Unchecked]})  # E: Type of a TypedDict key becomes "List[Any]" due to an unfollowed import
 
 def f(m: M) -> M: pass  # no error
 [builtins fixtures/dict.pyi]
@@ -701,7 +770,7 @@ def d(f) -> Callable[..., None]:
     return f
 
 @d
-def g(i: int, s: str) -> None: pass  # E: Type of decorated function contains type "Any" (Callable[..., None])
+def g(i: int, s: str) -> None: pass  # E: Type of decorated function contains type "Any" ("Callable[..., None]")
 
 [builtins fixtures/list.pyi]
 [case testDisallowAnyDecoratedNonexistentDecorator]
@@ -721,13 +790,13 @@ def d2(f) -> Callable[[int], List[Any]]: pass
 def d3(f) -> Callable[[Any], List[str]]: pass
 
 @d
-def f(i: int, s: str) -> None:  # E: Type of decorated function contains type "Any" (Callable[[int, Any], Any])
+def f(i: int, s: str) -> None:  # E: Type of decorated function contains type "Any" ("Callable[[int, Any], Any]")
     pass
 @d2
-def g(i: int) -> None:  # E: Type of decorated function contains type "Any" (Callable[[int], List[Any]])
+def g(i: int) -> None:  # E: Type of decorated function contains type "Any" ("Callable[[int], List[Any]]")
     pass
 @d3
-def h(i: int) -> None:  # E: Type of decorated function contains type "Any" (Callable[[Any], List[str]])
+def h(i: int) -> None:  # E: Type of decorated function contains type "Any" ("Callable[[Any], List[str]]")
     pass
 [builtins fixtures/list.pyi]
 
@@ -814,9 +883,9 @@ def g(s: List[Any]) -> None:
 
 f(0)
 
+# type of list below is inferred with expected type of "List[Any]", so that becomes its type
+# type of list below is inferred with expected type of "List[Any]", so that becomes it's type
 # instead of List[str]
-g([''])  # E: Expression type contains "Any" (has type List[Any])
+g([''])  # E: Expression type contains "Any" (has type "List[Any]")
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExprAllowsAnyInCast]
@@ -847,8 +916,8 @@ n = Foo().g  # type: Any  # E: Expression has type "Any"
 from typing import List
 
 l: List = []
-l.append(1)  # E: Expression type contains "Any" (has type List[Any])
-k = l[0]  # E: Expression type contains "Any" (has type List[Any])  # E: Expression has type "Any"
+l.append(1)  # E: Expression type contains "Any" (has type "List[Any]")
+k = l[0]  # E: Expression type contains "Any" (has type "List[Any]")  # E: Expression has type "Any"
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExprTypeVar]
@@ -898,3 +967,37 @@ Movie = TypedDict('Movie', {'name': str, 'year': int})
 def g(m: Movie) -> Movie:
     return m
 [builtins fixtures/dict.pyi]
+
+[case testDisallowIncompleteDefs]
+# flags: --disallow-incomplete-defs
+
+def f(i: int):  # E: Function is missing a return type annotation
+    pass
+def g(i) -> None:  # E: Function is missing a type annotation for one or more arguments
+    pass
+def h(i: int) -> int:  # no error
+    return i
+def i() -> None:  # no error
+    pass
+
+[case testDisallowIncompleteDefsNoReturn]
+# flags: --disallow-incomplete-defs --disallow-untyped-defs
+
+def f(i: int):  # E: Function is missing a return type annotation
+    pass
+
+[case testDisallowIncompleteDefsSelf]
+# flags: --disallow-incomplete-defs
+class C:
+    def foo(self) -> None:  # no error
+        pass
+
+[case testDisallowIncompleteDefsPartiallyAnnotatedParams]
+# flags: --disallow-incomplete-defs
+
+def f(i: int, s):
+    pass
+
+[out]
+main:3: error: Function is missing a return type annotation
+main:3: error: Function is missing a type annotation for one or more arguments
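As a quick illustration of the behaviour pinned down by the new --disallow-incomplete-defs cases above, here is a minimal stand-alone sketch (hypothetical file partial.py; the trailing comments show the diagnostics mypy is expected to report when run with --disallow-incomplete-defs):

    # partial.py -- checked with: mypy --disallow-incomplete-defs partial.py

    def scale(x: int):          # error: Function is missing a return type annotation
        return x * 2

    def greet(name) -> None:    # error: Function is missing a type annotation for one or more arguments
        print("hello", name)

    def add(a: int, b: int) -> int:  # fully annotated, so no error
        return a + b
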
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test
index 8f35821..e396062 100644
--- a/test-data/unit/check-functions.test
+++ b/test-data/unit/check-functions.test
@@ -65,10 +65,10 @@ class B(A): pass
 f = None # type: Callable[[B], A]
 g = None # type: Callable[[A], A]  # subtype of f
 h = None # type: Callable[[B], B]  # subtype of f
-g = h  # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[A], A])
-h = f  # E: Incompatible types in assignment (expression has type Callable[[B], A], variable has type Callable[[B], B])
-h = g  # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[B], B])
-g = f  # E: Incompatible types in assignment (expression has type Callable[[B], A], variable has type Callable[[A], A])
+g = h  # E: Incompatible types in assignment (expression has type "Callable[[B], B]", variable has type "Callable[[A], A]")
+h = f  # E: Incompatible types in assignment (expression has type "Callable[[B], A]", variable has type "Callable[[B], B]")
+h = g  # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[B], B]")
+g = f  # E: Incompatible types in assignment (expression has type "Callable[[B], A]", variable has type "Callable[[A], A]")
 f = g
 f = h
 f = f
@@ -79,13 +79,13 @@ h = h
 
 def l(x) -> None: ...
 def r(__, *, x) -> None: ...
-r = l # E: Incompatible types in assignment (expression has type Callable[[Any], None], variable has type Callable[[Any, NamedArg(Any, 'x')], None])
+r = l # E: Incompatible types in assignment (expression has type "Callable[[Any], None]", variable has type "Callable[[Any, NamedArg(Any, 'x')], None]")
 
 [case testSubtypingFunctionsRequiredLeftArgNotPresent]
 
 def l(x, y) -> None: ...
 def r(x) -> None: ...
-r = l # E: Incompatible types in assignment (expression has type Callable[[Any, Any], None], variable has type Callable[[Any], None])
+r = l # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], None]", variable has type "Callable[[Any], None]")
 
 [case testSubtypingFunctionsImplicitNames]
 from typing import Any
@@ -115,10 +115,10 @@ hh = h
 ff = gg
 ff_nonames = ff
 ff_nonames = f_nonames # reset
-ff = ff_nonames # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg(int, 'a'), Arg(str, 'b')], None])
+ff = ff_nonames # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]")
 ff = f # reset
-gg = ff # E: Incompatible types in assignment (expression has type Callable[[Arg(int, 'a'), Arg(str, 'b')], None], variable has type Callable[[Arg(int, 'a'), DefaultArg(str, 'b')], None])
-gg = hh # E: Incompatible types in assignment (expression has type Callable[[Arg(int, 'aa'), DefaultArg(str, 'b')], None], variable has type Callable[[Arg(int, 'a'), DefaultArg(str, 'b')], None])
+gg = ff # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]", variable has type "Callable[[Arg(int, 'a'), DefaultArg(str, 'b')], None]")
+gg = hh # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'aa'), DefaultArg(str, 'b')], None]", variable has type "Callable[[Arg(int, 'a'), DefaultArg(str, 'b')], None]")
 
 [case testSubtypingFunctionsArgsKwargs]
 from typing import Any, Callable
@@ -144,7 +144,7 @@ ee_var = everything
 ee_var = everywhere
 
 ee_var = specific_1 # The difference between Callable[..., blah] and one with a *args: Any, **kwargs: Any is that the ... goes loosely both ways.
-ee_def = specific_1 # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[VarArg(Any), KwArg(Any)], None])
+ee_def = specific_1 # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[VarArg(Any), KwArg(Any)], None]")
 
 [builtins fixtures/dict.pyi]
 
@@ -175,7 +175,7 @@ ff = f
 gg = g
 
 ff = g
-gg = f # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg(int, 'a'), Arg(str, 'b')], None])
+gg = f # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]")
 
 [case testLackOfNamesFastparse]
 
@@ -187,15 +187,15 @@ ff = f
 gg = g
 
 ff = g
-gg = f # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg(int, 'a'), Arg(str, 'b')], None])
+gg = f # E: Incompatible types in assignment (expression has type "Callable[[int, str], None]", variable has type "Callable[[Arg(int, 'a'), Arg(str, 'b')], None]")
 
 [case testFunctionTypeCompatibilityWithOtherTypes]
 from typing import Callable
 f = None # type: Callable[[], None]
 a, o = None, None # type: (A, object)
-a = f   # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type "A")
-f = a   # E: Incompatible types in assignment (expression has type "A", variable has type Callable[[], None])
-f = o   # E: Incompatible types in assignment (expression has type "object", variable has type Callable[[], None])
+a = f   # E: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "A")
+f = a   # E: Incompatible types in assignment (expression has type "A", variable has type "Callable[[], None]")
+f = o   # E: Incompatible types in assignment (expression has type "object", variable has type "Callable[[], None]")
 f = f() # E: Function does not return a value
 
 f = f
@@ -208,7 +208,7 @@ class A: pass
 from typing import Callable
 f = None # type: Callable[[], None]
 g = None # type: Callable[[], object]
-f = g  # E: Incompatible types in assignment (expression has type Callable[[], object], variable has type Callable[[], None])
+f = g  # E: Incompatible types in assignment (expression has type "Callable[[], object]", variable has type "Callable[[], None]")
 g = f  # OK
 
 f = f
@@ -219,9 +219,9 @@ from typing import Callable
 f = None # type: Callable[[A, A], None]
 g = None # type: Callable[[A, B], None]
 h = None # type: Callable[[B, B], None]
-f = g  # E: Incompatible types in assignment (expression has type Callable[[A, B], None], variable has type Callable[[A, A], None])
-f = h  # E: Incompatible types in assignment (expression has type Callable[[B, B], None], variable has type Callable[[A, A], None])
-g = h  # E: Incompatible types in assignment (expression has type Callable[[B, B], None], variable has type Callable[[A, B], None])
+f = g  # E: Incompatible types in assignment (expression has type "Callable[[A, B], None]", variable has type "Callable[[A, A], None]")
+f = h  # E: Incompatible types in assignment (expression has type "Callable[[B, B], None]", variable has type "Callable[[A, A], None]")
+g = h  # E: Incompatible types in assignment (expression has type "Callable[[B, B], None]", variable has type "Callable[[A, B], None]")
 g = f
 h = f
 h = g
@@ -238,10 +238,10 @@ f = None # type: Callable[[], None]
 g = None # type: Callable[[A], None]
 h = None # type: Callable[[A, A], None]
 
-f = g   # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[], None])
-f = h   # E: Incompatible types in assignment (expression has type Callable[[A, A], None], variable has type Callable[[], None])
-h = f   # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type Callable[[A, A], None])
-h = g   # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[A, A], None])
+f = g   # E: Incompatible types in assignment (expression has type "Callable[[A], None]", variable has type "Callable[[], None]")
+f = h   # E: Incompatible types in assignment (expression has type "Callable[[A, A], None]", variable has type "Callable[[], None]")
+h = f   # E: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "Callable[[A, A], None]")
+h = g   # E: Incompatible types in assignment (expression has type "Callable[[A], None]", variable has type "Callable[[A, A], None]")
 
 f = f
 g = g
@@ -255,8 +255,8 @@ class A: pass
 t = None # type: type
 a = None # type: A
 
-a = A # E: Incompatible types in assignment (expression has type Type[A], variable has type "A")
-t = f # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type "type")
+a = A # E: Incompatible types in assignment (expression has type "Type[A]", variable has type "A")
+t = f # E: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "type")
 t = A
 
 class A:
@@ -272,7 +272,7 @@ f = None # type: Callable[[AA], A]
 g = None # type: Callable[[B], B]
 h = None # type: Callable[[A], AA]
 
-h = i  # E: Incompatible types in assignment (expression has type overloaded function, variable has type Callable[[A], AA])
+h = i  # E: Incompatible types in assignment (expression has type overloaded function, variable has type "Callable[[A], AA]")
 f = j
 
 f = i
@@ -311,7 +311,7 @@ a, b, c = None, None, None # type: (A, B, C)
 b = f(a)  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
 a = f(b)  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
 b = f(c)  # E: Incompatible types in assignment (expression has type "C", variable has type "B")
-g4 = f    # E: Incompatible types in assignment (expression has type overloaded function, variable has type Callable[[A], B])
+g4 = f    # E: Incompatible types in assignment (expression has type overloaded function, variable has type "Callable[[A], B]")
 
 g1 = f
 g2 = f
@@ -335,10 +335,10 @@ def f(x: C) -> C: pass
 from typing import Any, Callable, List
 def f(fields: List[Callable[[Any], Any]]): pass
 class C: pass
-f([C])  # E: List item 0 has incompatible type Type[C]
+f([C])  # E: List item 0 has incompatible type "Type[C]"; expected "Callable[[Any], Any]"
 class D:
     def __init__(self, a, b): pass
-f([D])  # E: List item 0 has incompatible type Type[D]
+f([D])  # E: List item 0 has incompatible type "Type[D]"; expected "Callable[[Any], Any]"
 [builtins fixtures/list.pyi]
 
 [case testSubtypingTypeTypeAsCallable]
@@ -353,7 +353,7 @@ from typing import Callable, Type
 class A: pass
 x = None  # type: Callable[..., A]
 y = None  # type: Type[A]
-y = x  # E: Incompatible types in assignment (expression has type Callable[..., A], variable has type Type[A])
+y = x  # E: Incompatible types in assignment (expression has type "Callable[..., A]", variable has type "Type[A]")
 
 -- Default argument values
 -- -----------------------
@@ -388,17 +388,54 @@ class A: pass
 
 [case testDefaultArgumentExpressions2]
 import typing
-def f(x: 'A' = B()) -> None: # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+def f(x: 'A' = B()) -> None: # E: Incompatible default for argument "x" (default has type "B", argument has type "A")
+    b = x # type: B      # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    a = x # type: A
+
+class B: pass
+class A: pass
+
+[case testDefaultArgumentExpressionsGeneric]
+from typing import TypeVar
+T = TypeVar('T', bound='A')
+def f(x: T = B()) -> None: # E: Incompatible default for argument "x" (default has type "B", argument has type "T")
+    b = x # type: B      # E: Incompatible types in assignment (expression has type "T", variable has type "B")
+    a = x # type: A
+
+class B: pass
+class A: pass
+
+[case testDefaultArgumentExpressionsPython2]
+# flags: --python-version 2.7
+from typing import Tuple
+def f(x = B()): # E: Incompatible default for argument "x" (default has type "B", argument has type "A")
+    # type: (A) -> None
     b = x # type: B      # E: Incompatible types in assignment (expression has type "A", variable has type "B")
     a = x # type: A
 
 class B: pass
 class A: pass
-[out]
+
+[case testDefaultTupleArgumentExpressionsPython2]
+# flags: --python-version 2.7
+from typing import Tuple
+def f((x, y) = (A(), B())): # E: Incompatible default for tuple argument 1 (default has type "Tuple[A, B]", argument has type "Tuple[B, B]")
+    # type: (Tuple[B, B]) -> None
+    b = x # type: B
+    a = x # type: A      # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+def g(a, (x, y) = (A(),)): # E: Incompatible default for tuple argument 2 (default has type "Tuple[A]", argument has type "Tuple[B, B]")
+    # type: (int, Tuple[B, B]) -> None
+    pass
+def h((x, y) = (A(), B(), A())): # E: Incompatible default for tuple argument 1 (default has type "Tuple[A, B, A]", argument has type "Tuple[B, B]")
+    # type: (Tuple[B, B]) -> None
+    pass
+
+class B: pass
+class A: pass
 
 [case testDefaultArgumentsWithSubtypes]
 import typing
-def f(x: 'B' = A()) -> None: # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+def f(x: 'B' = A()) -> None: # E: Incompatible default for argument "x" (default has type "A", argument has type "B")
     pass
 def g(x: 'A' = B()) -> None:
     pass
@@ -409,7 +446,7 @@ class B(A): pass
 
 [case testMultipleDefaultArgumentExpressions]
 import typing
-def f(x: 'A' = B(), y: 'B' = B()) -> None: # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+def f(x: 'A' = B(), y: 'B' = B()) -> None: # E: Incompatible default for argument "x" (default has type "B", argument has type "A")
     pass
 def h(x: 'A' = A(), y: 'B' = B()) -> None:
     pass
@@ -420,7 +457,7 @@ class B: pass
 
 [case testMultipleDefaultArgumentExpressions2]
 import typing
-def g(x: 'A' = A(), y: 'B' = A()) -> None: # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+def g(x: 'A' = A(), y: 'B' = A()) -> None: # E: Incompatible default for argument "y" (default has type "A", argument has type "B")
     pass
 
 class A: pass
@@ -518,7 +555,7 @@ class A(Generic[t]):
     g = f
 a = None # type: A[B]
 a.g(B())
-a.g(a)   # E: Argument 1 has incompatible type A[B]; expected "B"
+a.g(a)   # E: Argument 1 has incompatible type "A[B]"; expected "B"
 
 [case testInvalidMethodAsDataAttributeInGenericClass]
 from typing import Any, TypeVar, Generic, Callable
@@ -639,8 +676,8 @@ def f() -> None:
         g(1)
         g.x # E
 [out]
-main:7: error: Callable[..., Any] has no attribute "x"
-main:11: error: Callable[..., Any] has no attribute "x"
+main:7: error: "Callable[..., Any]" has no attribute "x"
+main:11: error: "Callable[..., Any]" has no attribute "x"
 
 [case testNestedGenericFunctions]
 from typing import TypeVar
@@ -748,7 +785,7 @@ f(None) # E: Too many arguments for "f"
 from typing import Any, Callable
 def dec1(f: Callable[[Any], None]) -> Callable[[], None]: pass
 def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass
- at dec1 # E: Argument 1 to "dec2" has incompatible type Callable[[Any], Any]; expected Callable[[Any, Any], None]
+ at dec1 # E: Argument 1 to "dec2" has incompatible type "Callable[[Any], Any]"; expected "Callable[[Any, Any], None]"
 @dec2
 def f(x): pass
 
@@ -756,7 +793,7 @@ def f(x): pass
 from typing import Any, Callable
 def dec1(f: Callable[[Any, Any], None]) -> Callable[[], None]: pass
 def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass
- at dec1 # E: Argument 1 to "dec1" has incompatible type Callable[[Any], None]; expected Callable[[Any, Any], None]
+ at dec1 # E: Argument 1 to "dec1" has incompatible type "Callable[[Any], None]"; expected "Callable[[Any, Any], None]"
 @dec2
 def f(x, y): pass
 
@@ -1239,6 +1276,56 @@ if x:
 else:
     def f(x: int = 0) -> None: pass # E: All conditional function variants must have identical signatures
 
+[case testConditionalFunctionDefinitionUsingDecorator1]
+from typing import Callable
+
+def dec(f) -> Callable[[int], None]: pass
+
+x = int()
+if x:
+    @dec
+    def f(): pass
+else:
+    def f(x: int) -> None: pass
+
+[case testConditionalFunctionDefinitionUsingDecorator2]
+from typing import Callable
+
+def dec(f) -> Callable[[int], None]: pass
+
+x = int()
+if x:
+    @dec
+    def f(): pass
+else:
+    def f(x: str) -> None: pass # E: Incompatible redefinition (redefinition with type "Callable[[str], None]", original type "Callable[[int], None]")
+
+[case testConditionalFunctionDefinitionUsingDecorator3]
+from typing import Callable
+
+def dec(f) -> Callable[[int], None]: pass
+
+x = int()
+if x:
+    def f(x: int) -> None: pass
+else:
+    # TODO: This should be okay.
+    @dec # E: Name 'f' already defined
+    def f(): pass
+
+[case testConditionalFunctionDefinitionUsingDecorator4]
+from typing import Callable
+
+def dec(f) -> Callable[[int], None]: pass
+
+x = int()
+if x:
+    def f(x: str) -> None: pass
+else:
+    # TODO: We should report an incompatible redefinition.
+    @dec # E: Name 'f' already defined
+    def f(): pass
+
 [case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1]
 from typing import Any
 def f(x: str) -> None: pass
@@ -1285,7 +1372,7 @@ def g() -> None:
 def g(): pass
 f = g
 if g():
-    def f(x): pass  # E: Incompatible redefinition (redefinition with type Callable[[Any], Any], original type Callable[[], Any])
+    def f(x): pass  # E: Incompatible redefinition (redefinition with type "Callable[[Any], Any]", original type "Callable[[], Any]")
 
 [case testRedefineFunctionDefinedAsVariableWithVariance1]
 class B: pass
@@ -1480,7 +1567,7 @@ L = Callable[[Arg(name='x', type=int)], int] # ok
 # I have commented out the following test because I don't know how to expect the "defined here" note part of the error.
 # M = Callable[[Arg(gnome='x', type=int)], int]   E: Invalid type alias   E: Unexpected keyword argument "gnome" for "Arg"
 N = Callable[[Arg(name=None, type=int)], int] # ok
-O = Callable[[List[Arg(int)]], int]  # E: Invalid type alias  # E: Value of type "int" is not indexable  # E: Type expected within [...] # E: The type Type[List[Any]] is not generic and not indexable
+O = Callable[[List[Arg(int)]], int]  # E: Invalid type alias  # E: Value of type "int" is not indexable  # E: Type expected within [...] # E: The type "Type[List[Any]]" is not generic and not indexable
 P = Callable[[mypy_extensions.VarArg(int)], int] # ok
 Q = Callable[[Arg(int, type=int)], int]  # E: Invalid type alias  # E: Value of type "int" is not indexable  # E: "Arg" gets multiple values for keyword argument "type"
 R = Callable[[Arg(int, 'x', name='y')], int]  # E: Invalid type alias  # E: Value of type "int" is not indexable  # E: "Arg" gets multiple values for keyword argument "name"
@@ -1523,7 +1610,7 @@ def f2(*args, **kwargs) -> int: pass
 d(f1)
 e(f2)
 d(f2)
-e(f1)  # E: Argument 1 to "e" has incompatible type Callable[[VarArg(Any)], int]; expected Callable[[VarArg(Any), KwArg(Any)], int]
+e(f1)  # E: Argument 1 to "e" has incompatible type "Callable[[VarArg(Any)], int]"; expected "Callable[[VarArg(Any), KwArg(Any)], int]"
 
 [builtins fixtures/dict.pyi]
 
@@ -1644,12 +1731,12 @@ def isf_unnamed(__i: int, __s: str) -> str:
 
 int_str_fun = isf
 int_str_fun = isf_unnamed
-int_named_str_fun = isf_unnamed # E: Incompatible types in assignment (expression has type Callable[[int, str], str], variable has type Callable[[int, Arg(str, 's')], str])
+int_named_str_fun = isf_unnamed # E: Incompatible types in assignment (expression has type "Callable[[int, str], str]", variable has type "Callable[[int, Arg(str, 's')], str]")
 int_opt_str_fun = iosf
 int_str_fun = iosf
-int_opt_str_fun = isf # E: Incompatible types in assignment (expression has type Callable[[Arg(int, 'ii'), Arg(str, 'ss')], str], variable has type Callable[[int, DefaultArg(str)], str])
+int_opt_str_fun = isf # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'ii'), Arg(str, 'ss')], str]", variable has type "Callable[[int, DefaultArg(str)], str]")
 
-int_named_str_fun = isf # E: Incompatible types in assignment (expression has type Callable[[Arg(int, 'ii'), Arg(str, 'ss')], str], variable has type Callable[[int, Arg(str, 's')], str])
+int_named_str_fun = isf # E: Incompatible types in assignment (expression has type "Callable[[Arg(int, 'ii'), Arg(str, 'ss')], str]", variable has type "Callable[[int, Arg(str, 's')], str]")
 int_named_str_fun = iosf
 
 [builtins fixtures/dict.pyi]
@@ -1681,7 +1768,7 @@ f(x=4) + '' # E: Unsupported operand types for + ("int" and "str")
 [case testCallableWithArbitraryArgsInErrorMessage]
 from typing import Callable
 def f(x: Callable[..., int]) -> None:
-    x = 1  # E: Incompatible types in assignment (expression has type "int", variable has type Callable[..., int])
+    x = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[..., int]")
 [out]
 
 [case testCallableWithArbitraryArgsInGenericFunction]
@@ -1703,7 +1790,7 @@ def g4(*, y: int) -> str: pass
 f(g1)
 f(g2)
 f(g3)
-f(g4) # E: Argument 1 to "f" has incompatible type Callable[[NamedArg(int, 'y')], str]; expected Callable[..., int]
+f(g4) # E: Argument 1 to "f" has incompatible type "Callable[[NamedArg(int, 'y')], str]"; expected "Callable[..., int]"
 
 [case testCallableWithArbitraryArgsSubtypingWithGenericFunc]
 from typing import Callable, TypeVar
@@ -1736,7 +1823,7 @@ f(x=1, y="hello", z=[])
 from typing import Dict
 def f(x, **kwargs): # type: (...) -> None
     success_dict_type = kwargs # type: Dict[str, str]
-    failure_dict_type = kwargs # type: Dict[int, str] # E: Incompatible types in assignment (expression has type Dict[str, Any], variable has type Dict[int, str])
+    failure_dict_type = kwargs # type: Dict[int, str] # E: Incompatible types in assignment (expression has type "Dict[str, Any]", variable has type "Dict[int, str]")
 f(1, thing_in_kwargs=["hey"])
 [builtins fixtures/dict.pyi]
 [out]
@@ -1745,7 +1832,7 @@ f(1, thing_in_kwargs=["hey"])
 from typing import Tuple, Any
 def f(x, *args): # type: (...) -> None
     success_tuple_type = args # type: Tuple[Any, ...]
-    fail_tuple_type = args # type: None # E: Incompatible types in assignment (expression has type Tuple[Any, ...], variable has type None)
+    fail_tuple_type = args # type: None # E: Incompatible types in assignment (expression has type "Tuple[Any, ...]", variable has type "None")
 f(1, "hello")
 [builtins fixtures/tuple.pyi]
 [out]
@@ -1832,8 +1919,8 @@ def g(x, y): pass
 def h(x): pass
 def j(y) -> Any: pass
 f = h
-f = j # E: Incompatible types in assignment (expression has type Callable[[Arg(Any, 'y')], Any], variable has type Callable[[Arg(Any, 'x')], Any])
-f = g # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[Any], Any])
+f = j # E: Incompatible types in assignment (expression has type "Callable[[Arg(Any, 'y')], Any]", variable has type "Callable[[Arg(Any, 'x')], Any]")
+f = g # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[Any], Any]")
 
 [case testRedefineFunction2]
 def f() -> None: pass
@@ -2074,23 +2161,23 @@ def fn(
 from typing import Union, Dict, List
 def f() -> List[Union[str, int]]:
     x = ['a']
-    return x # E: Incompatible return value type (got List[str], expected List[Union[str, int]]) \
-# N: Perhaps you need a type annotation for "x"? Suggestion: List[Union[str, int]]
+    return x # E: Incompatible return value type (got "List[str]", expected "List[Union[str, int]]") \
+# N: Perhaps you need a type annotation for "x"? Suggestion: "List[Union[str, int]]"
 
 def g() -> Dict[str, Union[str, int]]:
     x = {'a': 'a'}
-    return x # E: Incompatible return value type (got Dict[str, str], expected Dict[str, Union[str, int]]) \
-# N: Perhaps you need a type annotation for "x"? Suggestion: Dict[str, Union[str, int]]
+    return x # E: Incompatible return value type (got "Dict[str, str]", expected "Dict[str, Union[str, int]]") \
+# N: Perhaps you need a type annotation for "x"? Suggestion: "Dict[str, Union[str, int]]"
 
 def h() -> Dict[Union[str, int], str]:
     x = {'a': 'a'}
-    return x # E: Incompatible return value type (got Dict[str, str], expected Dict[Union[str, int], str]) \
-# N: Perhaps you need a type annotation for "x"? Suggestion: Dict[Union[str, int], str]
+    return x # E: Incompatible return value type (got "Dict[str, str]", expected "Dict[Union[str, int], str]") \
+# N: Perhaps you need a type annotation for "x"? Suggestion: "Dict[Union[str, int], str]"
 
 def i() -> List[Union[int, float]]:
     x: List[int] = [1]
-    return x # E: Incompatible return value type (got List[int], expected List[Union[int, float]]) \
-# N: Perhaps you need a type annotation for "x"? Suggestion: List[Union[int, float]]
+    return x # E: Incompatible return value type (got "List[int]", expected "List[Union[int, float]]") \
+# N: Perhaps you need a type annotation for "x"? Suggestion: "List[Union[int, float]]"
 
 [builtins fixtures/dict.pyi]
 
@@ -2098,11 +2185,11 @@ def i() -> List[Union[int, float]]:
 from typing import Union, List
 def f() -> List[Union[int, float]]:
     x = ['a']
-    return x # E: Incompatible return value type (got List[str], expected List[Union[int, float]])
+    return x # E: Incompatible return value type (got "List[str]", expected "List[Union[int, float]]")
 
 def g() -> List[Union[str, int]]:
     x = ('a', 2)
-    return x # E: Incompatible return value type (got "Tuple[str, int]", expected List[Union[str, int]])
+    return x # E: Incompatible return value type (got "Tuple[str, int]", expected "List[Union[str, int]]")
 
 [builtins fixtures/list.pyi]
 
@@ -2110,7 +2197,7 @@ def g() -> List[Union[str, int]]:
 from typing import Union, Dict, List
 def f() -> Dict[str, Union[str, int]]:
     x = {'a': 'a', 'b': 2}
-    return x # E: Incompatible return value type (got Dict[str, object], expected Dict[str, Union[str, int]])
+    return x # E: Incompatible return value type (got "Dict[str, object]", expected "Dict[str, Union[str, int]]")
 
 def g() -> Dict[str, Union[str, int]]:
     x: Dict[str, Union[str, int]] = {'a': 'a', 'b': 2}
@@ -2118,7 +2205,7 @@ def g() -> Dict[str, Union[str, int]]:
 
 def h() -> List[Union[str, int]]:
     x = ['a', 2]
-    return x # E: Incompatible return value type (got List[object], expected List[Union[str, int]])
+    return x # E: Incompatible return value type (got "List[object]", expected "List[Union[str, int]]")
 
 def i() -> List[Union[str, int]]:
     x: List[Union[str, int]] = ['a', 2]
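One behavioural change visible in the check-functions.test hunks above is that an incompatible default value is now reported with a dedicated "Incompatible default for argument" message instead of a generic assignment error. A minimal sketch of code that would trigger it (hypothetical class names, mirroring the expectations in the tests):

    class A: ...
    class B: ...

    def f(x: A = B()) -> None:  # error: Incompatible default for argument "x"
        ...                     #        (default has type "B", argument has type "A")

    def g(x: A = A()) -> None:  # compatible default, no error
        ...
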
diff --git a/test-data/unit/check-generic-subtyping.test b/test-data/unit/check-generic-subtyping.test
index 6cae196..9c5d242 100644
--- a/test-data/unit/check-generic-subtyping.test
+++ b/test-data/unit/check-generic-subtyping.test
@@ -13,9 +13,9 @@ ac = None # type: A[C]
 ad = None # type: A[D]
 b = None # type: B
 
-b = ad # E: Incompatible types in assignment (expression has type A[D], variable has type "B")
-ad = b # E: Incompatible types in assignment (expression has type "B", variable has type A[D])
-b = ac # E: Incompatible types in assignment (expression has type A[C], variable has type "B")
+b = ad # E: Incompatible types in assignment (expression has type "A[D]", variable has type "B")
+ad = b # E: Incompatible types in assignment (expression has type "B", variable has type "A[D]")
+b = ac # E: Incompatible types in assignment (expression has type "A[C]", variable has type "B")
 
 b = b
 ac = b
@@ -32,10 +32,10 @@ a = None # type: A
 bc = None # type: B[C]
 bd = None # type: B[D]
 
-bc = bd # E: Incompatible types in assignment (expression has type B[D], variable has type B[C])
-bd = bc # E: Incompatible types in assignment (expression has type B[C], variable has type B[D])
-bc = a  # E: Incompatible types in assignment (expression has type "A", variable has type B[C])
-bd = a  # E: Incompatible types in assignment (expression has type "A", variable has type B[D])
+bc = bd # E: Incompatible types in assignment (expression has type "B[D]", variable has type "B[C]")
+bd = bc # E: Incompatible types in assignment (expression has type "B[C]", variable has type "B[D]")
+bc = a  # E: Incompatible types in assignment (expression has type "A", variable has type "B[C]")
+bd = a  # E: Incompatible types in assignment (expression has type "A", variable has type "B[D]")
 
 a = bc
 a = bd
@@ -54,10 +54,10 @@ ad = None # type: A[D]
 bcc = None # type: B[C, C]
 bdc = None # type: B[D, C]
 
-ad = bcc # E: Incompatible types in assignment (expression has type B[C, C], variable has type A[D])
-ad = bdc # E: Incompatible types in assignment (expression has type B[D, C], variable has type A[D])
-bcc = ac # E: Incompatible types in assignment (expression has type A[C], variable has type B[C, C])
-bdc = ac # E: Incompatible types in assignment (expression has type A[C], variable has type B[D, C])
+ad = bcc # E: Incompatible types in assignment (expression has type "B[C, C]", variable has type "A[D]")
+ad = bdc # E: Incompatible types in assignment (expression has type "B[D, C]", variable has type "A[D]")
+bcc = ac # E: Incompatible types in assignment (expression has type "A[C]", variable has type "B[C, C]")
+bdc = ac # E: Incompatible types in assignment (expression has type "A[C]", variable has type "B[D, C]")
 
 bcc = bcc
 bdc = bdc
@@ -82,8 +82,8 @@ cef = None # type: C[E, F]
 cff = None # type: C[F, F]
 cfe = None # type: C[F, E]
 
-ae = cef # E: Incompatible types in assignment (expression has type C[E, F], variable has type A[A[E]])
-af = cfe # E: Incompatible types in assignment (expression has type C[F, E], variable has type A[A[F]])
+ae = cef # E: Incompatible types in assignment (expression has type "C[E, F]", variable has type "A[A[E]]")
+af = cfe # E: Incompatible types in assignment (expression has type "C[F, E]", variable has type "A[A[F]]")
 
 ae = cfe
 af = cef
@@ -280,7 +280,7 @@ a = None # type: A
 bc = None # type: B[C]
 bd = None # type: B[D]
 
-a = bc # E: Incompatible types in assignment (expression has type B[C], variable has type "A")
+a = bc # E: Incompatible types in assignment (expression has type "B[C]", variable has type "A")
 bc = a
 bd = a
 
@@ -305,8 +305,8 @@ a = None # type: A
 c = None # type: C
 bc = None # type: B[C]
 
-a.x = c # E: Incompatible types in assignment (expression has type "C", variable has type B[Any])
-a.f(c)  # E: Argument 1 to "f" of "B" has incompatible type "C"; expected B[Any]
+a.x = c # E: Incompatible types in assignment (expression has type "C", variable has type "B[Any]")
+a.f(c)  # E: Argument 1 to "f" of "B" has incompatible type "C"; expected "B[Any]"
 a.x = bc
 a.f(bc)
 [out]
@@ -325,8 +325,8 @@ class B(Generic[T]):
 
 class A(B):
   def g(self) -> None:
-    self.x = c # E: Incompatible types in assignment (expression has type "C", variable has type B[Any])
-    self.f(c)  # E: Argument 1 to "f" of "B" has incompatible type "C"; expected B[Any]
+    self.x = c # E: Incompatible types in assignment (expression has type "C", variable has type "B[Any]")
+    self.f(c)  # E: Argument 1 to "f" of "B" has incompatible type "C"; expected "B[Any]"
     self.x = bc
     self.f(bc)
 
@@ -394,7 +394,7 @@ B(1)
 C(1)
 C('a')  # E: Argument 1 to "C" has incompatible type "str"; expected "int"
 D(A(1))
-D(1)  # E: Argument 1 to "D" has incompatible type "int"; expected A[<nothing>]
+D(1)  # E: Argument 1 to "D" has incompatible type "int"; expected "A[<nothing>]"
 
 
 [case testInheritedConstructor2]
@@ -427,9 +427,9 @@ adc = None # type: A[D, C]
 ic = None # type: I[C]
 id = None # type: I[D]
 
-ic = acd # E: Incompatible types in assignment (expression has type A[C, D], variable has type I[C])
-id = adc # E: Incompatible types in assignment (expression has type A[D, C], variable has type I[D])
-adc = ic # E: Incompatible types in assignment (expression has type I[C], variable has type A[D, C])
+ic = acd # E: Incompatible types in assignment (expression has type "A[C, D]", variable has type "I[C]")
+id = adc # E: Incompatible types in assignment (expression has type "A[D, C]", variable has type "I[D]")
+adc = ic # E: Incompatible types in assignment (expression has type "I[C]", variable has type "A[D, C]")
 
 ic = adc
 id = acd
@@ -451,11 +451,11 @@ class I(Generic[S]): pass
 class B(I[C]): pass
 class A(B): pass
 
-ie = a # E: Incompatible types in assignment (expression has type "A", variable has type I[E])
-a = ic # E: Incompatible types in assignment (expression has type I[C], variable has type "A")
-a = id # E: Incompatible types in assignment (expression has type I[D], variable has type "A")
+ie = a # E: Incompatible types in assignment (expression has type "A", variable has type "I[E]")
+a = ic # E: Incompatible types in assignment (expression has type "I[C]", variable has type "A")
+a = id # E: Incompatible types in assignment (expression has type "I[D]", variable has type "A")
 a = b  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
-id = a # E: Incompatible types in assignment (expression has type "A", variable has type I[D])
+id = a # E: Incompatible types in assignment (expression has type "A", variable has type "I[D]")
 
 ic = a
 b = a
@@ -486,8 +486,8 @@ a, i, j = None, None, None # type: (A[object], I[object], J[object])
 ii = a
 jj = a
 jj = i
-a = i # E: Incompatible types in assignment (expression has type I[object], variable has type A[object])
-a = j # E: Incompatible types in assignment (expression has type J[object], variable has type A[object])
+a = i # E: Incompatible types in assignment (expression has type "I[object]", variable has type "A[object]")
+a = j # E: Incompatible types in assignment (expression has type "J[object]", variable has type "A[object]")
 
 class J(Generic[t]): pass
 class X(metaclass=ABCMeta): pass
@@ -546,7 +546,7 @@ class A(B):
 class C: pass
 class D: pass
 [out]
-main:7: error: Incompatible types in assignment (expression has type "A", variable has type I[D])
+main:7: error: Incompatible types in assignment (expression has type "A", variable has type "I[D]")
 
 [case testSubclassingGenericABCWithDeepHierarchy2]
 from typing import Any, TypeVar, Generic
@@ -704,7 +704,7 @@ a = None  # type: G[A]
 b = None  # type: G[B]
 c = None  # type: G[C]
 
-b = a  # E: Incompatible types in assignment (expression has type G[A], variable has type G[B])
+b = a  # E: Incompatible types in assignment (expression has type "G[A]", variable has type "G[B]")
 b = c
 [builtins fixtures/bool.pyi]
 [out]
@@ -723,7 +723,7 @@ b = None  # type: G[B]
 c = None  # type: G[C]
 
 b = a
-b = c  # E: Incompatible types in assignment (expression has type G[C], variable has type G[B])
+b = c  # E: Incompatible types in assignment (expression has type "G[C]", variable has type "G[B]")
 [builtins fixtures/bool.pyi]
 [out]
 
@@ -740,8 +740,8 @@ a = None  # type: G[A]
 b = None  # type: G[B]
 c = None  # type: G[C]
 
-b = a  # E: Incompatible types in assignment (expression has type G[A], variable has type G[B])
-b = c  # E: Incompatible types in assignment (expression has type G[C], variable has type G[B])
+b = a  # E: Incompatible types in assignment (expression has type "G[A]", variable has type "G[B]")
+b = c  # E: Incompatible types in assignment (expression has type "G[C]", variable has type "G[B]")
 [builtins fixtures/bool.pyi]
 [out]
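The check-generic-subtyping.test hunks above largely revolve around variance of user-defined generic classes. A compact sketch of the three variance flavours behind the quoted assignment errors (hypothetical class names; only the invariant case is rejected):

    from typing import TypeVar, Generic

    class A: ...
    class B(A): ...                          # B is a subtype of A

    T = TypeVar('T')                         # invariant (the default)
    T_co = TypeVar('T_co', covariant=True)
    T_contra = TypeVar('T_contra', contravariant=True)

    class Inv(Generic[T]): ...
    class Cov(Generic[T_co]): ...
    class Contra(Generic[T_contra]): ...

    x: Inv[A] = Inv[B]()        # error: Inv is invariant, so Inv[B] is not an Inv[A]
    y: Cov[A] = Cov[B]()        # ok: covariant in T_co
    z: Contra[B] = Contra[A]()  # ok: contravariant in T_contra
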
 
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index bb05e03..6e8fcd4 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -79,8 +79,8 @@ class A(Generic[T]): pass
 class B: pass
 class C(B): pass
 [out]
-main:4: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
-main:5: error: Incompatible types in assignment (expression has type A[C], variable has type A[B])
+main:4: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]")
+main:5: error: Incompatible types in assignment (expression has type "A[C]", variable has type "A[B]")
 
 [case testGenericTypeCompatibilityWithAny]
 from typing import Any, TypeVar, Generic
@@ -115,8 +115,8 @@ class A(Generic[T]):
 class B: pass
 class C: pass
 [out]
-main:7: error: Incompatible types in assignment (expression has type A[C], variable has type A[B])
-main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
+main:7: error: Incompatible types in assignment (expression has type "A[C]", variable has type "A[B]")
+main:8: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]")
 
 [case testMultipleGenericTypeParametersWithMemberVars]
 from typing import TypeVar, Generic
@@ -183,9 +183,9 @@ class A(Generic[S, T]):
 class B: pass
 class C(B):pass
 [out]
-main:8: error: Incompatible types in assignment (expression has type A[B, C], variable has type A[B, B])
-main:9: error: Incompatible types in assignment (expression has type A[C, B], variable has type A[B, B])
-main:10: error: Incompatible types in assignment (expression has type A[B, B], variable has type A[B, C])
+main:8: error: Incompatible types in assignment (expression has type "A[B, C]", variable has type "A[B, B]")
+main:9: error: Incompatible types in assignment (expression has type "A[C, B]", variable has type "A[B, B]")
+main:10: error: Incompatible types in assignment (expression has type "A[B, B]", variable has type "A[B, C]")
 
 
 -- Simple generic type bodies
@@ -208,7 +208,7 @@ x = None # type: B
 class B: pass
 [out]
 main:7: error: Argument 1 to "f" of "A" has incompatible type "B"; expected "T"
-main:8: error: Incompatible types in assignment (expression has type A[T], variable has type A[B])
+main:8: error: Incompatible types in assignment (expression has type "A[T]", variable has type "A[B]")
 
 [case testGenericTypeBodyWithMultipleVariables]
 from typing import TypeVar, Generic
@@ -229,8 +229,8 @@ class B: pass
 [out]
 main:8: error: Incompatible types in assignment (expression has type "T", variable has type "S")
 main:9: error: Incompatible types in assignment (expression has type "S", variable has type "T")
-main:10: error: Incompatible types in assignment (expression has type A[S, T], variable has type A[S, B])
-main:11: error: Incompatible types in assignment (expression has type A[S, T], variable has type A[T, T])
+main:10: error: Incompatible types in assignment (expression has type "A[S, T]", variable has type "A[S, B]")
+main:11: error: Incompatible types in assignment (expression has type "A[S, T]", variable has type "A[T, T]")
 
 [case testCompatibilityOfNoneWithTypeVar]
 from typing import TypeVar, Generic
@@ -284,9 +284,9 @@ class B: pass
 class C: pass
 [out]
 main:8: error: Incompatible types in assignment (expression has type "C", variable has type "B")
-main:9: error: Unsupported operand types for + (A[B, C] and "C")
+main:9: error: Unsupported operand types for + ("A[B, C]" and "C")
 main:10: error: Incompatible types in assignment (expression has type "B", variable has type "C")
-main:11: error: Invalid index type "B" for A[B, C]; expected type "C"
+main:11: error: Invalid index type "B" for "A[B, C]"; expected type "C"
 
 [case testOperatorAssignmentWithIndexLvalue1]
 from typing import TypeVar, Generic
@@ -309,7 +309,7 @@ class C:
     def __add__(self, o: 'C') -> 'C': pass
 [out]
 main:7: error: Unsupported operand types for + ("C" and "B")
-main:8: error: Invalid index type "C" for A[C]; expected type "B"
+main:8: error: Invalid index type "C" for "A[C]"; expected type "B"
 
 [case testOperatorAssignmentWithIndexLvalue2]
 from typing import TypeVar, Generic
@@ -330,9 +330,9 @@ class B: pass
 class C:
     def __add__(self, o: 'C') -> 'C': pass
 [out]
-main:7: error: Invalid index type "B" for A[C]; expected type "C"
-main:8: error: Invalid index type "C" for A[C]; expected type "B"
-main:9: error: Invalid index type "B" for A[C]; expected type "C"
+main:7: error: Invalid index type "B" for "A[C]"; expected type "C"
+main:8: error: Invalid index type "C" for "A[C]"; expected type "B"
+main:9: error: Invalid index type "B" for "A[C]"; expected type "C"
 
 
 -- Nested generic types
@@ -364,8 +364,8 @@ class B:
 class C:
     pass
 [out]
-main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
-main:9: error: Incompatible types in assignment (expression has type A[A[B]], variable has type A[A[C]])
+main:8: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]")
+main:9: error: Incompatible types in assignment (expression has type "A[A[B]]", variable has type "A[A[C]]")
 
 
 -- Generic functions
@@ -384,7 +384,7 @@ def f(s: S, t: T) -> p[T, A]:
     s = t           # E: Incompatible types in assignment (expression has type "T", variable has type "S")
     p_s_a = None  # type: p[S, A]
     if s:
-        return p_s_a # E: Incompatible return value type (got p[S, A], expected p[T, A])
+        return p_s_a # E: Incompatible return value type (got "p[S, A]", expected "p[T, A]")
     b = t # type: T
     c = s # type: S
     p_t_a = None  # type: p[T, A]
@@ -402,10 +402,10 @@ class A(Generic[T]):
         s = t # E: Incompatible types in assignment (expression has type "T", variable has type "S")
         p_s_s = None  # type: p[S, S]
         if s:
-            return p_s_s # E: Incompatible return value type (got p[S, S], expected p[S, T])
+            return p_s_s # E: Incompatible return value type (got "p[S, S]", expected "p[S, T]")
         p_t_t = None  # type: p[T, T]
         if t:
-            return p_t_t # E: Incompatible return value type (got p[T, T], expected p[S, T])
+            return p_t_t # E: Incompatible return value type (got "p[T, T]", expected "p[S, T]")
         t = t
         s = s
         p_s_t = None  # type: p[S, T]
@@ -448,7 +448,7 @@ A[int, str, int]() # E: Type application has too many types (2 expected)
 a = None # type: A
 class A: pass
 a[A]()  # E: Value of type "A" is not indexable
-A[A]()  # E: The type Type[A] is not generic and not indexable
+A[A]()  # E: The type "Type[A]" is not generic and not indexable
 [out]
 
 [case testTypeApplicationArgTypes]
@@ -504,7 +504,7 @@ Alias[int]("a")  # E: Argument 1 to "Node" has incompatible type "str"; expected
 [out]
 
 [case testTypeApplicationCrash]
-type[int] # this was crashing, see #2302 (comment)  # E: The type Type[type] is not generic and not indexable
+type[int] # this was crashing, see #2302 (comment)  # E: The type "Type[type]" is not generic and not indexable
 [out]
 
 
@@ -564,7 +564,7 @@ def func(x: IntNode[T]) -> IntNode[T]:
     return x
 reveal_type(func) # E: Revealed type is 'def [T] (x: __main__.Node[builtins.int, T`-1]) -> __main__.Node[builtins.int, T`-1]'
 
-func(1) # E: Argument 1 to "func" has incompatible type "int"; expected Node[int, <nothing>]
+func(1) # E: Argument 1 to "func" has incompatible type "int"; expected "Node[int, <nothing>]"
 func(Node('x', 1)) # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
 reveal_type(func(Node(1, 'x'))) # E: Revealed type is '__main__.Node[builtins.int, builtins.str*]'
 
@@ -693,7 +693,7 @@ l.meth().append(1)
 reveal_type(l.meth()) # E: Revealed type is 'builtins.list*[builtins.int]'
 l.meth().append('x') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
 
-ListedNode[str]([]).x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type List[str])
+ListedNode[str]([]).x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "List[str]")
 
 [builtins fixtures/list.pyi]
 
@@ -715,7 +715,7 @@ def f_bad(x: T) -> D[T]:
     return D(1)  # Error, see out
 
 L[int]().append(Node((1, 1)))
-L[int]().append(5) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected Node[Tuple[int, int]]
+L[int]().append(5) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "Node[Tuple[int, int]]"
 
 x = D((1, 1)) # type: D[int]
 y = D(5) # type: D[int] # E: Argument 1 to "D" has incompatible type "int"; expected "Tuple[int, int]"
@@ -779,7 +779,7 @@ if not isinstance(s, str):
 
 z = None # type: TNode # Same as TNode[Any]
 z.x
-z.foo() # E: Item Node[int] of "Union[Any, Node[int]]" has no attribute "foo"
+z.foo() # E: Item "Node[int]" of "Union[Any, Node[int]]" has no attribute "foo"
 
 [builtins fixtures/isinstance.pyi]
 
@@ -825,9 +825,9 @@ reveal_type(make_cb(1)) # E: Revealed type is 'def (*Any, **Any) -> builtins.int
 def use_cb(arg: T, cb: C2[T]) -> Node[T]:
     return cb(arg, arg)
 
-use_cb(1, 1) # E: Argument 2 to "use_cb" has incompatible type "int"; expected Callable[[int, int], Node[int]]
+use_cb(1, 1) # E: Argument 2 to "use_cb" has incompatible type "int"; expected "Callable[[int, int], Node[int]]"
 my_cb = None # type: C2[int]
-use_cb('x', my_cb) # E: Argument 2 to "use_cb" has incompatible type Callable[[int, int], Node[int]]; expected Callable[[str, str], Node[str]]
+use_cb('x', my_cb) # E: Argument 2 to "use_cb" has incompatible type "Callable[[int, int], Node[int]]"; expected "Callable[[str, str], Node[str]]"
 reveal_type(use_cb(1, my_cb)) # E: Revealed type is '__main__.Node[builtins.int]'
 
 [out]
@@ -848,11 +848,11 @@ def fun2(v: Vec[T], scale: T) -> Vec[T]:
     return v
 
 reveal_type(fun1([(1, 1)])) # E: Revealed type is 'builtins.int*'
-fun1(1) # E: Argument 1 to "fun1" has incompatible type "int"; expected List[Tuple[int, int]]
+fun1(1) # E: Argument 1 to "fun1" has incompatible type "int"; expected "List[Tuple[int, int]]"
 fun1([(1, 'x')]) # E: Cannot infer type argument 1 of "fun1"
 
 reveal_type(fun2([(1, 1)], 1)) # E: Revealed type is 'builtins.list[Tuple[builtins.int*, builtins.int*]]'
-fun2([('x', 'x')], 'x') # E: Type argument 1 of "fun2" has incompatible value "str"
+fun2([('x', 'x')], 'x') # E: Value of type variable "T" of "fun2" cannot be "str"
 
 [builtins fixtures/list.pyi]
 
@@ -869,7 +869,7 @@ n.y = 'x' # E: Incompatible types in assignment (expression has type "str", vari
 def f(x: Node[T, T]) -> TupledNode[T]:
     return Node(x.x, (x.x, x.x))
 
-f(1) # E: Argument 1 to "f" has incompatible type "int"; expected Node[<nothing>, <nothing>]
+f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Node[<nothing>, <nothing>]"
 f(Node(1, 'x')) # E: Cannot infer type argument 1 of "f"
 reveal_type(Node('x', 'x')) # E: Revealed type is 'a.Node[builtins.str*, builtins.str*]'
 
@@ -978,7 +978,7 @@ class C:
     c: Type[object] = Iterable[int]  # This is however also a variable
     a = B
     b = int  # E: Cannot assign multiple types to name "b" without an explicit "Type[...]" annotation \
-             # E: Incompatible types in assignment (expression has type Type[int], variable has type "Type alias to Union")
+             # E: Incompatible types in assignment (expression has type "Type[int]", variable has type "Type alias to Union")
     c = int
     def f(self, x: a) -> None: pass  # E: Invalid type "__main__.C.a"
     def g(self, x: b) -> None: pass
@@ -1068,9 +1068,9 @@ y = None # type: SameA[str] # Two errors here, for both args of A
 
 [builtins fixtures/list.pyi]
 [out]
-main:9:7: error: Type argument 1 of "A" has incompatible value "str"
-main:13: error: Type argument 1 of "A" has incompatible value "str"
-main:13: error: Type argument 2 of "A" has incompatible value "str"
+main:9:7: error: Value of type variable "T" of "A" cannot be "str"
+main:13: error: Value of type variable "T" of "A" cannot be "str"
+main:13: error: Value of type variable "S" of "A" cannot be "str"
 
 [case testGenericTypeAliasesIgnoredPotentialAlias]
 class A: ...
@@ -1108,7 +1108,7 @@ from m import Alias
 
 n = Alias[int]([1])
 reveal_type(n)  # E: Revealed type is 'm.Node[builtins.list*[builtins.int]]'
-bad = Alias[str]([1])  # E: List item 0 has incompatible type "int"
+bad = Alias[str]([1])  # E: List item 0 has incompatible type "int"; expected "str"
 
 n2 = Alias([1]) # Same as Node[List[Any]]
 reveal_type(n2)  # E: Revealed type is 'm.Node[builtins.list*[Any]]'
@@ -1201,7 +1201,7 @@ reveal_type(D[str, int]().c()) # E: Revealed type is 'builtins.str*'
 from typing import TypeVar, Generic
 T = TypeVar('T')
 
-class A(Generic[T, T]): # E: Duplicate type variables in Generic[...]
+class A(Generic[T, T]): # E: Duplicate type variables in Generic[...] or Protocol[...]
     pass
 
 a = A[int]()
@@ -1218,7 +1218,7 @@ class A(Generic[T]):
 class B(Generic[T]):
     pass
 
-class C(A[T], B[S], Generic[T]): # E: If Generic[...] is present it should list all type variables
+class C(A[T], B[S], Generic[T]): # E: If Generic[...] or Protocol[...] is present it should list all type variables
     pass
 
 c = C[int, str]()
@@ -1343,7 +1343,7 @@ Z = TypeVar('Z')
 class OO: pass
 a = None # type: A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object]
 
-f(a) # E: Argument 1 to "f" has incompatible type A[...]; expected "OO"
+f(a) # E: Argument 1 to "f" has incompatible type "A[...]"; expected "OO"
 
 def f(a: OO) -> None:
     pass
@@ -1354,7 +1354,7 @@ from typing import TypeVar, Generic
 S = TypeVar('S')
 T = TypeVar('T')
 a = None # type: A[object, B]
-f(a) # E: Argument 1 to "f" has incompatible type A[object, B]; expected "B"
+f(a) # E: Argument 1 to "f" has incompatible type "A[object, B]"; expected "B"
 
 def f(a: 'B') -> None: pass
 class A(Generic[S, T]): pass
@@ -1365,7 +1365,7 @@ from typing import Callable, TypeVar, Generic
 S = TypeVar('S')
 T = TypeVar('T')
 a = None # type: A[object, Callable[[], None]]
-f(a) # E: Argument 1 to "f" has incompatible type A[object, Callable[[], None]]; expected "B"
+f(a) # E: Argument 1 to "f" has incompatible type "A[object, Callable[[], None]]"; expected "B"
 
 def f(a: 'B') -> None: pass
 class A(Generic[S, T]): pass
@@ -1390,7 +1390,7 @@ def f(a: List[A]) -> A: pass
 def f(a: B) -> B: pass
 
 b = f([a]) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
-a = f([b]) # E: List item 0 has incompatible type "B"
+a = f([b]) # E: List item 0 has incompatible type "B"; expected "A"
 a = f(b)   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
 
 a = f([a])
@@ -1499,25 +1499,25 @@ y1 = f1
 y1 = f1
 y1 = f2
 y1 = f3
-y1 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[A], A])
+y1 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A]", variable has type "Callable[[A], A]")
 
 y2 = f2
 y2 = f2
-y2 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], B])
-y2 = f3 # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[A], B])
-y2 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[A], B])
+y2 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], B]")
+y2 = f3 # E: Incompatible types in assignment (expression has type "Callable[[B], B]", variable has type "Callable[[A], B]")
+y2 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A]", variable has type "Callable[[A], B]")
 
 y3 = f3
 y3 = f3
 y3 = f1
 y3 = f2
-y3 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[B], B])
+y3 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A]", variable has type "Callable[[B], B]")
 
 y4 = f4
 y4 = f4
-y4 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[int], A])
+y4 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[int], A]")
 y4 = f2
-y4 = f3 # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[int], A])
+y4 = f3 # E: Incompatible types in assignment (expression has type "Callable[[B], B]", variable has type "Callable[[int], A]")
 
 [case testSubtypingWithGenericInnerFunctions]
 from typing import TypeVar
@@ -1533,24 +1533,24 @@ def outer(t: T) -> None:
 
     y1 = f1
     y1 = f2
-    y1 = f3 # E: Incompatible types in assignment (expression has type Callable[[T], A], variable has type Callable[[A], A])
-    y1 = f4 # E: Incompatible types in assignment (expression has type Callable[[A], T], variable has type Callable[[A], A])
-    y1 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[A], A])
+    y1 = f3 # E: Incompatible types in assignment (expression has type "Callable[[T], A]", variable has type "Callable[[A], A]")
+    y1 = f4 # E: Incompatible types in assignment (expression has type "Callable[[A], T]", variable has type "Callable[[A], A]")
+    y1 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[A], A]")
 
     y2 = f2
-    y2 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], B])
+    y2 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], B]")
 
     y3 = f3
-    y3 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[T], A])
+    y3 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[T], A]")
     y3 = f2
-    y3 = f4 # E: Incompatible types in assignment (expression has type Callable[[A], T], variable has type Callable[[T], A])
-    y3 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], A])
+    y3 = f4 # E: Incompatible types in assignment (expression has type "Callable[[A], T]", variable has type "Callable[[T], A]")
+    y3 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[T], A]")
 
     y4 = f4
-    y4 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], T])
+    y4 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], T]")
     y4 = f2
-    y4 = f3 # E: Incompatible types in assignment (expression has type Callable[[T], A], variable has type Callable[[A], T])
-    y4 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[A], T])
+    y4 = f3 # E: Incompatible types in assignment (expression has type "Callable[[T], A]", variable has type "Callable[[A], T]")
+    y4 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[A], T]")
 
     y5 = f5
     y5 = f1
@@ -1568,8 +1568,8 @@ g1(f)
 def g2(f: Callable[[int], int]) -> None: pass
 g2(f)
 def g3(f: Callable[[object], object]) -> None: pass
-g3(f) # E: Argument 1 to "g3" has incompatible type Callable[[T], T]; \
-           expected Callable[[object], object]
+g3(f) # E: Argument 1 to "g3" has incompatible type "Callable[[T], T]"; \
+           expected "Callable[[object], object]"
 
 [case testSubtypingWithGenericFunctionUsingTypevarWithValues2-skip]
 from typing import TypeVar, Callable
@@ -1626,7 +1626,7 @@ T = TypeVar('T')
 class C(Generic[T]):
     def __init__(self) -> None: pass
 x = C # type: Callable[[], C[int]]
-y = C # type: Callable[[], int] # E: Incompatible types in assignment (expression has type Type[C[Any]], variable has type Callable[[], int])
+y = C # type: Callable[[], int] # E: Incompatible types in assignment (expression has type "Type[C[Any]]", variable has type "Callable[[], int]")
 
 
 -- Special cases
diff --git a/test-data/unit/check-ignore.test b/test-data/unit/check-ignore.test
index 3c81bd0..ad183ac 100644
--- a/test-data/unit/check-ignore.test
+++ b/test-data/unit/check-ignore.test
@@ -194,7 +194,7 @@ foo(Child())
 def bar(x: Base[str, str]) -> None: pass
 bar(Child())
 [out]
-main:19: error: Argument 1 to "bar" has incompatible type "Child"; expected Base[str, str]
+main:19: error: Argument 1 to "bar" has incompatible type "Child"; expected "Base[str, str]"
 
 [case testTypeIgnoreLineNumberWithinFile]
 import m
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index 557d42a..1d3ee9a 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -1501,6 +1501,99 @@ class MyClass:
 [rechecked]
 [stale]
 
+[case testIncrementalWorksWithBasicProtocols]
+import a
+[file a.py]
+from b import P
+
+x: int
+y: P[int]
+x = y.meth()
+
+class C:
+    def meth(self) -> int:
+        pass
+y = C()
+
+[file a.py.2]
+from b import P
+
+x: str
+y: P[str]
+x = y.meth()
+
+class C:
+    def meth(self) -> str:
+        pass
+y = C()
+[file b.py]
+from typing import Protocol, TypeVar
+
+T = TypeVar('T', covariant=True)
+class P(Protocol[T]):
+    def meth(self) -> T:
+        pass
+
+[case testIncrementalSwitchFromNominalToStructural]
+import a
+[file a.py]
+from b import B, fun
+class C(B):
+    def x(self) -> int: pass
+    def y(self) -> int: pass
+fun(C())
+
+[file b.py]
+from typing import Protocol
+class B:
+    def x(self) -> float: pass
+def fun(arg: B) -> None:
+    arg.x()
+
+[file b.py.2]
+from typing import Protocol
+class B(Protocol):
+    def x(self) -> float: pass
+def fun(arg: B) -> None:
+    arg.x()
+
+[file a.py.3]
+from b import fun
+class C:
+    def x(self) -> int: pass
+    def y(self) -> int: pass
+fun(C())
+[out1]
+[out2]
+[out3]
+
+[case testIncrementalSwitchFromStructuralToNominal]
+import a
+[file a.py]
+from b import fun
+class C:
+    def x(self) -> int: pass
+    def y(self) -> int: pass
+fun(C())
+
+[file b.py]
+from typing import Protocol
+class B(Protocol):
+    def x(self) -> float: pass
+def fun(arg: B) -> None:
+    arg.x()
+
+[file b.py.2]
+from typing import Protocol
+class B:
+    def x(self) -> float: pass
+def fun(arg: B) -> None:
+    arg.x()
+
+[out1]
+[out2]
+tmp/a.py:5: error: Argument 1 to "fun" has incompatible type "C"; expected "B"
+
 [case testIncrementalWorksWithNamedTuple]
 import foo
 
@@ -2840,3 +2933,234 @@ foo.bar(b"test")
 [out]
 [out2]
 tmp/mod.py:7: error: Revealed type is 'builtins.bytes'
+
+[case testIncrementalWithSilentImports]
+# cmd: mypy -m a
+# cmd2: mypy -m b
+# flags: --follow-imports=silent
+# flags2: --follow-imports=silent
+[file a.py]
+import b
+
+b.foo(1, 2)
+
+[file b.py]
+def foo(a: int, b: int) -> str:
+    return a + b
+
+[out1]
+[out2]
+tmp/b.py:2: error: Incompatible return value type (got "int", expected "str")
+
+[case testForwardNamedTupleToUnionWithOtherNamedTUple]
+from typing import NamedTuple, Union
+
+class Person(NamedTuple):
+    name: Union[str, "Pair"]
+
+class Pair(NamedTuple):
+    first: str
+    last: str
+
+Person(name=Pair(first="John", last="Doe"))
+[out]
+
+-- Some crazy self-referential named tuples, typed dicts, and aliases
+-- to be sure that everything can be _serialized_ (i.e. ForwardRefs are removed).
+-- For this reason, errors are silenced (tests with # type: ignore have equivalents in other files)
+
+[case testForwardTypeAliasInBase1]
+from typing import List
+class C(List['A']):
+    pass
+
+A = List[int]
+x: int = C()[0][0]
+[builtins fixtures/list.pyi]
+[out]
+
+[case testForwardTypeAliasInBase2]
+from typing import List, Generic, TypeVar, NamedTuple
+T = TypeVar('T')
+
+class C(A, B): #type: ignore
+    pass
+class G(Generic[T]): pass
+A = G[C]
+class B(NamedTuple):
+    x: int
+
+C().x
+C()[0]
+[builtins fixtures/list.pyi]
+[out]
+
+[case testSerializeRecursiveAliases1]
+from typing import Type, Callable, Union
+
+A = Union[A, int]  # type: ignore
+B = Callable[[B], int] # type: ignore
+C = Type[C] # type: ignore
+[out]
+
+[case testSerializeRecursiveAliases2]
+from typing import Type, Callable, Union
+
+A = Union[B, int]  # type: ignore
+B = Callable[[C], int] # type: ignore
+C = Type[A] # type: ignore
+[out]
+
+[case testSerializeRecursiveAliases3]
+from typing import Type, Callable, Union, NamedTuple
+
+A = Union[B, int]  # type: ignore
+B = Callable[[C], int] # type: ignore
+class C(NamedTuple): # type: ignore
+    x: A
+[out]
+
+[case testGenericTypeAliasesForwardAnyIncremental1]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+IntNode = Node[int, S]
+AnyNode = Node[S, T]
+
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        self.x = x
+        self.y = y
+
+def output() -> IntNode[str]:
+    return Node(1, 'x')
+x = output() # type: IntNode
+
+y = None # type: IntNode
+y.x = 1
+y.y = 1
+y.y = 'x'
+
+z = Node(1, 'x') # type: AnyNode
+[out]
+
+[case testGenericTypeAliasesForwardAnyIncremental2]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        self.x = x
+        self.y = y
+
+def output() -> IntNode[str]:
+    return Node(1, 'x')
+x = output() # type: IntNode
+
+y = None # type: IntNode
+y.x = 1
+y.y = 1
+y.y = 'x'
+
+z = Node(1, 'x') # type: AnyNode
+IntNode = Node[int, S]
+AnyNode = Node[S, T]
+[out]
+
+[case testNamedTupleForwardAsUpperBoundSerialization]
+from typing import NamedTuple, TypeVar, Generic
+T = TypeVar('T', bound='M')
+class G(Generic[T]):
+    x: T
+
+yg: G[M]
+z: int = G[M]().x.x
+z = G[M]().x[0]
+M = NamedTuple('M', [('x', int)])
+[out]
+
+[case testSelfRefNTIncremental1]
+from typing import Tuple, NamedTuple
+
+Node = NamedTuple('Node', [ # type: ignore
+        ('name', str),
+        ('children', Tuple['Node', ...]),
+    ])
+n: Node
+[builtins fixtures/tuple.pyi]
+
+[case testSelfRefNTIncremental2]
+from typing import Tuple, NamedTuple
+
+A = NamedTuple('A', [ # type: ignore
+        ('x', str),
+        ('y', Tuple['B', ...]),
+    ])
+class B(NamedTuple): # type: ignore
+    x: A
+    y: int
+
+n: A
+[builtins fixtures/tuple.pyi]
+
+[case testSelfRefNTIncremental3]
+from typing import NamedTuple, Tuple
+
+class B(NamedTuple): # type: ignore
+    x: Tuple[A, int]
+    y: int
+A = NamedTuple('A', [ # type: ignore
+        ('x', str),
+        ('y', 'B'),
+    ])
+n: B
+m: A
+lst = [m, n]
+[builtins fixtures/tuple.pyi]
+
+[case testSelfRefNTIncremental4]
+from typing import NamedTuple
+
+class B(NamedTuple): # type: ignore
+    x: A
+    y: int
+class A(NamedTuple): # type: ignore
+    x: str
+    y: B
+
+n: A
+[builtins fixtures/tuple.pyi]
+
+[case testSelfRefNTIncremental5]
+from typing import NamedTuple
+
+B = NamedTuple('B', [ # type: ignore
+        ('x', A),
+        ('y', int),
+    ])
+A = NamedTuple('A', [ # type: ignore
+        ('x', str),
+        ('y', 'B'),
+    ])
+n: A
+def f(m: B) -> None: pass
+[builtins fixtures/tuple.pyi]
+
+[case testCrashWithPartialGlobalAndCycle]
+import bar
+
+[file foo.py]
+import bar
+my_global_dict = {}  # type: ignore
+def external_func_0() -> None:
+    global my_global_dict
+    bar.external_list
+    my_global_dict[12] = 0
+
+[file bar.py]
+import foo
+
+external_list = [0]
+
+[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test
index 9896c11..541b0dc 100644
--- a/test-data/unit/check-inference-context.test
+++ b/test-data/unit/check-inference-context.test
@@ -13,7 +13,7 @@ b = None # type: B
 
 ao = f()
 ab = f()
-b = f() # E: Incompatible types in assignment (expression has type A[<nothing>], variable has type "B")
+b = f() # E: Incompatible types in assignment (expression has type "A[<nothing>]", variable has type "B")
 
 def f() -> 'A[T]': pass
 
@@ -29,7 +29,7 @@ b = None # type: B
 
 ao = A()
 ab = A()
-b = A() # E: Incompatible types in assignment (expression has type A[<nothing>], variable has type "B")
+b = A() # E: Incompatible types in assignment (expression has type "A[<nothing>]", variable has type "B")
 
 class A(Generic[T]): pass
 class B: pass
@@ -74,10 +74,10 @@ def g() -> None:
     b = None # type: B
 
     x = f(o)
-    ab = x # E: Incompatible types in assignment (expression has type A[object], variable has type A[B])
+    ab = x # E: Incompatible types in assignment (expression has type "A[object]", variable has type "A[B]")
     ao = x
     y = f(b)
-    ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ao = y # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]")
     ab = y
 
 def f(a: T) -> 'A[T]': pass
@@ -104,8 +104,8 @@ def g() -> None:
     ab = None # type: A[B]
     b = None # type: B
     x, y = f(b), f(b)
-    ao = x # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-    ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ao = x # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]")
+    ao = y # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]")
     ab = x
     ab = y
 
@@ -122,8 +122,8 @@ def h() -> None:
     ab = None # type: A[B]
     b = None # type: B
     x, y = g(f(b))
-    ao = x # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-    ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ao = x # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]")
+    ao = y # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]")
     ab = x
     ab = y
 
@@ -161,8 +161,8 @@ class A(Generic[T]): pass
 class B: pass
 [builtins fixtures/tuple.pyi]
 [out]
-main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-main:9: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+main:8: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]")
+main:9: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]")
 
 [case testInferenceWithTypeVariableTwiceInReturnTypeAndMultipleVariables]
 from typing import TypeVar, Tuple, Generic
@@ -190,10 +190,10 @@ class A(Generic[T]): pass
 class B: pass
 [builtins fixtures/tuple.pyi]
 [out]
-main:9: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-main:10: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-main:11: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
-main:12: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+main:9: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]")
+main:10: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]")
+main:11: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]")
+main:12: error: Incompatible types in assignment (expression has type "A[B]", variable has type "A[object]")
 
 
 -- Multiple tvar instances in arguments
@@ -309,7 +309,7 @@ ab = None # type: A[B]
 ac = None # type: A[C]
 
 ab.g(f(o))        # E: Argument 1 to "f" has incompatible type "object"; expected "B"
-ac = f(b).g(f(c)) # E: Incompatible types in assignment (expression has type A[B], variable has type A[C])
+ac = f(b).g(f(c)) # E: Incompatible types in assignment (expression has type "A[B]", variable has type "A[C]")
 ac = f(c).g(f(b)) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
 
 ab = f(b).g(f(c))
@@ -334,7 +334,7 @@ aa = None # type: List[A]
 ao = None # type: List[object]
 a = None # type: A
 
-a = [] # E: Incompatible types in assignment (expression has type List[<nothing>], variable has type "A")
+a = [] # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "A")
 
 aa = []
 ao = []
@@ -350,8 +350,8 @@ ao = None # type: List[object]
 a = None # type: A
 b = None # type: B
 
-aa = [b] # E: List item 0 has incompatible type "B"
-ab = [a] # E: List item 0 has incompatible type "A"
+aa = [b] # E: List item 0 has incompatible type "B"; expected "A"
+ab = [a] # E: List item 0 has incompatible type "A"; expected "B"
 
 aa = [a]
 ab = [b]
@@ -371,8 +371,8 @@ ao = None # type: List[object]
 a = None # type: A
 b = None # type: B
 
-ab = [b, a] # E: List item 1 has incompatible type "A"
-ab = [a, b] # E: List item 0 has incompatible type "A"
+ab = [b, a] # E: List item 1 has incompatible type "A"; expected "B"
+ab = [a, b] # E: List item 0 has incompatible type "A"; expected "B"
 
 aa = [a, b, a]
 ao = [a, b]
@@ -387,7 +387,7 @@ def f() -> None:
     a = []     # E: Need type annotation for variable
     b = [None]
     c = [B()]
-    c = [object()] # E: List item 0 has incompatible type "object"
+    c = [object()] # E: List item 0 has incompatible type "object"; expected "B"
     c = [B()]
 class B: pass
 [builtins fixtures/list.pyi]
@@ -401,8 +401,8 @@ ab = None # type: List[B]
 b = None # type: B
 o = None # type: object
 
-aao = [[o], ab] # E: List item 1 has incompatible type List[B]
-aab = [[], [o]] # E: List item 0 has incompatible type "object"
+aao = [[o], ab] # E: List item 1 has incompatible type "List[B]"; expected "List[object]"
+aab = [[], [o]] # E: List item 0 has incompatible type "object"; expected "B"
 
 aao = [[None], [b], [], [o]]
 aab = [[None], [b], []]
@@ -462,7 +462,7 @@ d = {A() : a_c,
 [case testInitializationWithInferredGenericType]
 from typing import TypeVar, Generic
 T = TypeVar('T')
-c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected C[A]
+c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected "C[A]"
 
 def f(x: T) -> T: pass
 class C(Generic[T]): pass
@@ -503,7 +503,7 @@ from abc import abstractmethod, ABCMeta
 t = TypeVar('t')
 x = A() # type: I[int]
 a_object = A() # type: A[object]
-y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type A[object], variable has type I[int])
+y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type "A[object]", variable has type "I[int]")
 
 class I(Generic[t]):
     @abstractmethod
@@ -529,7 +529,7 @@ class set(Generic[t]):
     def __init__(self, iterable: Iterable[t]) -> None: pass
 b = bool()
 l = set([b])
-l = set([object()]) # E: List item 0 has incompatible type "object"
+l = set([object()]) # E: List item 0 has incompatible type "object"; expected "bool"
 [builtins fixtures/for.pyi]
 
 
@@ -564,11 +564,27 @@ class B:
 from typing import List, Callable
 f = None # type: Callable[[], List[A]]
 f = lambda: []
-f = lambda: [B()]  # E: List item 0 has incompatible type "B"
+f = lambda: [B()]  # E: List item 0 has incompatible type "B"; expected "A"
 class A: pass
 class B: pass
 [builtins fixtures/list.pyi]
 
+[case testInferLambdaTypeUsingContext]
+x : str = (lambda x: x + 1)(1)  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+reveal_type((lambda x, y: x + y)(1, 2))  # E: Revealed type is 'builtins.int'
+(lambda x, y: x + y)(1, "")  # E: Unsupported operand types for + ("int" and "str")
+(lambda *, x, y: x + y)(x=1, y="")  # E: Unsupported operand types for + ("int" and "str")
+reveal_type((lambda s, i: s)(i=0, s='x')) # E: Revealed type is 'builtins.str'
+reveal_type((lambda s, i: i)(i=0, s='x')) # E: Revealed type is 'builtins.int'
+reveal_type((lambda x, s, i: x)(1.0, i=0, s='x')) # E: Revealed type is 'builtins.float'
+(lambda x, s, i: x)() # E: Too few arguments
+(lambda: 0)(1) # E: Too many arguments
+-- varargs are not handled, but it should not crash
+reveal_type((lambda *k, s, i: i)(type, i=0, s='x')) # E: Revealed type is 'Any'
+reveal_type((lambda s, *k, i: i)(i=0, s='x')) # E: Revealed type is 'Any'
+reveal_type((lambda s, i, **k: i)(i=0, s='x')) # E: Revealed type is 'Any'
+[builtins fixtures/dict.pyi]
+
 [case testInferLambdaAsGenericFunctionArgument]
 from typing import TypeVar, List, Any, Callable
 t = TypeVar('t')
@@ -594,9 +610,9 @@ f = lambda x: A() # type: Callable[[], A]
 f2 = lambda: A() # type: Callable[[A], A]
 class A: pass
 [out]
-main:2: error: Incompatible types in assignment (expression has type Callable[[Any], A], variable has type Callable[[], A])
+main:2: error: Incompatible types in assignment (expression has type "Callable[[Any], A]", variable has type "Callable[[], A]")
 main:2: error: Cannot infer type of lambda
-main:3: error: Incompatible types in assignment (expression has type Callable[[], A], variable has type Callable[[A], A])
+main:3: error: Incompatible types in assignment (expression has type "Callable[[], A]", variable has type "Callable[[A], A]")
 main:3: error: Cannot infer type of lambda
 
 [case testEllipsisContextForLambda]
@@ -608,7 +624,7 @@ f4 = lambda x: x # type: Callable[..., int]
 g = lambda x: 1 # type: Callable[..., str]
 [builtins fixtures/dict.pyi]
 [out]
-main:6: error: Incompatible types in assignment (expression has type Callable[[Any], int], variable has type Callable[..., str])
+main:6: error: Incompatible types in assignment (expression has type "Callable[[Any], int]", variable has type "Callable[..., str]")
 main:6: error: Incompatible return value type (got "int", expected "str")
 
 [case testEllipsisContextForLambda2]
@@ -635,7 +651,7 @@ def f(func: Callable[[T], S], *z: T, r: S = None) -> S: pass
 f(lambda x: 0 if isinstance(x, B) else 1) # E: Cannot infer type argument 1 of "f"
 f(lambda x: 0 if isinstance(x, B) else 1, A())() # E: "int" not callable
 f(lambda x: x if isinstance(x, B) else B(), A(), r=B())() # E: "B" not callable
-f( # E: Argument 1 to "f" has incompatible type Callable[[A], A]; expected Callable[[A], B]
+f( # E: Argument 1 to "f" has incompatible type "Callable[[A], A]"; expected "Callable[[A], B]"
     lambda x: B() if isinstance(x, B) else x, # E: Incompatible return value type (got "A", expected "B")
     A(), r=B())
 [builtins fixtures/isinstance.pyi]
@@ -663,7 +679,7 @@ class B: pass
 
 m = map(g, [A()])
 b = m # type: List[B]
-a = m # type: List[A] # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+a = m # type: List[A] # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]")
 [builtins fixtures/list.pyi]
 
 
@@ -677,8 +693,8 @@ a, b, c = None, None, None # type: (List[A], List[B], List[C])
 a = a or []
 a = [] or a
 b = b or [C()]
-a = a or b # E: Incompatible types in assignment (expression has type "Union[List[A], List[B]]", variable has type List[A])
-b = b or c # E: Incompatible types in assignment (expression has type "Union[List[B], List[C]]", variable has type List[B])
+a = a or b # E: Incompatible types in assignment (expression has type "Union[List[A], List[B]]", variable has type "List[A]")
+b = b or c # E: Incompatible types in assignment (expression has type "Union[List[B], List[C]]", variable has type "List[B]")
 
 class A: pass
 class B: pass
@@ -732,7 +748,7 @@ from typing import List
 i = None # type: List[int]
 s = None # type: List[str]
 i = i = []
-i = s = [] # E: Incompatible types in assignment (expression has type List[str], variable has type List[int])
+i = s = [] # E: Incompatible types in assignment (expression has type "List[str]", variable has type "List[int]")
 [builtins fixtures/list.pyi]
 
 [case testContextForAttributeDeclaredInInit]
@@ -743,14 +759,14 @@ class A:
 a = A()
 a.x = []
 a.x = [1]
-a.x = [''] # E: List item 0 has incompatible type "str"
+a.x = [''] # E: List item 0 has incompatible type "str"; expected "int"
 [builtins fixtures/list.pyi]
 
 [case testListMultiplyInContext]
 from typing import List
 a = None  # type: List[int]
 a = [None] * 3
-a = [''] * 3 # E: List item 0 has incompatible type "str"
+a = [''] * 3 # E: List item 0 has incompatible type "str"; expected "int"
 [builtins fixtures/list.pyi]
 
 [case testUnionTypeContext]
@@ -809,7 +825,7 @@ S = TypeVar('S')
 def f(a: T, b: S) -> None:
     c = lambda x: x  # type: Callable[[T], S]
 [out]
-main:5: error: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], S])
+main:5: error: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[T], S]")
 main:5: error: Incompatible return value type (got "T", expected "S")
 
 [case testLambdaInGenericClass]
@@ -820,7 +836,7 @@ class A(Generic[T]):
     def f(self, b: S) -> None:
         c = lambda x: x  # type: Callable[[T], S]
 [out]
-main:6: error: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], S])
+main:6: error: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[T], S]")
 main:6: error: Incompatible return value type (got "T", expected "S")
 
 [case testRevealTypeContext]
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 42cd312..3549a27 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -69,7 +69,7 @@ class B: pass
 import typing
 def f() -> None:
     a = g()
-    None(a) # E: None not callable
+    None(a) # E: "None" not callable
     a.x()
 
 def g(): pass
@@ -81,7 +81,7 @@ g = None # type: Any
 
 def f(a: Any) -> None:
     b = g
-    None(b) # E: None not callable
+    None(b) # E: "None" not callable
     a.x()
 [out]
 
@@ -126,7 +126,7 @@ a_s = None # type: A[str]
 def f() -> None:
     a_int = A() # type: A[int]
     a = a_int
-    a = a_s # E: Incompatible types in assignment (expression has type A[str], variable has type A[int])
+    a = a_s # E: Incompatible types in assignment (expression has type "A[str]", variable has type "A[int]")
     a = a_i
 [builtins fixtures/tuple.pyi]
 [out]
@@ -480,11 +480,11 @@ ao = None # type: A[object]
 ab = None # type: A[B]
 ac = None # type: A[C]
 
-ab = f(ao) # E: Argument 1 to "f" has incompatible type A[object]; expected A[B]
-ao = f(ab) # E: Argument 1 to "f" has incompatible type A[B]; expected A[object]
-ab = f(ac) # E: Argument 1 to "f" has incompatible type A[C]; expected A[B]
-ab = g(ao) # E: Argument 1 to "g" has incompatible type A[object]; expected A[B]
-ao = g(ab) # E: Argument 1 to "g" has incompatible type A[B]; expected A[object]
+ab = f(ao) # E: Argument 1 to "f" has incompatible type "A[object]"; expected "A[B]"
+ao = f(ab) # E: Argument 1 to "f" has incompatible type "A[B]"; expected "A[object]"
+ab = f(ac) # E: Argument 1 to "f" has incompatible type "A[C]"; expected "A[B]"
+ab = g(ao) # E: Argument 1 to "g" has incompatible type "A[object]"; expected "A[B]"
+ao = g(ab) # E: Argument 1 to "g" has incompatible type "A[B]"; expected "A[object]"
 
 ab = f(ab)
 ac = f(ac)
@@ -648,7 +648,7 @@ def mymap(f: Callable[[t], s], a: List[t]) -> List[s]: pass
 l = mymap(f, [b])
 l = [A()]
 lb = [b]
-l = lb # E: Incompatible types in assignment (expression has type List[bool], variable has type List[A])
+l = lb # E: Incompatible types in assignment (expression has type "List[bool]", variable has type "List[A]")
 [builtins fixtures/for.pyi]
 
 [case testGenericFunctionWithTypeTypeAsCallable]
@@ -676,7 +676,7 @@ f(1, 1)() # E: "int" not callable
 
 def g(x: Union[T, List[T]]) -> List[T]: pass
 def h(x: List[str]) -> None: pass
-g('a')() # E: List[str] not callable
+g('a')() # E: "List[str]" not callable
 
 # The next line is a case where there are multiple ways to satisfy a constraint
 # involving a Union. Either T = List[str] or T = str would turn out to be valid,
@@ -684,7 +684,7 @@ g('a')() # E: List[str] not callable
 # to backtrack later) and defaults to T = <nothing>. The result is an
 # awkward error message. Either a better error message, or simply accepting the
 # call, would be preferable here.
-g(['a']) # E: Argument 1 to "g" has incompatible type List[str]; expected List[<nothing>]
+g(['a']) # E: Argument 1 to "g" has incompatible type "List[str]"; expected "List[<nothing>]"
 
 h(g(['a']))
 
@@ -693,7 +693,7 @@ a = [1]
 b = ['b']
 i(a, a, b)
 i(b, a, b)
-i(a, b, b) # E: Argument 1 to "i" has incompatible type List[int]; expected List[str]
+i(a, b, b) # E: Argument 1 to "i" has incompatible type "List[int]"; expected "List[str]"
 [builtins fixtures/list.pyi]
 
 [case testCallableListJoinInference]
@@ -750,10 +750,10 @@ AnyStr = TypeVar('AnyStr', bytes, str)
 def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass
 f('foo')
 f('foo', 'bar')
-f('foo', b'bar') # E: Type argument 1 of "f" has incompatible value "object"
+f('foo', b'bar') # E: Value of type variable "AnyStr" of "f" cannot be "object"
 f(1)
 f(1, 'foo')
-f(1, 'foo', b'bar') # E: Type argument 1 of "f" has incompatible value "object"
+f(1, 'foo', b'bar') # E: Value of type variable "AnyStr" of "f" cannot be "object"
 [builtins fixtures/primitives.pyi]
 
 
@@ -778,7 +778,7 @@ from typing import TypeVar, Union, List
 T = TypeVar('T')
 def f() -> List[T]: pass
 d1 = f() # type: Union[List[int], str]
-d2 = f() # type: Union[int, str] # E: Incompatible types in assignment (expression has type List[<nothing>], variable has type "Union[int, str]")
+d2 = f() # type: Union[int, str] # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "Union[int, str]")
 def g(x: T) -> List[T]: pass
 d3 = g(1) # type: Union[List[int], List[str]]
 [builtins fixtures/list.pyi]
@@ -792,7 +792,7 @@ def k1(x: int, y: List[T]) -> List[Union[T, int]]: pass
 def k2(x: S, y: List[T]) -> List[Union[T, int]]: pass
 a = k2
 a = k2
-a = k1 # E: Incompatible types in assignment (expression has type Callable[[int, List[T]], List[Union[T, int]]], variable has type Callable[[S, List[T]], List[Union[T, int]]])
+a = k1 # E: Incompatible types in assignment (expression has type "Callable[[int, List[T]], List[Union[T, int]]]", variable has type "Callable[[S, List[T]], List[Union[T, int]]]")
 b = k1
 b = k1
 b = k2
@@ -840,7 +840,7 @@ def d_aa() -> Dict[A, A]: return {}
 a, b = None, None # type: (A, B)
 d = {a:b}
 d = d_ab()
-d = d_aa() # E: Incompatible types in assignment (expression has type Dict[A, A], variable has type Dict[A, B])
+d = d_aa() # E: Incompatible types in assignment (expression has type "Dict[A, A]", variable has type "Dict[A, B]")
 [builtins fixtures/dict.pyi]
 
 [case testSetLiteral]
@@ -851,7 +851,7 @@ def s_s() -> Set[str]: return set()
 s = {a}
 s = {x}
 s = s_i()
-s = s_s() # E: Incompatible types in assignment (expression has type Set[str], variable has type Set[int])
+s = s_s() # E: Incompatible types in assignment (expression has type "Set[str]", variable has type "Set[int]")
 [builtins fixtures/set.pyi]
 
 [case testSetWithStarExpr]
@@ -1074,14 +1074,14 @@ from typing import List, Callable
 li = [1]
 l = lambda: li
 f1 = l # type: Callable[[], List[int]]
-f2 = l # type: Callable[[], List[str]] # E: Incompatible types in assignment (expression has type Callable[[], List[int]], variable has type Callable[[], List[str]])
+f2 = l # type: Callable[[], List[str]] # E: Incompatible types in assignment (expression has type "Callable[[], List[int]]", variable has type "Callable[[], List[str]]")
 [builtins fixtures/list.pyi]
 
 [case testInferLambdaType2]
 from typing import List, Callable
 l = lambda: [B()]
 f1 = l # type: Callable[[], List[B]]
-f2 = l # type: Callable[[], List[A]] # E: Incompatible types in assignment (expression has type Callable[[], List[B]], variable has type Callable[[], List[A]])
+f2 = l # type: Callable[[], List[A]] # E: Incompatible types in assignment (expression has type "Callable[[], List[B]]", variable has type "Callable[[], List[A]]")
 
 class A: pass
 class B: pass
@@ -1119,7 +1119,7 @@ from typing import Callable
 def f(a: Callable[..., None] = lambda *a, **k: None):
     pass
 
-def g(a: Callable[..., None] = lambda *a, **k: 1):  # E: Incompatible types in assignment (expression has type Callable[[VarArg(Any), KwArg(Any)], int], variable has type Callable[..., None])
+def g(a: Callable[..., None] = lambda *a, **k: 1):  # E: Incompatible default for argument "a" (default has type "Callable[[VarArg(Any), KwArg(Any)], int]", argument has type "Callable[..., None]")
     pass
 [builtins fixtures/dict.pyi]
 
@@ -1154,7 +1154,7 @@ a = None # type: List[A]
 o = None # type: List[object]
 a2 = a or []
 a = a2
-a2 = o # E: Incompatible types in assignment (expression has type List[object], variable has type List[A])
+a2 = o # E: Incompatible types in assignment (expression has type "List[object]", variable has type "List[A]")
 class A: pass
 [builtins fixtures/list.pyi]
 
@@ -1194,7 +1194,7 @@ x2 = [B(), A()]
 x3 = [B(), B()]
 a = x1
 a = x2
-a = x3 # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+a = x3 # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]")
 [builtins fixtures/list.pyi]
 
 [case testListWithDucktypeCompatibilityAndTransitivity]
@@ -1210,7 +1210,7 @@ x2 = [C(), A()]
 x3 = [B(), C()]
 a = x1
 a = x2
-a = x3 # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+a = x3 # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]")
 [builtins fixtures/list.pyi]
 
 
@@ -1256,8 +1256,8 @@ a()
 a, b = [], []
 a.append(1)
 b.append('')
-a() # E: List[int] not callable
-b() # E: List[str] not callable
+a() # E: "List[int]" not callable
+b() # E: "List[str]" not callable
 [builtins fixtures/list.pyi]
 [out]
 
@@ -1371,14 +1371,14 @@ a.add('')  # E: Argument 1 to "add" of "set" has incompatible type "str"; expect
 [case testInferDictInitializedToEmpty]
 a = {}
 a[1] = ''
-a() # E: Dict[int, str] not callable
+a() # E: "Dict[int, str]" not callable
 [builtins fixtures/dict.pyi]
 [out]
 
 [case testInferDictInitializedToEmptyUsingUpdate]
 a = {}
 a.update({'': 42})
-a() # E: Dict[str, int] not callable
+a() # E: "Dict[str, int]" not callable
 [builtins fixtures/dict.pyi]
 [out]
 
@@ -1447,7 +1447,7 @@ def f() -> None:
 import typing
 def f() -> None:
     a = None
-    a.x() # E: None has no attribute "x"
+    a.x() # E: "None" has no attribute "x"
 [out]
 
 [case testGvarPartiallyInitializedToNone]
@@ -1504,7 +1504,7 @@ x = None
 def f() -> None:
     x = None
     x = 1
-x()  # E: None not callable
+x()  # E: "None" not callable
 
 [case testAttributePartiallyInitializedToNone]
 class A:
@@ -1523,8 +1523,8 @@ class A:
         self.x = 1
         self.x()
 [out]
-main:6: error: Incompatible types in assignment (expression has type "int", variable has type None)
-main:7: error: None not callable
+main:6: error: Incompatible types in assignment (expression has type "int", variable has type "None")
+main:7: error: "None" not callable
 
 [case testGlobalInitializedToNoneSetFromFunction]
 a = None
@@ -1553,7 +1553,7 @@ class A:
             pass
 [builtins fixtures/for.pyi]
 [out]
-main:5: error: None has no attribute "__iter__"
+main:5: error: "None" has no attribute "__iter__"
 
 [case testPartialTypeErrorSpecialCase2]
 # This used to crash.
@@ -1574,7 +1574,7 @@ class A:
             pass
 [builtins fixtures/for.pyi]
 [out]
-main:4: error: None has no attribute "__iter__"
+main:4: error: "None" has no attribute "__iter__"
 
 
 -- Multipass
@@ -1680,7 +1680,7 @@ def g(d: Dict[str, int]) -> None: pass
 def f() -> None:
     x = {}
     x[1] = y
-    g(x) # E: Argument 1 to "g" has incompatible type Dict[int, str]; expected Dict[str, int]
+    g(x) # E: Argument 1 to "g" has incompatible type "Dict[int, str]"; expected "Dict[str, int]"
     x[1] = 1 # E: Incompatible types in assignment (expression has type "int", target has type "str")
     x[1] = ''
 y = ''
@@ -1694,7 +1694,7 @@ def f() -> None:
     x = {}
     y
     x[1] = 1
-    g(x) # E: Argument 1 to "g" has incompatible type Dict[int, int]; expected Dict[str, int]
+    g(x) # E: Argument 1 to "g" has incompatible type "Dict[int, int]"; expected "Dict[str, int]"
 y = ''
 [builtins fixtures/dict.pyi]
 [out]
@@ -1713,7 +1713,7 @@ def f() -> None:
     y = o
     x = []
     x.append(y)
-    x() # E: List[int] not callable
+    x() # E: "List[int]" not callable
 o = 1
 [builtins fixtures/list.pyi]
 [out]
@@ -1723,7 +1723,7 @@ def f() -> None:
     y = o
     x = {}
     x[''] = y
-    x() # E: Dict[str, int] not callable
+    x() # E: "Dict[str, int]" not callable
 o = 1
 [builtins fixtures/dict.pyi]
 [out]
@@ -1825,20 +1825,20 @@ a2.foo2()
 [case testUnificationEmptyListLeft]
 def f(): pass
 a = [] if f() else [0]
-a() # E: List[int] not callable
+a() # E: "List[int]" not callable
 [builtins fixtures/list.pyi]
 
 [case testUnificationEmptyListRight]
 def f(): pass
 a = [0] if f() else []
-a() # E: List[int] not callable
+a() # E: "List[int]" not callable
 [builtins fixtures/list.pyi]
 
 [case testUnificationEmptyListLeftInContext]
 from typing import List
 def f(): pass
 a = [] if f() else [0] # type: List[int]
-a() # E: List[int] not callable
+a() # E: "List[int]" not callable
 [builtins fixtures/list.pyi]
 
 [case testUnificationEmptyListRightInContext]
@@ -1846,37 +1846,37 @@ a() # E: List[int] not callable
 from typing import List
 def f(): pass
 a = [0] if f() else [] # type: List[int]
-a() # E: List[int] not callable
+a() # E: "List[int]" not callable
 [builtins fixtures/list.pyi]
 
 [case testUnificationEmptySetLeft]
 def f(): pass
 a = set() if f() else {0}
-a() # E: Set[int] not callable
+a() # E: "Set[int]" not callable
 [builtins fixtures/set.pyi]
 
 [case testUnificationEmptyDictLeft]
 def f(): pass
 a = {} if f() else {0: 0}
-a() # E: Dict[int, int] not callable
+a() # E: "Dict[int, int]" not callable
 [builtins fixtures/dict.pyi]
 
 [case testUnificationEmptyDictRight]
 def f(): pass
 a = {0: 0} if f() else {}
-a() # E: Dict[int, int] not callable
+a() # E: "Dict[int, int]" not callable
 [builtins fixtures/dict.pyi]
 
 [case testUnificationDictWithEmptyListLeft]
 def f(): pass
 a = {0: []} if f() else {0: [0]}
-a() # E: Dict[int, List[int]] not callable
+a() # E: "Dict[int, List[int]]" not callable
 [builtins fixtures/dict.pyi]
 
 [case testUnificationDictWithEmptyListRight]
 def f(): pass
 a = {0: [0]} if f() else {0: []}
-a() # E: Dict[int, List[int]] not callable
+a() # E: "Dict[int, List[int]]" not callable
 [builtins fixtures/dict.pyi]
 
 [case testMisguidedSetItem]
@@ -1887,3 +1887,43 @@ C[0] = 0
 [out]
 main:4: error: Type expected within [...]
 main:4: error: Unsupported target for indexed assignment
+
+[case testNoCrashOnPartialMember]
+class C:
+    x = None
+    def __init__(self) -> None:
+        self.x = []  # E: Need type annotation for variable
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNoCrashOnPartialVariable]
+from typing import Tuple, TypeVar
+T = TypeVar('T', bound=str)
+
+def f(x: T) -> Tuple[T]:
+    ...
+x = None
+(x,) = f('')
+reveal_type(x) # E: Revealed type is 'builtins.str'
+[out]
+
+[case testNoCrashOnPartialVariable2]
+from typing import Tuple, TypeVar
+T = TypeVar('T', bound=str)
+
+def f() -> Tuple[T]:
+    ...
+x = None
+(x,) = f()
+[out]
+
+[case testNoCrashOnPartialVariable3]
+from typing import Tuple, TypeVar
+T = TypeVar('T')
+
+def f(x: T) -> Tuple[T, T]:
+    ...
+x = None
+(x, x) = f('')
+reveal_type(x) # E: Revealed type is 'builtins.str'
+[out]
diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test
index 795bcb1..8279e1a 100644
--- a/test-data/unit/check-isinstance.test
+++ b/test-data/unit/check-isinstance.test
@@ -684,7 +684,7 @@ while bool():
         x + 'a'
         break
     x + [1]
-    x + 'a'           # E: Unsupported operand types for + (List[int] and "str")
+    x + 'a'           # E: Unsupported operand types for + ("List[int]" and "str")
 x + [1]               # E: Unsupported operand types for + (likely involving Union)
 [builtins fixtures/isinstancelist.pyi]
 
@@ -1187,6 +1187,8 @@ if isinstance(x, B) and isinstance(y, int):
     1() # type checking skipped
 if isinstance(y, int) and isinstance(x, B):
     1() # type checking skipped
+if isinstance(y, int) and y > 42:
+    1() # type checking skipped
 [builtins fixtures/isinstancelist.pyi]
 
 [case testReturnWithCallExprAndIsinstance]
@@ -1444,10 +1446,10 @@ def f(x: Union[Type[int], Type[str], Type[List]]) -> None:
         reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str]'
         x()[1]  # E: Value of type "Union[int, str]" is not indexable
     else:
-        reveal_type(x)  # E: Revealed type is 'Type[builtins.list]'
+        reveal_type(x)  # E: Revealed type is 'Type[builtins.list[Any]]'
         reveal_type(x())  # E: Revealed type is 'builtins.list[<nothing>]'
         x()[1]
-    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list]]'
+    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
     reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
     if issubclass(x, (str, (list,))):
         reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]'
@@ -1466,10 +1468,10 @@ def f(x: Type[Union[int, str, List]]) -> None:
         reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str]'
         x()[1]  # E: Value of type "Union[int, str]" is not indexable
     else:
-        reveal_type(x)  # E: Revealed type is 'Type[builtins.list]'
+        reveal_type(x)  # E: Revealed type is 'Type[builtins.list[Any]]'
         reveal_type(x())  # E: Revealed type is 'builtins.list[<nothing>]'
         x()[1]
-    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list]]'
+    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
     reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
     if issubclass(x, (str, (list,))):
         reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]'
@@ -1483,17 +1485,17 @@ def f(x: Type[Union[int, str, List]]) -> None:
 from typing import Union, List, Tuple, Dict, Type
 
 def f(x: Type[Union[int, str, List]]) -> None:
-    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list]]'
+    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
     reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
     if issubclass(x, (str, (int,))):
         reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str]]'
         reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str]'
         x()[1]  # E: Value of type "Union[int, str]" is not indexable
     else:
-        reveal_type(x)  # E: Revealed type is 'Type[builtins.list]'
+        reveal_type(x)  # E: Revealed type is 'Type[builtins.list[Any]]'
         reveal_type(x())  # E: Revealed type is 'builtins.list[<nothing>]'
         x()[1]
-    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list]]'
+    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
     reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
     if issubclass(x, (str, (list,))):
         reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]'
@@ -1524,7 +1526,7 @@ def test_issubclass(cls: Type[Goblin]) -> None:
     else:
         reveal_type(cls)  # E: Revealed type is 'Type[__main__.Goblin]'
         cls.level
-        cls.job  # E: Type[Goblin] has no attribute "job"
+        cls.job  # E: "Type[Goblin]" has no attribute "job"
         g = cls()
         g.level = 15
         g.job  # E: "Goblin" has no attribute "job"
@@ -1545,7 +1547,7 @@ def test_issubclass(cls: Type[Mob]) -> None:
     if issubclass(cls, Goblin):
         reveal_type(cls)  # E: Revealed type is 'Type[__main__.Goblin]'
         cls.level
-        cls.job  # E: Type[Goblin] has no attribute "job"
+        cls.job  # E: "Type[Goblin]" has no attribute "job"
         g = cls()
         g.level = 15
         g.job  # E: "Goblin" has no attribute "job"
@@ -1559,8 +1561,8 @@ def test_issubclass(cls: Type[Mob]) -> None:
             g.job = 'Warrior' # E: Cannot assign to class variable "job" via instance
     else:
         reveal_type(cls)  # E: Revealed type is 'Type[__main__.Mob]'
-        cls.job  # E: Type[Mob] has no attribute "job"
-        cls.level  # E: Type[Mob] has no attribute "level"
+        cls.job  # E: "Type[Mob]" has no attribute "job"
+        cls.level  # E: "Type[Mob]" has no attribute "level"
         m = cls()
         m.level = 15  # E: "Mob" has no attribute "level"
         m.job  # E: "Mob" has no attribute "job"
@@ -1601,7 +1603,7 @@ def test_issubclass(cls: Type[Mob]) -> None:
     if issubclass(cls, (Goblin, GoblinAmbusher)):
         reveal_type(cls)  # E: Revealed type is 'Type[__main__.Goblin]'
         cls.level
-        cls.job  # E: Type[Goblin] has no attribute "job"
+        cls.job  # E: "Type[Goblin]" has no attribute "job"
         g = cls()
         g.level = 15
         g.job  # E: "Goblin" has no attribute "job"
@@ -1615,8 +1617,8 @@ def test_issubclass(cls: Type[Mob]) -> None:
             ga.job = "Warrior"  # E: Cannot assign to class variable "job" via instance
     else:
         reveal_type(cls)  # E: Revealed type is 'Type[__main__.Mob]'
-        cls.job  # E: Type[Mob] has no attribute "job"
-        cls.level  # E: Type[Mob] has no attribute "level"
+        cls.job  # E: "Type[Mob]" has no attribute "job"
+        cls.level  # E: "Type[Mob]" has no attribute "level"
         m = cls()
         m.level = 15  # E: "Mob" has no attribute "level"
         m.job  # E: "Mob" has no attribute "job"
@@ -1747,10 +1749,24 @@ if isinstance(x): # E: Too few arguments for "isinstance"
 
 [case testIsInstanceTooManyArgs]
 isinstance(1, 1, 1) # E: Too many arguments for "isinstance" \
-         # E: Argument 2 to "isinstance" has incompatible type "int"; expected "Union[type, tuple]"
+         # E: Argument 2 to "isinstance" has incompatible type "int"; expected "Union[type, Tuple[Any, ...]]"
 x: object
 if isinstance(x, str, 1): # E: Too many arguments for "isinstance"
     reveal_type(x) # E: Revealed type is 'builtins.object'
     x = 1
     reveal_type(x) # E: Revealed type is 'builtins.int'
 [builtins fixtures/isinstancelist.pyi]
+
+
+[case testIsinstanceNarrowAny]
+from typing import Any
+
+def narrow_any_to_str_then_reassign_to_int() -> None:
+    v = 1 # type: Any
+
+    if isinstance(v, str):
+        reveal_type(v)  # E: Revealed type is 'builtins.str'
+        v = 2
+        reveal_type(v)  # E: Revealed type is 'Any'
+
+[builtins fixtures/isinstance.pyi]
diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test
index 6d921b8..3d682b4 100644
--- a/test-data/unit/check-kwargs.test
+++ b/test-data/unit/check-kwargs.test
@@ -4,7 +4,7 @@
 [case testTypeErrorInKeywordArgument]
 import typing
 def f(o: object) -> None: pass
-f(o=None()) # E: None not callable
+f(o=None()) # E: "None" not callable
 
 [case testSimpleKeywordArgument]
 import typing
@@ -89,8 +89,8 @@ class A: pass
 [case testKeywordArgumentsWithDynamicallyTypedCallable]
 from typing import Any
 f = None # type: Any
-f(x=f(), z=None()) # E: None not callable
-f(f, zz=None()) # E: None not callable
+f(x=f(), z=None()) # E: "None" not callable
+f(f, zz=None()) # E: "None" not callable
 f(x=None)
 
 [case testKeywordArgumentWithFunctionObject]
@@ -216,8 +216,8 @@ class A: pass
 from typing import Dict, Any
 def f( **kwargs: 'A') -> None:
     d1 = kwargs # type: Dict[str, A]
-    d2 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type Dict[str, A], variable has type Dict[A, Any])
-    d3 = kwargs # type: Dict[Any, str] # E: Incompatible types in assignment (expression has type Dict[str, A], variable has type Dict[Any, str])
+    d2 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type "Dict[str, A]", variable has type "Dict[A, Any]")
+    d3 = kwargs # type: Dict[Any, str] # E: Incompatible types in assignment (expression has type "Dict[str, A]", variable has type "Dict[Any, str]")
 class A: pass
 [builtins fixtures/dict.pyi]
 [out]
@@ -227,7 +227,7 @@ from typing import Dict, Any
 def f(**kwargs) -> None:
     d1 = kwargs # type: Dict[str, A]
     d2 = kwargs # type: Dict[str, str]
-    d3 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type Dict[str, Any], variable has type Dict[A, Any])
+    d3 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type "Dict[str, Any]", variable has type "Dict[A, Any]")
 class A: pass
 [builtins fixtures/dict.pyi]
 [out]
@@ -252,8 +252,8 @@ d = None # type: Dict[str, A]
 f(**d)
 f(x=A(), **d)
 d2 = None # type: Dict[str, B]
-f(**d2)        # E: Argument 1 to "f" has incompatible type **Dict[str, B]; expected "A"
-f(x=A(), **d2) # E: Argument 2 to "f" has incompatible type **Dict[str, B]; expected "A"
+f(**d2)        # E: Argument 1 to "f" has incompatible type "**Dict[str, B]"; expected "A"
+f(x=A(), **d2) # E: Argument 2 to "f" has incompatible type "**Dict[str, B]"; expected "A"
 class A: pass
 class B: pass
 [builtins fixtures/dict.pyi]
@@ -316,7 +316,7 @@ def f(a: 'A', b: 'B') -> None: pass
 d = None # type: Dict[str, Any]
 f(**d)
 d2 = None # type: Dict[str, A]
-f(**d2) # E: Argument 1 to "f" has incompatible type **Dict[str, A]; expected "B"
+f(**d2) # E: Argument 1 to "f" has incompatible type "**Dict[str, A]"; expected "B"
 class A: pass
 class B: pass
 [builtins fixtures/dict.pyi]
@@ -362,15 +362,15 @@ def f(a: int) -> None:
     pass
 
 s = ('',)
-f(*s) # E: Argument 1 to "f" has incompatible type *"Tuple[str]"; expected "int"
+f(*s) # E: Argument 1 to "f" has incompatible type "*Tuple[str]"; expected "int"
 
 a = {'': 0}
-f(a) # E: Argument 1 to "f" has incompatible type Dict[str, int]; expected "int"
+f(a) # E: Argument 1 to "f" has incompatible type "Dict[str, int]"; expected "int"
 f(**a) # okay
 
 b = {'': ''}
-f(b) # E: Argument 1 to "f" has incompatible type Dict[str, str]; expected "int"
-f(**b) # E: Argument 1 to "f" has incompatible type **Dict[str, str]; expected "int"
+f(b) # E: Argument 1 to "f" has incompatible type "Dict[str, str]"; expected "int"
+f(**b) # E: Argument 1 to "f" has incompatible type "**Dict[str, str]"; expected "int"
 
 c = {0: 0}
 f(**c) # E: Keywords must be strings
diff --git a/test-data/unit/check-lists.test b/test-data/unit/check-lists.test
index c9c67e8..c575f4b 100644
--- a/test-data/unit/check-lists.test
+++ b/test-data/unit/check-lists.test
@@ -64,7 +64,7 @@ class C: pass
 [case testListWithStarExpr]
 (x, *a) = [1, 2, 3]
 a = [1, *[2, 3]]
-reveal_type(a)  # E: Revealed type is 'builtins.list[builtins.int]'
+reveal_type(a)  # E: Revealed type is 'builtins.list[builtins.int*]'
 b = [0, *a]
 reveal_type(b)  # E: Revealed type is 'builtins.list[builtins.int*]'
 c = [*a, 0]
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
index b1b6857..f3104c6 100644
--- a/test-data/unit/check-modules.test
+++ b/test-data/unit/check-modules.test
@@ -201,7 +201,7 @@ None + ''
 [out]
 main:1: error: Cannot find module named 'nonexistent'
 main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
+main:2: error: Unsupported left operand type for + ("None")
 
 [case testTypeCheckWithUnknownModule2]
 import m, nonexistent
@@ -213,7 +213,7 @@ x = 1
 [out]
 main:1: error: Cannot find module named 'nonexistent'
 main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
+main:2: error: Unsupported left operand type for + ("None")
 main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int")
 
 [case testTypeCheckWithUnknownModule3]
@@ -226,7 +226,7 @@ x = 1
 [out]
 main:1: error: Cannot find module named 'nonexistent'
 main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
+main:2: error: Unsupported left operand type for + ("None")
 main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int")
 
 [case testTypeCheckWithUnknownModule4]
@@ -236,7 +236,7 @@ None + ''
 main:1: error: Cannot find module named 'nonexistent'
 main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
 main:1: error: Cannot find module named 'another'
-main:2: error: Unsupported left operand type for + (None)
+main:2: error: Unsupported left operand type for + ("None")
 
 [case testTypeCheckWithUnknownModule5]
 import nonexistent as x
@@ -244,7 +244,7 @@ None + ''
 [out]
 main:1: error: Cannot find module named 'nonexistent'
 main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
+main:2: error: Unsupported left operand type for + ("None")
 
 [case testTypeCheckWithUnknownModuleUsingFromImport]
 from nonexistent import x
@@ -252,7 +252,7 @@ None + ''
 [out]
 main:1: error: Cannot find module named 'nonexistent'
 main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
+main:2: error: Unsupported left operand type for + ("None")
 
 [case testTypeCheckWithUnknownModuleUsingImportStar]
 from nonexistent import *
@@ -260,7 +260,7 @@ None + ''
 [out]
 main:1: error: Cannot find module named 'nonexistent'
 main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
-main:2: error: Unsupported left operand type for + (None)
+main:2: error: Unsupported left operand type for + ("None")
 
 [case testAccessingUnknownModule]
 import xyz
@@ -411,14 +411,14 @@ import typing
 __all__ = [1, 2, 3]
 [builtins fixtures/module_all.pyi]
 [out]
-main:2: error: Type of __all__ must be Sequence[str], not List[int]
+main:2: error: Type of __all__ must be "Sequence[str]", not "List[int]"
 
 [case testAllMustBeSequenceStr_python2]
 import typing
 __all__ = [1, 2, 3]
 [builtins_py2 fixtures/module_all_python2.pyi]
 [out]
-main:2: error: Type of __all__ must be Sequence[unicode], not List[int]
+main:2: error: Type of __all__ must be "Sequence[unicode]", not "List[int]"
 
 [case testAllUnicodeSequenceOK_python2]
 import typing
@@ -427,6 +427,25 @@ __all__ = [u'a', u'b', u'c']
 
 [out]
 
+[case testUnderscoreExportedValuesInImportAll]
+import typing
+from m import *
+_ = a
+_ = _b
+_ = __c__
+_ = ___d
+_ = e
+_ = f # E: Name 'f' is not defined
+_ = _g # E: Name '_g' is not defined
+[file m.py]
+__all__ = ['a']
+__all__ += ('_b',)
+__all__.append('__c__')
+__all__.extend(('___d', 'e'))
+
+a = _b = __c__ = ___d = e = f = _g = 1
+[builtins fixtures/module_all.pyi]
+
 [case testEllipsisInitializerInStubFileWithType]
 import m
 m.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
@@ -456,11 +475,11 @@ def f(x: int = ...) -> None: pass
 [file m.pyi]
 def g(x: int = '') -> None: pass
 [out]
-tmp/m.pyi:1: error: Incompatible types in assignment (expression has type "str", variable has type "int")
-main:2: error: Incompatible types in assignment (expression has type "ellipsis", variable has type "int")
+tmp/m.pyi:1: error: Incompatible default for argument "x" (default has type "str", argument has type "int")
+main:2: error: Incompatible default for argument "x" (default has type "ellipsis", argument has type "int")
 
 [case testEllipsisDefaultArgValueInNonStub]
-def f(x: int = ...) -> None: pass # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "int")
+def f(x: int = ...) -> None: pass # E: Incompatible default for argument "x" (default has type "ellipsis", argument has type "int")
 [out]
 
 [case testStarImportOverlapping]
@@ -565,7 +584,7 @@ x = ''
 def f(x): pass
 def g(x): pass
 try:
-    from m import f, g # E: Incompatible import of "g" (imported name has type Callable[[Any, Any], Any], local name has type Callable[[Any], Any])
+    from m import f, g # E: Incompatible import of "g" (imported name has type "Callable[[Any, Any], Any]", local name has type "Callable[[Any], Any]")
 except:
     pass
 [file m.py]
@@ -601,7 +620,7 @@ def f(x): pass
 try:
     from m import f
 except:
-    f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+    f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]")
 [file m.py]
 def f(): pass
 
@@ -611,7 +630,7 @@ from m import f
 def g() -> None:
     global f
     f = None
-    f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+    f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]")
 [file m.py]
 def f(): pass
 [out]
@@ -619,7 +638,7 @@ def f(): pass
 [case testAssignToFuncDefViaNestedModules]
 import m.n
 m.n.f = None
-m.n.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+m.n.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]")
 [file m/__init__.py]
 [file m/n.py]
 def f(): pass
@@ -628,7 +647,7 @@ def f(): pass
 [case testAssignToFuncDefViaModule]
 import m
 m.f = None
-m.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+m.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Callable[[], Any]")
 [file m.py]
 def f(): pass
 [out]
@@ -673,7 +692,7 @@ def f(x: str) -> None: pass
 None + 1
 [file m/a.py]
 [out]
-tmp/m/__init__.py:1: error: Unsupported left operand type for + (None)
+tmp/m/__init__.py:1: error: Unsupported left operand type for + ("None")
 
 [case testTypeCheckNamedModule2]
 # cmd: mypy -m m.a
@@ -681,7 +700,7 @@ tmp/m/__init__.py:1: error: Unsupported left operand type for + (None)
 [file m/a.py]
 None + 1
 [out]
-tmp/m/a.py:1: error: Unsupported left operand type for + (None)
+tmp/m/a.py:1: error: Unsupported left operand type for + ("None")
 
 [case testTypeCheckNamedModule3]
 # cmd: mypy -m m
@@ -689,7 +708,7 @@ tmp/m/a.py:1: error: Unsupported left operand type for + (None)
 None + 1
 [file m/a.py]
 [out]
-tmp/m/__init__.py:1: error: Unsupported left operand type for + (None)
+tmp/m/__init__.py:1: error: Unsupported left operand type for + ("None")
 
 [case testTypeCheckNamedModule4]
 # cmd: mypy -m m
@@ -704,7 +723,7 @@ None + ''  # Not analyzed.
 [file m.py]
 None + 1
 [out]
-tmp/m.py:1: error: Unsupported left operand type for + (None)
+tmp/m.py:1: error: Unsupported left operand type for + ("None")
 
 [case testTypeCheckNamedModuleWithImportCycle]
 # cmd: mypy -m m.a
@@ -914,8 +933,8 @@ def y() -> str: return "foo"
 class z: pass
 [out]
 main:2: error: Incompatible import of "x" (imported name has type "str", local name has type "int")
-main:2: error: Incompatible import of "y" (imported name has type Callable[[], str], local name has type Callable[[], int])
-main:2: error: Incompatible import of "z" (imported name has type Type[b.z], local name has type Type[a.z])
+main:2: error: Incompatible import of "y" (imported name has type "Callable[[], str]", local name has type "Callable[[], int]")
+main:2: error: Incompatible import of "z" (imported name has type "Type[b.z]", local name has type "Type[a.z]")
 
 -- Misc
 
@@ -955,19 +974,24 @@ tmp/foo/bar.py: note: (Using --follow-imports=error, submodule passed on command
 # cmd: mypy -m main
 # flags: --follow-imports=skip
 [file main.py]
-from stub import x # Permitted
-from other import y # Disallowed
-x + '' # Error here
-y + '' # But not here
+from stub import x, z  # Followed
+from other import y  # Not followed
+x + ''  # No error here
+y + ''  # No error here
+z + ''  # Error here
 [file stub.pyi]
-from non_stub import x
+from non_stub import x as x  # this import is not followed
+
+z = 42
 [file non_stub.py]
 x = 42
+
+x + ''  # no error because file is not analyzed
 [file other.py]
 y = 42
 [builtins fixtures/module.pyi]
 [out]
-tmp/main.py:3: error: Unsupported left operand type for + ("int")
+tmp/main.py:5: error: Unsupported left operand type for + ("int")
 
 [case testSilentSubmoduleImport]
 # cmd: mypy -m foo
@@ -1360,7 +1384,7 @@ def do_something(dic: Row) -> None:
 def do_another() -> Row:
     return {}
 
-do_something({'good': 'bad'}) # E: Dict entry 0 has incompatible type "str": "str"
+do_something({'good': 'bad'}) # E: Dict entry 0 has incompatible type "str": "str"; expected "str": "int"
 reveal_type(do_another()) # E: Revealed type is 'builtins.dict[builtins.str, builtins.int]'
 
 [file ex2a.py]
@@ -1640,3 +1664,277 @@ m = n  # E: Cannot assign multiple modules to name 'm' without explicit 'types.M
 [file n.py]
 
 [builtins fixtures/module.pyi]
+
+[case testNoReExportFromStubs]
+from stub import Iterable  # E: Module 'stub' has no attribute 'Iterable'
+from stub import C
+
+c = C()
+reveal_type(c.x)  # E: Revealed type is 'builtins.int'
+it: Iterable[int]
+reveal_type(it)  # E: Revealed type is 'Any'
+
+[file stub.pyi]
+from typing import Iterable
+from substub import C as C
+
+def fun(x: Iterable[str]) -> Iterable[int]: pass
+
+[file substub.pyi]
+class C:
+    x: int
+
+[builtins fixtures/module.pyi]
+
+[case testNoReExportFromStubsMemberType]
+import stub
+
+c = stub.C()
+reveal_type(c.x)  # E: Revealed type is 'builtins.int'
+it: stub.Iterable[int]  # E: Name 'stub.Iterable' is not defined
+reveal_type(it)  # E: Revealed type is 'Any'
+
+[file stub.pyi]
+from typing import Iterable
+from substub import C as C
+
+def fun(x: Iterable[str]) -> Iterable[int]: pass
+
+[file substub.pyi]
+class C:
+    x: int
+
+[builtins fixtures/module.pyi]
+
+[case testNoReExportFromStubsMemberVar]
+import stub
+
+reveal_type(stub.y)  # E: Revealed type is 'builtins.int'
+reveal_type(stub.z)  # E: Revealed type is 'Any' \
+                     # E: Module has no attribute "z"
+
+[file stub.pyi]
+from substub import y as y
+from substub import z
+
+[file substub.pyi]
+y = 42
+z: int
+
+[builtins fixtures/module.pyi]
+
+[case testReExportChildStubs]
+import mod
+from mod import submod
+
+reveal_type(mod.x)  # E: Revealed type is 'mod.submod.C'
+y = submod.C()
+reveal_type(y.a)  # E: Revealed type is 'builtins.str'
+
+[file mod/__init__.pyi]
+from . import submod
+x: submod.C
+
+[file mod/submod.pyi]
+class C:
+    a: str
+
+[builtins fixtures/module.pyi]
+
+[case testReExportChildStubs2]
+import mod.submod
+
+y = mod.submod.C()
+reveal_type(y.a)  # E: Revealed type is 'builtins.str'
+
+[file mod/__init__.pyi]
+from . import submod
+x: submod.C
+
+[file mod/submod.pyi]
+class C:
+    a: str
+
+[builtins fixtures/module.pyi]
+
+[case testNoReExportChildStubs]
+import mod
+from mod import C, D  # E: Module 'mod' has no attribute 'C'
+
+reveal_type(mod.x)  # E: Revealed type is 'mod.submod.C'
+mod.C  # E: Module has no attribute "C"
+y = mod.D()
+reveal_type(y.a)  # E: Revealed type is 'builtins.str'
+
+[file mod/__init__.pyi]
+from .submod import C, D as D
+x: C
+
+[file mod/submod.pyi]
+class C: pass
+class D:
+    a: str
+[builtins fixtures/module.pyi]
+
+[case testNoReExportNestedStub]
+from stub import substub  # E: Module 'stub' has no attribute 'substub'
+
+[file stub.pyi]
+import substub
+
+[file substub.pyi]
+x = 42
+
+[file mod/submod.pyi]
+
+[case testModuleAliasToQualifiedImport]
+import package.module
+alias = package.module
+reveal_type(alias.whatever('/'))  # E: Revealed type is 'builtins.str*'
+[file package/__init__.py]
+
+[file package/module.py]
+from typing import TypeVar
+T = TypeVar('T')
+def whatever(x: T) -> T: pass
+[builtins fixtures/module.pyi]
+
+[case testModuleAliasToQualifiedImport2]
+import mod
+import othermod
+alias = mod.submod
+reveal_type(alias.whatever('/'))  # E: Revealed type is 'builtins.str*'
+alias = othermod  # E: Cannot assign multiple modules to name 'alias' without explicit 'types.ModuleType' annotation
+[file mod.py]
+import submod
+[file submod.py]
+from typing import TypeVar
+T = TypeVar('T')
+def whatever(x: T) -> T: pass
+[file othermod.py]
+
+[builtins fixtures/module.pyi]
+
+[case testModuleLevelGetattr]
+import has_getattr
+
+reveal_type(has_getattr.any_attribute)  # E: Revealed type is 'Any'
+
+[file has_getattr.pyi]
+from typing import Any
+
+def __getattr__(name: str) -> Any: ...
+
+[builtins fixtures/module.pyi]
+
+[case testModuleLevelGetattrReturnType]
+import has_getattr
+
+reveal_type(has_getattr.any_attribute)  # E: Revealed type is 'builtins.str'
+
+[file has_getattr.pyi]
+def __getattr__(name: str) -> str: ...
+
+[builtins fixtures/module.pyi]
+
+[case testModuleLevelGetattrInvalidSignature]
+import has_getattr
+
+reveal_type(has_getattr.any_attribute)
+
+[file has_getattr.pyi]
+def __getattr__(x: int, y: str) -> str: ...
+
+[out]
+tmp/has_getattr.pyi:1: error: Invalid signature "def (builtins.int, builtins.str) -> builtins.str"
+main:3: error: Revealed type is 'builtins.str'
+
+[builtins fixtures/module.pyi]
+
+[case testModuleLevelGetattrNotCallable]
+import has_getattr
+
+reveal_type(has_getattr.any_attribute)  # E: Revealed type is 'Any'  # E: Module has no attribute "any_attribute"
+
+[file has_getattr.pyi]
+__getattr__ = 3
+
+[builtins fixtures/module.pyi]
+
+[case testModuleLevelGetattrUntyped]
+import has_getattr
+reveal_type(has_getattr.any_attribute)  # E: Revealed type is 'Any'
+
+[file has_getattr.pyi]
+def __getattr__(name): ...
+
+[builtins fixtures/module.pyi]
+
+[case testModuleLevelGetattrNotStub]
+
+import has_getattr
+reveal_type(has_getattr.any_attribute)
+
+[file has_getattr.py]
+def __getattr__(name): ...
+
+[out]
+tmp/has_getattr.py:1: error: __getattr__ is not valid at the module level outside a stub file
+main:3: error: Revealed type is 'Any'
+main:3: error: Module has no attribute "any_attribute"
+
+[builtins fixtures/module.pyi]
+
+[case testModuleLevelGetattribute]
+
+def __getattribute__(): ...  # E: __getattribute__ is not valid at the module level
+
+[case testModuleLevelGetattrImportFrom]
+from has_attr import name
+reveal_type(name)  # E: Revealed type is 'Any'
+
+[file has_attr.pyi]
+from typing import Any
+def __getattr__(name: str) -> Any: ...
+
+[builtins fixtures/module.pyi]
+
+[case testModuleLevelGetattrImportFromRetType]
+from has_attr import int_attr
+reveal_type(int_attr)  # E: Revealed type is 'builtins.int'
+
+[file has_attr.pyi]
+def __getattr__(name: str) -> int: ...
+
+[builtins fixtures/module.pyi]
+
+[case testModuleLevelGetattrImportFromNotStub]
+from non_stub import name
+reveal_type(name)
+
+[file non_stub.py]
+from typing import Any
+def __getattr__(name: str) -> Any: ...
+
+[out]
+tmp/non_stub.py:2: error: __getattr__ is not valid at the module level outside a stub file
+main:1: error: Module 'non_stub' has no attribute 'name'
+main:2: error: Revealed type is 'Any'
+
+[builtins fixtures/module.pyi]
+
+[case testModuleLevelGetattrImportFromAs]
+from has_attr import name as n
+reveal_type(name)
+reveal_type(n)
+
+[file has_attr.pyi]
+from typing import Any
+def __getattr__(name: str) -> Any: ...
+
+[out]
+main:2: error: Revealed type is 'Any'
+main:2: error: Name 'name' is not defined
+main:3: error: Revealed type is 'Any'
+
+[builtins fixtures/module.pyi]
diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test
index 7d313da..9726dad 100644
--- a/test-data/unit/check-namedtuple.test
+++ b/test-data/unit/check-namedtuple.test
@@ -221,7 +221,7 @@ a = (1,)  # E: Incompatible types in assignment (expression has type "Tuple[int]
 [case testNamedTupleMissingClassAttribute]
 import collections
 MyNamedTuple = collections.namedtuple('MyNamedTuple', ['spam', 'eggs'])
-MyNamedTuple.x # E: Type[MyNamedTuple] has no attribute "x"
+MyNamedTuple.x # E: "Type[MyNamedTuple]" has no attribute "x"
 
 
 [case testNamedTupleEmptyItems]
@@ -286,7 +286,7 @@ from typing import NamedTuple
 
 X = NamedTuple('X', [('x', int), ('y', str)])
 reveal_type(X._make([5, 'a']))  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
-X._make('a b')  # E: Argument 1 to X._make has incompatible type "str"; expected Iterable[Any]
+X._make('a b')  # E: Argument 1 to X._make has incompatible type "str"; expected "Iterable[Any]"
 
 -- # FIX: not a proper class method
 -- x = None  # type: X
@@ -415,7 +415,7 @@ b = B._make([''])  # type: B
 [case testNamedTupleIncompatibleRedefinition]
 from typing import NamedTuple
 class Crash(NamedTuple):
-    count: int  # E: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as Callable[[Tuple[Any, ...], Any], int])
+    count: int  # E: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as "Callable[[Tuple[int, ...], Any], int]")
 [builtins fixtures/tuple.pyi]
 
 [case testNamedTupleInClassNamespace]
@@ -426,7 +426,7 @@ class C:
         A = NamedTuple('A', [('x', int)])
     def g(self):
         A = NamedTuple('A', [('y', int)])
-C.A  # E: Type[C] has no attribute "A"
+C.A  # E: "Type[C]" has no attribute "A"
 
 [case testNamedTupleInFunction]
 from typing import NamedTuple
@@ -434,6 +434,20 @@ def f() -> None:
     A = NamedTuple('A', [('x', int)])
 A  # E: Name 'A' is not defined
 
+[case testNamedTupleForwardAsUpperBound]
+from typing import NamedTuple, TypeVar, Generic
+T = TypeVar('T', bound='M')
+class G(Generic[T]):
+    x: T
+
+yb: G[int] # E: Type argument "builtins.int" of "G" must be a subtype of "Tuple[builtins.int, fallback=__main__.M]"
+yg: G[M]
+reveal_type(G[M]().x.x)  # E: Revealed type is 'builtins.int'
+reveal_type(G[M]().x[0])  # E: Revealed type is 'builtins.int'
+
+M = NamedTuple('M', [('x', int)])
+[out]
+
 [case testNamedTupleWithImportCycle]
 import a
 [file a.py]
@@ -447,14 +461,173 @@ class X(N): pass
 import a
 
 def f(x: a.X) -> None:
-    # The type of x is broken (https://github.com/python/mypy/issues/3016) but we need to
-    # do something reasonable here to avoid a regression.
     reveal_type(x)
     x = a.X(1)
     reveal_type(x)
 [out]
-tmp/b.py:6: error: Revealed type is 'a.X'
-tmp/b.py:8: error: Revealed type is 'Tuple[Any, fallback=a.X]'
+tmp/b.py:4: error: Revealed type is 'Tuple[Any, fallback=a.X]'
+tmp/b.py:6: error: Revealed type is 'Tuple[Any, fallback=a.X]'
+
+[case testNamedTupleWithImportCycle2]
+import a
+[file a.py]
+from collections import namedtuple
+from b import f
+
+N = namedtuple('N', 'a')
+[file b.py]
+import a
+
+def f(x: a.N) -> None:
+    reveal_type(x)
+    x = a.N(1)
+    reveal_type(x)
+[out]
+tmp/b.py:4: error: Revealed type is 'Tuple[Any, fallback=a.N]'
+tmp/b.py:6: error: Revealed type is 'Tuple[Any, fallback=a.N]'
+
+[case testSimpleSelfReferrentialNamedTuple]
+from typing import NamedTuple
+class MyNamedTuple(NamedTuple):
+    parent: 'MyNamedTuple'
+
+def bar(nt: MyNamedTuple) -> MyNamedTuple:
+    return nt
+
+x: MyNamedTuple
+reveal_type(x.parent)
+[out]
+main:2: error: Recursive types not fully supported yet, nested types replaced with "Any"
+main:9: error: Revealed type is 'Tuple[Any, fallback=__main__.MyNamedTuple]'
+
+-- Some crazy self-referential named tuples and typed dicts
+-- to make sure that everything works
+
+[case testCrossFileNamedTupleForwardRefs]
+import a
+[file a.py]
+import b
+from typing import Any, NamedTuple
+
+class A:
+    def a(self, b: 'b.B') -> str:
+        return 'a'
+ATuple = NamedTuple('ATuple', [('a', Any)])
+
+[file b.py]
+import a
+
+class B:
+    def b(self, a: 'a.A') -> str:
+        return 'b'
+    def aWithTuple(self, atuple: 'a.ATuple') -> str:
+        return 'a'
+[out]
+
+[case testSelfRefNT1]
+from typing import Tuple, NamedTuple
+
+Node = NamedTuple('Node', [ # E: Recursive types not fully supported yet, nested types replaced with "Any"
+        ('name', str),
+        ('children', Tuple['Node', ...]),
+    ])
+n: Node
+reveal_type(n) # E: Revealed type is 'Tuple[builtins.str, builtins.tuple[Tuple[builtins.str, builtins.tuple[Any], fallback=__main__.Node]], fallback=__main__.Node]'
+[builtins fixtures/tuple.pyi]
+
+
+[case testSelfRefNT2]
+from typing import Tuple, NamedTuple
+
+A = NamedTuple('A', [ # E
+        ('x', str),
+        ('y', Tuple['B', ...]),
+    ])
+class B(NamedTuple): # E
+    x: A
+    y: int
+
+n: A
+reveal_type(n) # E: Revealed type is 'Tuple[builtins.str, builtins.tuple[Tuple[Tuple[builtins.str, builtins.tuple[Any], fallback=__main__.A], builtins.int, fallback=__main__.B]], fallback=__main__.A]'
+[builtins fixtures/tuple.pyi]
+[out]
+main:3: error: Recursive types not fully supported yet, nested types replaced with "Any"
+main:7: error: Recursive types not fully supported yet, nested types replaced with "Any"
+
+[case testSelfRefNT3]
+from typing import NamedTuple, Tuple
+
+class B(NamedTuple): # E
+    x: Tuple[A, int]
+    y: int
+
+A = NamedTuple('A', [ # E: Recursive types not fully supported yet, nested types replaced with "Any"
+        ('x', str),
+        ('y', 'B'),
+    ])
+n: B
+m: A
+reveal_type(n.x) # E: Revealed type is 'Tuple[Tuple[builtins.str, Tuple[Tuple[Any, builtins.int], builtins.int, fallback=__main__.B], fallback=__main__.A], builtins.int]'
+reveal_type(m[0]) # E: Revealed type is 'builtins.str'
+lst = [m, n]
+reveal_type(lst[0]) # E: Revealed type is 'Tuple[builtins.object, builtins.object]'
+[builtins fixtures/tuple.pyi]
+[out]
+main:3: error: Recursive types not fully supported yet, nested types replaced with "Any"
+
+[case testSelfRefNT4]
+from typing import NamedTuple
+
+class B(NamedTuple): # E
+    x: A
+    y: int
+
+class A(NamedTuple): # E
+    x: str
+    y: B
+
+n: A
+reveal_type(n.y[0]) # E: Revealed type is 'Tuple[builtins.str, Tuple[Any, builtins.int, fallback=__main__.B], fallback=__main__.A]'
+[builtins fixtures/tuple.pyi]
+[out]
+main:3: error: Recursive types not fully supported yet, nested types replaced with "Any"
+main:7: error: Recursive types not fully supported yet, nested types replaced with "Any"
+
+[case testSelfRefNT5]
+from typing import NamedTuple
+
+B = NamedTuple('B', [ # E: Recursive types not fully supported yet, nested types replaced with "Any"
+        ('x', A),
+        ('y', int),
+    ])
+A = NamedTuple('A', [ # E: Recursive types not fully supported yet, nested types replaced with "Any"
+        ('x', str),
+        ('y', 'B'),
+    ])
+n: A
+def f(m: B) -> None: pass
+reveal_type(n) # E: Revealed type is 'Tuple[builtins.str, Tuple[Tuple[builtins.str, Tuple[Any, builtins.int, fallback=__main__.B], fallback=__main__.A], builtins.int, fallback=__main__.B], fallback=__main__.A]'
+reveal_type(f) # E: Revealed type is 'def (m: Tuple[Tuple[builtins.str, Tuple[Any, builtins.int, fallback=__main__.B], fallback=__main__.A], builtins.int, fallback=__main__.B])'
+[builtins fixtures/tuple.pyi]
+
+[case testRecursiveNamedTupleInBases]
+from typing import List, NamedTuple, Union
+
+Exp = Union['A', 'B']  # E: Recursive types not fully supported yet, nested types replaced with "Any"
+class A(NamedTuple('A', [('attr', List[Exp])])): pass
+class B(NamedTuple('B', [('val', object)])): pass
+
+def my_eval(exp: Exp) -> int:
+    reveal_type(exp) # E: Revealed type is 'Union[Tuple[builtins.list[Any], fallback=__main__.A], Tuple[builtins.object, fallback=__main__.B]]'
+    if isinstance(exp, A):
+        my_eval(exp[0][0])
+        return my_eval(exp.attr[0])
+    if isinstance(exp, B):
+        return exp.val  # E: Incompatible return value type (got "object", expected "int")
+
+my_eval(A([B(1), B(2)])) # OK
+[builtins fixtures/isinstancelist.pyi]
+[out]
 
 [case testForwardReferenceInNamedTuple]
 from typing import NamedTuple
diff --git a/test-data/unit/check-newsyntax.test b/test-data/unit/check-newsyntax.test
index 645fbe5..2ed26e4 100644
--- a/test-data/unit/check-newsyntax.test
+++ b/test-data/unit/check-newsyntax.test
@@ -29,7 +29,7 @@ from typing import Dict, Any
 d: Dict[int, str] = {}
 d[42] = 'ab'
 d[42] = 42  # E: Incompatible types in assignment (expression has type "int", target has type "str")
-d['ab'] = 'ab'  # E: Invalid index type "str" for Dict[int, str]; expected type "int"
+d['ab'] = 'ab'  # E: Invalid index type "str" for "Dict[int, str]"; expected type "int"
 [builtins fixtures/dict.pyi]
 [out]
 
@@ -61,29 +61,29 @@ TstInstance().a = 'ab'
 [case testNewSyntaxWithClassVars]
 # flags: --strict-optional --python-version 3.6
 class CCC:
-    a: str = None  # E: Incompatible types in assignment (expression has type None, variable has type "str")
+    a: str = None  # E: Incompatible types in assignment (expression has type "None", variable has type "str")
 [out]
 
 [case testNewSyntaxWithStrictOptional]
 # flags: --strict-optional --python-version 3.6
 strict: int
-strict = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
-strict2: int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+strict = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
+strict2: int = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
 [out]
 
 [case testNewSyntaxWithStrictOptionalFunctions]
 # flags: --strict-optional --python-version 3.6
 def f() -> None:
     x: int
-    x = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+    x = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
 [out]
 
 [case testNewSyntaxWithStrictOptionalClasses]
 # flags: --strict-optional --python-version 3.6
 class C:
     def meth(self) -> None:
-        x: int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
-        self.x: int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+        x: int = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
+        self.x: int = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
 [out]
 
 [case testNewSyntaxSpecialAssign]
diff --git a/test-data/unit/check-newtype.test b/test-data/unit/check-newtype.test
index 2af1fbf..badd948 100644
--- a/test-data/unit/check-newtype.test
+++ b/test-data/unit/check-newtype.test
@@ -55,7 +55,7 @@ from typing import NewType, List
 UserId = NewType('UserId', int)
 IdList = NewType('IdList', List[UserId])
 
-bad1 = IdList([1])  # E: List item 0 has incompatible type "int"
+bad1 = IdList([1])  # E: List item 0 has incompatible type "int"; expected "UserId"
 
 foo = IdList([])
 foo.append(3)  # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "UserId"
@@ -276,17 +276,17 @@ f = NewType('f', tp=int)  # E: NewType(...) expects exactly two positional argum
 
 [case testNewTypeWithAnyFails]
 from typing import NewType, Any
-A = NewType('A', Any)  # E: Argument 2 to NewType(...) must be subclassable (got Any)
+A = NewType('A', Any)  # E: Argument 2 to NewType(...) must be subclassable (got "Any")
 [out]
 
 [case testNewTypeWithUnionsFails]
 from typing import NewType, Union
-Foo = NewType('Foo', Union[int, float])  # E: Argument 2 to NewType(...) must be subclassable (got Union[builtins.int, builtins.float])
+Foo = NewType('Foo', Union[int, float])  # E: Argument 2 to NewType(...) must be subclassable (got "Union[int, float]")
 [out]
 
 [case testNewTypeWithTypeTypeFails]
 from typing import NewType, Type
-Foo = NewType('Foo', Type[int])  # E: Argument 2 to NewType(...) must be subclassable (got Type[builtins.int])
+Foo = NewType('Foo', Type[int])  # E: Argument 2 to NewType(...) must be subclassable (got "Type[int]")
 a = Foo(type(3))
 [builtins fixtures/args.pyi]
 [out]
@@ -332,6 +332,22 @@ B = NewType('B', A)
 class C(B): pass  # E: Cannot subclass NewType
 [out]
 
+[case testCannotUseNewTypeWithProtocols]
+from typing import Protocol, NewType
+
+class P(Protocol):
+    attr: int
+class D:
+    attr: int
+
+C = NewType('C', P)  # E: NewType cannot be used with protocol classes
+
+x: C = C(D())  # We still accept this, treating 'C' as non-protocol subclass.
+reveal_type(x.attr)  # E: Revealed type is 'builtins.int'
+x.bad_attr  # E: "C" has no attribute "bad_attr"
+C(1)  # E: Argument 1 to "C" has incompatible type "int"; expected "P"
+[out]
+
 [case testNewTypeAny]
 from typing import NewType
 Any = NewType('Any', int)
diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test
index ee05524..1386432 100644
--- a/test-data/unit/check-optional.test
+++ b/test-data/unit/check-optional.test
@@ -2,11 +2,11 @@
 
 [case testImplicitNoneType]
 x = None
-x()  # E: None not callable
+x()  # E: "None" not callable
 
 [case testExplicitNoneType]
 x = None  # type: None
-x()  # E: None not callable
+x()  # E: "None" not callable
 
 [case testNoneMemberOfOptional]
 from typing import Optional
@@ -19,12 +19,12 @@ x = 0  # type: Optional[int]
 [case testNoneNotMemberOfType]
 x = None  # type: int
 [out]
-main:1: error: Incompatible types in assignment (expression has type None, variable has type "int")
+main:1: error: Incompatible types in assignment (expression has type "None", variable has type "int")
 
 [case testTypeNotMemberOfNone]
 x = 0  # type: None
 [out]
-main:1: error: Incompatible types in assignment (expression has type "int", variable has type None)
+main:1: error: Incompatible types in assignment (expression has type "int", variable has type "None")
 
 [case testOptionalNotMemberOfType]
 from typing import Optional
@@ -127,7 +127,7 @@ f(None)
 
 [case testNoInferOptionalFromDefaultNone]
 # flags: --no-implicit-optional
-def f(x: int = None) -> None:  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+def f(x: int = None) -> None:  # E: Incompatible default for argument "x" (default has type "None", argument has type "int")
   pass
 [out]
 
@@ -140,7 +140,7 @@ f(None)
 
 [case testNoInferOptionalFromDefaultNoneComment]
 # flags: --no-implicit-optional
-def f(x=None):  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+def f(x=None):  # E: Incompatible default for argument "x" (default has type "None", argument has type "int")
   # type: (int) -> None
   pass
 [out]
@@ -180,13 +180,13 @@ reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
 
 [case testInferOptionalListType]
 x = [None]
-x.append(1)  # E: Argument 1 to "append" of "list" has incompatible type "int"; expected None
+x.append(1)  # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "None"
 [builtins fixtures/list.pyi]
 
 [case testInferNonOptionalListType]
 x = []
 x.append(1)
-x()  # E: List[int] not callable
+x()  # E: "List[int]" not callable
 [builtins fixtures/list.pyi]
 
 [case testInferOptionalDictKeyValueTypes]
@@ -194,13 +194,13 @@ x = {None: None}
 x["bar"] = 1
 [builtins fixtures/dict.pyi]
 [out]
-main:2: error: Invalid index type "str" for Dict[None, None]; expected type None
-main:2: error: Incompatible types in assignment (expression has type "int", target has type None)
+main:2: error: Invalid index type "str" for "Dict[None, None]"; expected type "None"
+main:2: error: Incompatible types in assignment (expression has type "int", target has type "None")
 
 [case testInferNonOptionalDictType]
 x = {}
 x["bar"] = 1
-x()  # E: Dict[str, int] not callable
+x()  # E: "Dict[str, int]" not callable
 [builtins fixtures/dict.pyi]
 
 [case testNoneClassVariable]
@@ -215,7 +215,7 @@ from typing import Optional
 class C:
     x = None  # type: int
     def __init__(self) -> None:
-        self.x = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+        self.x = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
 [out]
 
 [case testMultipleAssignmentNoneClassVariableInInit]
@@ -223,8 +223,8 @@ from typing import Optional
 class C:
     x, y = None, None  # type: int, str
     def __init__(self) -> None:
-        self.x = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
-        self.y = None  # E: Incompatible types in assignment (expression has type None, variable has type "str")
+        self.x = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
+        self.y = None  # E: Incompatible types in assignment (expression has type "None", variable has type "str")
 [out]
 
 [case testOverloadWithNone]
@@ -455,7 +455,7 @@ reveal_type(l)  # E: Revealed type is 'builtins.list[typing.Generator*[builtins.
 [builtins fixtures/list.pyi]
 
 [case testNoneListTernary]
-x = [None] if "" else [1]  # E: List item 0 has incompatible type "int"
+x = [None] if "" else [1]  # E: List item 0 has incompatible type "int"; expected "None"
 [builtins fixtures/list.pyi]
 
 [case testListIncompatibleErrorMessage]
@@ -465,7 +465,7 @@ def foo(l: List[Callable[[], str]]) -> None: pass
 def f() -> int:
     return 42
 
-foo([f])  # E: List item 0 has incompatible type Callable[[], int]
+foo([f])  # E: List item 0 has incompatible type "Callable[[], int]"; expected "Callable[[], str]"
 [builtins fixtures/list.pyi]
 
 [case testInferEqualsNotOptional]
@@ -548,6 +548,21 @@ if x is not None:
 
 [builtins fixtures/ops.pyi]
 
+[case testOptionalTypeNarrowedInBooleanStatement]
+from typing import Optional
+
+x: Optional[int] = None
+x is not None and x + 42
+x is not None and x + '42'  # E: Unsupported operand types for + ("int" and "str")
+[builtins fixtures/isinstance.pyi]
+
+[case testInvalidBooleanBranchIgnored]
+from typing import Optional
+
+x = None
+x is not None and x + 42
+[builtins fixtures/isinstance.pyi]
+
 [case testOptionalLambdaInference]
 from typing import Optional, Callable
 f = None # type: Optional[Callable[[int], None]]
@@ -605,6 +620,20 @@ reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins
 y: Optional[Union[int, None]]
 reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
 
+[case testOverloadWithNoneAndOptional]
+from typing import overload, Optional
+
+@overload
+def f(x: int) -> str: ...
+@overload
+def f(x: Optional[int]) -> Optional[str]: ...
+def f(x): return x
+
+reveal_type(f(1)) # E: Revealed type is 'builtins.str'
+reveal_type(f(None)) # E: Revealed type is 'Union[builtins.str, builtins.None]'
+x: Optional[int]
+reveal_type(f(x)) # E: Revealed type is 'Union[builtins.str, builtins.None]'
+
 [case testUnionTruthinessTracking]
 from typing import Optional, Any
 def test_or_shortcut(value: Optional[Any]) -> None:
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index f3e5b99..74b39fd 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -447,7 +447,7 @@ from foo import *
 from typing import overload
 t, a = None, None # type: (type, A)
 
-a = A # E: Incompatible types in assignment (expression has type Type[A], variable has type "A")
+a = A # E: Incompatible types in assignment (expression has type "Type[A]", variable has type "A")
 t = A
 
 class A:
@@ -610,7 +610,7 @@ n = 1
 m = 1
 n = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
 m = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
-f(list_object) # E: Argument 1 to "f" has incompatible type List[object]; expected List[int]
+f(list_object) # E: Argument 1 to "f" has incompatible type "List[object]"; expected "List[int]"
 [builtins fixtures/list.pyi]
 
 [case testOverlappingOverloadSignatures]
@@ -901,7 +901,7 @@ def f(x: int, y: List[int] = None) -> int: pass
 def f(x: int, y: List[str] = None) -> int: pass
 f(y=[1], x=0)() # E: "int" not callable
 f(y=[''], x=0)() # E: "int" not callable
-a = f(y=[['']], x=0) # E: List item 0 has incompatible type List[str]
+a = f(y=[['']], x=0) # E: List item 0 has incompatible type "List[str]"; expected "int"
 a() # E: "int" not callable
 [builtins fixtures/list.pyi]
 
@@ -957,8 +957,8 @@ def g(x: U, y: V) -> None:
     f(x)() # E: "mystr" not callable
     f(y) # E: No overload variant of "f" matches argument types [V`-2]
     a = f([x]) # E: "f" does not return a value
-    f([y]) # E: Type argument 1 of "f" has incompatible value "V"
-    f([x, y]) # E: Type argument 1 of "f" has incompatible value "object"
+    f([y]) # E: Value of type variable "T" of "f" cannot be "V"
+    f([x, y]) # E: Value of type variable "T" of "f" cannot be "object"
 [builtins fixtures/list.pyi]
 [out]
 
@@ -998,10 +998,10 @@ def g(x: int, *a: AnyStr) -> None: pass
 
 g('foo')
 g('foo', 'bar')
-g('foo', b'bar') # E: Type argument 1 of "g" has incompatible value "object"
+g('foo', b'bar') # E: Value of type variable "AnyStr" of "g" cannot be "object"
 g(1)
 g(1, 'foo')
-g(1, 'foo', b'bar') # E: Type argument 1 of "g" has incompatible value "object"
+g(1, 'foo', b'bar') # E: Value of type variable "AnyStr" of "g" cannot be "object"
 [builtins fixtures/primitives.pyi]
 
 [case testBadOverlapWithTypeVarsWithValues]
@@ -1117,14 +1117,14 @@ def f(x: int, y: Tuple[str, ...]) -> None: pass
 @overload
 def f(x: int, y: str) -> None: pass
 f(1, ('2', '3'))
-f(1, (2, '3')) # E: Argument 2 to "f" has incompatible type "Tuple[int, str]"; expected Tuple[str, ...]
+f(1, (2, '3')) # E: Argument 2 to "f" has incompatible type "Tuple[int, str]"; expected "Tuple[str, ...]"
 f(1, ('2',))
 f(1, '2')
-f(1, (2, 3)) # E: Argument 2 to "f" has incompatible type "Tuple[int, int]"; expected Tuple[str, ...]
+f(1, (2, 3)) # E: Argument 2 to "f" has incompatible type "Tuple[int, int]"; expected "Tuple[str, ...]"
 x = ('2', '3')  # type: Tuple[str, ...]
 f(1, x)
 y = (2, 3)  # type: Tuple[int, ...]
-f(1, y) # E: Argument 2 to "f" has incompatible type Tuple[int, ...]; expected Tuple[str, ...]
+f(1, y) # E: Argument 2 to "f" has incompatible type "Tuple[int, ...]"; expected "Tuple[str, ...]"
 [builtins fixtures/tuple.pyi]
 
 [case testCallableSpecificOverload]
@@ -1157,3 +1157,118 @@ class Test(object):
 
 t = Test()
 reveal_type(t.do_chain)  # E: Revealed type is '__main__.Chain'
+
+[case testOverloadWithOverlappingItemsAndAnyArgument1]
+from typing import overload, Any
+
+@overload
+def f(x: int) -> int: ...
+@overload
+def f(x: object) -> object: ...
+def f(x): pass
+
+a: Any
+reveal_type(f(a))  # E: Revealed type is 'Any'
+
+[case testOverloadWithOverlappingItemsAndAnyArgument2]
+from typing import overload, Any
+
+@overload
+def f(x: int) -> int: ...
+@overload
+def f(x: float) -> float: ...
+def f(x): pass
+
+a: Any
+reveal_type(f(a))  # E: Revealed type is 'Any'
+
+[case testOverloadWithOverlappingItemsAndAnyArgument3]
+from typing import overload, Any
+
+@overload
+def f(x: int) -> int: ...
+@overload
+def f(x: str) -> str: ...
+def f(x): pass
+
+a: Any
+reveal_type(f(a))  # E: Revealed type is 'Any'
+
+[case testOverloadWithOverlappingItemsAndAnyArgument4]
+from typing import overload, Any
+
+@overload
+def f(x: int, y: int, z: str) -> int: ...
+@overload
+def f(x: object, y: int, z: str) -> object: ...
+def f(x): pass
+
+a: Any
+# Any causes ambiguity
+reveal_type(f(a, 1, ''))  # E: Revealed type is 'Any'
+# Any causes no ambiguity
+reveal_type(f(1, a, a))  # E: Revealed type is 'builtins.int'
+reveal_type(f('', a, a))  # E: Revealed type is 'builtins.object'
+# Like above, but use keyword arguments.
+reveal_type(f(y=1, z='', x=a))  # E: Revealed type is 'Any'
+reveal_type(f(y=a, z='', x=1))  # E: Revealed type is 'builtins.int'
+reveal_type(f(z='', x=1, y=a))  # E: Revealed type is 'builtins.int'
+reveal_type(f(z='', x=a, y=1))  # E: Revealed type is 'Any'
+
+[case testOverloadWithOverlappingItemsAndAnyArgument5]
+from typing import overload, Any, Union
+
+@overload
+def f(x: int) -> int: ...
+@overload
+def f(x: Union[int, float]) -> float: ...
+def f(x): pass
+
+a: Any
+reveal_type(f(a))  # E: Revealed type is 'Any'
+
+[case testOverloadWithOverlappingItemsAndAnyArgument6]
+from typing import overload, Any
+
+@overload
+def f(x: int, y: int) -> int: ...
+@overload
+def f(x: float, y: int, z: str) -> float: ...
+@overload
+def f(x: object, y: int, z: str, a: None) -> object: ...
+def f(x): pass
+
+a: Any
+# Any causes ambiguity
+reveal_type(f(*a))  # E: Revealed type is 'Any'
+reveal_type(f(a, *a))  # E: Revealed type is 'Any'
+reveal_type(f(1, *a))  # E: Revealed type is 'Any'
+reveal_type(f(1.1, *a))  # E: Revealed type is 'Any'
+reveal_type(f('', *a))  # E: Revealed type is 'builtins.object'
+
+[case testOverloadWithOverlappingItemsAndAnyArgument7]
+from typing import overload, Any
+
+@overload
+def f(x: int, y: int, z: int) -> int: ...
+@overload
+def f(x: object, y: int, z: int) -> object: ...
+def f(x): pass
+
+a: Any
+# TODO: We could infer 'int' here
+reveal_type(f(1, *a))  # E: Revealed type is 'Any'
+
+[case testOverloadWithOverlappingItemsAndAnyArgument8]
+from typing import overload, Any
+
+@overload
+def f(x: int, y: int, z: int) -> str: ...
+@overload
+def f(x: object, y: int, z: int) -> str: ...
+def f(x): pass
+
+a: Any
+# The return type is not ambiguous so Any arguments cause no ambiguity.
+reveal_type(f(a, 1, 1)) # E: Revealed type is 'builtins.str'
+reveal_type(f(1, *a))  # E: Revealed type is 'builtins.str'
diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test
new file mode 100644
index 0000000..1da2c1f
--- /dev/null
+++ b/test-data/unit/check-protocols.test
@@ -0,0 +1,2181 @@
+-- Simple protocol types
+-- ---------------------
+
+[case testCannotInstantiateProtocol]
+from typing import Protocol
+
+class P(Protocol):
+    def meth(self) -> None:
+        pass
+
+P() # E: Cannot instantiate protocol class "P"
+
+[case testSimpleProtocolOneMethod]
+from typing import Protocol
+
+class P(Protocol):
+    def meth(self) -> None:
+        pass
+
+class B: pass
+class C:
+    def meth(self) -> None:
+        pass
+
+x: P
+def fun(x: P) -> None:
+    x.meth()
+    x.meth(x) # E: Too many arguments for "meth" of "P"
+    x.bad # E: "P" has no attribute "bad"
+
+x = C()
+x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P")
+
+fun(C())
+fun(B()) # E: Argument 1 to "fun" has incompatible type "B"; expected "P"
+
+def fun2() -> P:
+    return C()
+def fun3() -> P:
+    return B() # E: Incompatible return value type (got "B", expected "P")
+
+[case testSimpleProtocolOneAbstractMethod]
+from typing import Protocol
+from abc import abstractmethod
+
+class P(Protocol):
+    @abstractmethod
+    def meth(self) -> None:
+        pass
+
+class B: pass
+class C:
+    def meth(self) -> None:
+        pass
+class D(B):
+    def meth(self) -> None:
+        pass
+
+x: P
+def fun(x: P) -> None:
+    x.meth()
+    x.meth(x) # E: Too many arguments for "meth" of "P"
+    x.bad # E: "P" has no attribute "bad"
+
+x = C()
+x = D()
+x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P")
+fun(C())
+fun(D())
+fun(B()) # E: Argument 1 to "fun" has incompatible type "B"; expected "P"
+fun(x)
+
+[case testProtocolMethodBodies]
+from typing import Protocol, List
+
+class P(Protocol):
+    def meth(self) -> int:
+        return 'no way' # E: Incompatible return value type (got "str", expected "int")
+
+# explicit ellipsis is OK in protocol methods
+class P2(Protocol):
+    def meth2(self) -> List[int]:
+        ...
+[builtins fixtures/list.pyi]
+
+[case testSimpleProtocolOneMethodOverride]
+from typing import Protocol, Union
+
+class P(Protocol):
+    def meth(self) -> Union[int, str]:
+        pass
+class SubP(P, Protocol):
+    def meth(self) -> int:
+        pass
+
+class B: pass
+class C:
+    def meth(self) -> int:
+        pass
+z: P
+x: SubP
+def fun(x: SubP) -> str:
+    x.bad # E: "SubP" has no attribute "bad"
+    return x.meth() # E: Incompatible return value type (got "int", expected "str")
+
+z = x
+x = C()
+x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "SubP")
+
+reveal_type(fun(C())) # E: Revealed type is 'builtins.str'
+fun(B()) # E: Argument 1 to "fun" has incompatible type "B"; expected "SubP"
+
+[case testSimpleProtocolTwoMethodsMerge]
+from typing import Protocol
+
+class P1(Protocol):
+    def meth1(self) -> int:
+        pass
+class P2(Protocol):
+    def meth2(self) -> str:
+        pass
+class P(P1, P2, Protocol): pass
+
+class B: pass
+class C1:
+    def meth1(self) -> int:
+        pass
+class C2(C1):
+    def meth2(self) -> str:
+        pass
+class C:
+    def meth1(self) -> int:
+        pass
+    def meth2(self) -> str:
+        pass
+
+class AnotherP(Protocol):
+    def meth1(self) -> int:
+        pass
+    def meth2(self) -> str:
+        pass
+
+x: P
+reveal_type(x.meth1())  # E: Revealed type is 'builtins.int'
+reveal_type(x.meth2())  # E: Revealed type is 'builtins.str'
+
+c: C
+c1: C1
+c2: C2
+y: AnotherP
+
+x = c
+x = B()  # E: Incompatible types in assignment (expression has type "B", variable has type "P")
+x = c1 # E: Incompatible types in assignment (expression has type "C1", variable has type "P") \
+    # N: 'C1' is missing following 'P' protocol member: \
+    # N:     meth2
+x = c2
+x = y
+y = x
+
+[case testSimpleProtocolTwoMethodsExtend]
+from typing import Protocol
+
+class P1(Protocol):
+    def meth1(self) -> int:
+        pass
+class P2(P1, Protocol):
+    def meth2(self) -> str:
+        pass
+
+class Cbad:
+    def meth1(self) -> int:
+        pass
+
+class C:
+    def meth1(self) -> int:
+        pass
+    def meth2(self) -> str:
+        pass
+
+x: P2
+reveal_type(x.meth1()) # E: Revealed type is 'builtins.int'
+reveal_type(x.meth2()) # E: Revealed type is 'builtins.str'
+
+x = C() # OK
+x = Cbad() # E: Incompatible types in assignment (expression has type "Cbad", variable has type "P2") \
+    # N: 'Cbad' is missing following 'P2' protocol member: \
+    # N:     meth2
+
+[case testProtocolMethodVsAttributeErrors]
+from typing import Protocol
+
+class P(Protocol):
+    def meth(self) -> int:
+        pass
+class C:
+    meth: int
+x: P = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P") \
+           # N: Following member(s) of "C" have conflicts: \
+           # N:     meth: expected "Callable[[], int]", got "int"
+
+[case testProtocolMethodVsAttributeErrors2]
+from typing import Protocol
+
+class P(Protocol):
+    @property
+    def meth(self) -> int:
+        pass
+class C:
+    def meth(self) -> int:
+        pass
+x: P = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P") \
+           # N: Following member(s) of "C" have conflicts: \
+           # N:     meth: expected "int", got "Callable[[], int]"
+[builtins fixtures/property.pyi]
+
+[case testCannotAssignNormalToProtocol]
+from typing import Protocol
+
+class P(Protocol):
+    def meth(self) -> int:
+        pass
+class C:
+    def meth(self) -> int:
+        pass
+
+x: C
+y: P
+x = y # E: Incompatible types in assignment (expression has type "P", variable has type "C")
+
+[case testIndependentProtocolSubtyping]
+from typing import Protocol
+
+class P1(Protocol):
+    def meth(self) -> int:
+        pass
+class P2(Protocol):
+    def meth(self) -> int:
+        pass
+
+x1: P1
+x2: P2
+
+x1 = x2
+x2 = x1
+
+def f1(x: P1) -> None: pass
+def f2(x: P2) -> None: pass
+
+f1(x2)
+f2(x1)
+
+[case testNoneDisablesProtocolImplementation]
+from typing import Protocol
+
+class MyHashable(Protocol):
+    def __my_hash__(self) -> int:
+        return 0
+
+class C:
+    __my_hash__ = None
+
+var: MyHashable = C()  # E: Incompatible types in assignment (expression has type "C", variable has type "MyHashable")
+
+[case testNoneDisablesProtocolSubclassingWithStrictOptional]
+# flags: --strict-optional
+from typing import Protocol
+
+class MyHashable(Protocol):
+    def __my_hash__(self) -> int:
+        return 0
+
+class C(MyHashable):
+    __my_hash__ = None  # E: Incompatible types in assignment \
+(expression has type "None", base class "MyHashable" defined the type as "Callable[[MyHashable], int]")
+
+[case testProtocolsWithNoneAndStrictOptional]
+# flags: --strict-optional
+from typing import Protocol
+class P(Protocol):
+    x = 0  # type: int
+
+class C:
+    x = None
+
+x: P = C() # Error!
+def f(x: P) -> None: pass
+f(C()) # Error!
+[out]
+main:9: error: Incompatible types in assignment (expression has type "C", variable has type "P")
+main:9: note: Following member(s) of "C" have conflicts:
+main:9: note:     x: expected "int", got "None"
+main:11: error: Argument 1 to "f" has incompatible type "C"; expected "P"
+main:11: note: Following member(s) of "C" have conflicts:
+main:11: note:     x: expected "int", got "None"
+
+-- Semanal errors in protocol types
+-- --------------------------------
+
+[case testBasicSemanalErrorsInProtocols]
+from typing import Protocol, Generic, TypeVar, Iterable
+
+T = TypeVar('T', covariant=True)
+S = TypeVar('S', covariant=True)
+
+class P1(Protocol[T, T]): # E: Duplicate type variables in Generic[...] or Protocol[...]
+    def meth(self) -> T:
+        pass
+
+class P2(Protocol[T], Protocol[S]): # E: Only single Generic[...] or Protocol[...] can be in bases
+    def meth(self) -> T:
+        pass
+
+class P3(Protocol[T], Generic[S]): # E: Only single Generic[...] or Protocol[...] can be in bases
+    def meth(self) -> T:
+        pass
+
+class P4(Protocol[T]):
+    attr: Iterable[S] # E: Invalid type "__main__.S"
+
+class P5(Iterable[S], Protocol[T]): # E: If Generic[...] or Protocol[...] is present it should list all type variables
+    def meth(self) -> T:
+        pass
+
+[case testProhibitSelfDefinitionInProtocols]
+from typing import Protocol
+
+class P(Protocol):
+    def __init__(self, a: int) -> None:
+        self.a = a # E: Protocol members cannot be defined via assignment to self \
+                   # E: "P" has no attribute "a"
+
+class B: pass
+class C:
+    def __init__(self, a: int) -> None:
+        pass
+
+x: P
+x = B()
+# The above has an incompatible __init__, but mypy ignores this for nominal subtypes?
+x = C(1)
+
+class P2(Protocol):
+    a: int
+    def __init__(self) -> None:
+        self.a = 1
+
+class B2(P2):
+    a: int
+
+x2: P2 = B2()  # OK
+
+[case testProtocolAndRuntimeAreDefinedAlsoInTypingExtensions]
+from typing_extensions import Protocol, runtime
+
+@runtime
+class P(Protocol):
+    def meth(self) -> int:
+        pass
+
+x: object
+if isinstance(x, P):
+    reveal_type(x)  # E: Revealed type is '__main__.P'
+    reveal_type(x.meth())  # E: Revealed type is 'builtins.int'
+
+class C:
+    def meth(self) -> int:
+        pass
+
+z: P = C()
+[builtins fixtures/dict.pyi]
+
+[case testProtocolsCannotInheritFromNormal]
+from typing import Protocol
+
+class C: pass
+class D: pass
+
+class P(C, Protocol): # E: All bases of a protocol must be protocols
+    attr: int
+
+class P2(P, D, Protocol): # E: All bases of a protocol must be protocols
+    pass
+
+P2() # E: Cannot instantiate abstract class 'P2' with abstract attribute 'attr'
+p: P2
+reveal_type(p.attr) # E: Revealed type is 'builtins.int'
+
+-- Generic protocol types
+-- ----------------------
+
+[case testGenericMethodWithProtocol]
+from typing import Protocol, TypeVar
+T = TypeVar('T')
+
+class P(Protocol):
+    def meth(self, x: int) -> int:
+        return x
+class C:
+    def meth(self, x: T) -> T:
+        return x
+
+x: P = C()
+
+[case testGenericMethodWithProtocol2]
+from typing import Protocol, TypeVar
+T = TypeVar('T')
+
+class P(Protocol):
+    def meth(self, x: T) -> T:
+        return x
+class C:
+    def meth(self, x: int) -> int:
+        return x
+
+x: P = C()
+[out]
+main:11: error: Incompatible types in assignment (expression has type "C", variable has type "P")
+main:11: note: Following member(s) of "C" have conflicts:
+main:11: note:     Expected:
+main:11: note:         def [T] meth(self, x: T) -> T
+main:11: note:     Got:
+main:11: note:         def meth(self, x: int) -> int
+
+[case testAutomaticProtocolVariance]
+from typing import TypeVar, Protocol
+
+T = TypeVar('T')
+
+# In case of these errors we proceed with declared variance.
+class Pco(Protocol[T]): # E: Invariant type variable 'T' used in protocol where covariant one is expected
+    def meth(self) -> T:
+        pass
+class Pcontra(Protocol[T]): # E: Invariant type variable 'T' used in protocol where contravariant one is expected
+    def meth(self, x: T) -> None:
+        pass
+class Pinv(Protocol[T]):
+    attr: T
+
+class A: pass
+class B(A): pass
+
+x1: Pco[B]
+y1: Pco[A]
+x1 = y1 # E: Incompatible types in assignment (expression has type "Pco[A]", variable has type "Pco[B]")
+y1 = x1 # E: Incompatible types in assignment (expression has type "Pco[B]", variable has type "Pco[A]")
+
+x2: Pcontra[B]
+y2: Pcontra[A]
+y2 = x2 # E: Incompatible types in assignment (expression has type "Pcontra[B]", variable has type "Pcontra[A]")
+x2 = y2 # E: Incompatible types in assignment (expression has type "Pcontra[A]", variable has type "Pcontra[B]")
+
+x3: Pinv[B]
+y3: Pinv[A]
+y3 = x3 # E: Incompatible types in assignment (expression has type "Pinv[B]", variable has type "Pinv[A]")
+x3 = y3 # E: Incompatible types in assignment (expression has type "Pinv[A]", variable has type "Pinv[B]")
+
+[case testProtocolVarianceWithCallableAndList]
+from typing import Protocol, TypeVar, Callable, List
+T = TypeVar('T')
+S = TypeVar('S')
+T_co = TypeVar('T_co', covariant=True)
+
+class P(Protocol[T, S]): # E: Invariant type variable 'T' used in protocol where covariant one is expected \
+                         # E: Invariant type variable 'S' used in protocol where contravariant one is expected
+    def fun(self, callback: Callable[[T], S]) -> None: pass
+
+class P2(Protocol[T_co]): # E: Covariant type variable 'T_co' used in protocol where invariant one is expected
+    lst: List[T_co]
+[builtins fixtures/list.pyi]
+
+[case testProtocolVarianceWithUnusedVariable]
+from typing import Protocol, TypeVar
+T = TypeVar('T')
+
+class P(Protocol[T]): # E: Invariant type variable 'T' used in protocol where covariant one is expected
+    attr: int
+
+[case testGenericProtocolsInference1]
+from typing import Protocol, Sequence, TypeVar
+
+T = TypeVar('T', covariant=True)
+
+class Closeable(Protocol[T]):
+    def close(self) -> T:
+        pass
+
+class F:
+    def close(self) -> int:
+        return 0
+
+def close(arg: Closeable[T]) -> T:
+    return arg.close()
+
+def close_all(args: Sequence[Closeable[T]]) -> T:
+    for arg in args:
+        arg.close()
+    return args[0].close()
+
+arg: Closeable[int]
+
+reveal_type(close(F())) # E: Revealed type is 'builtins.int*'
+reveal_type(close(arg)) # E: Revealed type is 'builtins.int*'
+reveal_type(close_all([F()])) # E: Revealed type is 'builtins.int*'
+reveal_type(close_all([arg])) # E: Revealed type is 'builtins.int*'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testProtocolGenericInference2]
+from typing import Generic, TypeVar, Protocol
+T = TypeVar('T')
+S = TypeVar('S')
+
+class P(Protocol[T, S]):
+    x: T
+    y: S
+
+class C:
+    x: int
+    y: int
+
+def fun3(x: P[T, T]) -> T:
+    pass
+reveal_type(fun3(C())) # E: Revealed type is 'builtins.int*'
+
+[case testProtocolGenericInferenceCovariant]
+from typing import Generic, TypeVar, Protocol
+T = TypeVar('T', covariant=True)
+S = TypeVar('S', covariant=True)
+U = TypeVar('U')
+
+class P(Protocol[T, S]):
+    def x(self) -> T: pass
+    def y(self) -> S: pass
+
+class C:
+    def x(self) -> int: pass
+    def y(self) -> int: pass
+
+def fun4(x: U, y: P[U, U]) -> U:
+    pass
+reveal_type(fun4('a', C())) # E: Revealed type is 'builtins.object*'
+
+[case testUnrealtedGenericProtolsEquivalent]
+from typing import TypeVar, Protocol
+T = TypeVar('T')
+
+class PA(Protocol[T]):
+    attr: int
+    def meth(self) -> T: pass
+    def other(self, arg: T) -> None: pass
+class PB(Protocol[T]): # exactly the same as above
+    attr: int
+    def meth(self) -> T: pass
+    def other(self, arg: T) -> None: pass
+
+def fun(x: PA[T]) -> PA[T]:
+    y: PB[T] = x
+    z: PB[T]
+    return z
+
+x: PA
+y: PB
+x = y
+y = x
+
+xi: PA[int]
+yi: PB[int]
+xi = yi
+yi = xi
+
+[case testGenericSubProtocols]
+from typing import TypeVar, Protocol, Tuple, Generic
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class P1(Protocol[T]):
+    attr1: T
+class P2(P1[T], Protocol[T, S]):
+    attr2: Tuple[T, S]
+
+class C:
+    def __init__(self, a1: int, a2: Tuple[int, int]) -> None:
+        self.attr1 = a1
+        self.attr2 = a2
+
+c: C
+var: P2[int, int] = c
+var2: P2[int, str] = c # E: Incompatible types in assignment (expression has type "C", variable has type "P2[int, str]") \
+                       # N: Following member(s) of "C" have conflicts: \
+                       # N:     attr2: expected "Tuple[int, str]", got "Tuple[int, int]"
+
+class D(Generic[T]):
+    attr1: T
+class E(D[T]):
+    attr2: Tuple[T, T]
+
+def f(x: T) -> T:
+    z: P2[T, T] = E[T]()
+    y: P2[T, T] = D[T]() # E: Incompatible types in assignment (expression has type "D[T]", variable has type "P2[T, T]") \
+                         # N: 'D' is missing following 'P2' protocol member: \
+                         # N:     attr2
+    return x
+[builtins fixtures/isinstancelist.pyi]
+
+[case testGenericSubProtocolsExtensionInvariant]
+from typing import TypeVar, Protocol, Union
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class P1(Protocol[T]):
+    attr1: T
+class P2(Protocol[T]):
+    attr2: T
+class P(P1[T], P2[S], Protocol):
+    pass
+
+class C:
+    attr1: int
+    attr2: str
+
+class A:
+    attr1: A
+class B:
+    attr2: B
+class D(A, B): pass
+
+x: P = D()  # Same as P[Any, Any]
+
+var: P[Union[int, P], Union[P, str]] = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P[Union[int, P[Any, Any]], Union[P[Any, Any], str]]") \
+                                           # N: Following member(s) of "C" have conflicts: \
+                                           # N:     attr1: expected "Union[int, P[Any, Any]]", got "int" \
+                                           # N:     attr2: expected "Union[P[Any, Any], str]", got "str"
+
+[case testGenericSubProtocolsExtensionCovariant]
+from typing import TypeVar, Protocol, Union
+
+T = TypeVar('T', covariant=True)
+S = TypeVar('S', covariant=True)
+
+class P1(Protocol[T]):
+    def attr1(self) -> T: pass
+class P2(Protocol[T]):
+    def attr2(self) -> T: pass
+class P(P1[T], P2[S], Protocol):
+    pass
+
+class C:
+    def attr1(self) -> int: pass
+    def attr2(self) -> str: pass
+
+var: P[Union[int, P], Union[P, str]] = C() # OK for covariant
+var2: P[Union[str, P], Union[P, int]] = C()
+[out]
+main:18: error: Incompatible types in assignment (expression has type "C", variable has type "P[Union[str, P[Any, Any]], Union[P[Any, Any], int]]")
+main:18: note: Following member(s) of "C" have conflicts:
+main:18: note:     Expected:
+main:18: note:         def attr1(self) -> Union[str, P[Any, Any]]
+main:18: note:     Got:
+main:18: note:         def attr1(self) -> int
+main:18: note:     Expected:
+main:18: note:         def attr2(self) -> Union[P[Any, Any], int]
+main:18: note:     Got:
+main:18: note:         def attr2(self) -> str
+
+[case testSelfTypesWithProtocolsBehaveAsWithNominal]
+from typing import Protocol, TypeVar
+
+T = TypeVar('T', bound=Shape)
+class Shape(Protocol):
+    def combine(self: T, other: T) -> T:
+        pass
+
+class NonProtoShape:
+    def combine(self: T, other: T) -> T:
+        pass
+class Circle:
+    def combine(self: T, other: Shape) -> T:
+        pass
+class Triangle:
+    def combine(self, other: Shape) -> Shape:
+        pass
+class Bad:
+    def combine(self, other: int) -> str:
+        pass
+
+def f(s: Shape) -> None: pass
+s: Shape
+
+f(NonProtoShape())
+f(Circle())
+s = Triangle()
+s = Bad()
+
+n2: NonProtoShape = s
+[out]
+main:26: error: Incompatible types in assignment (expression has type "Triangle", variable has type "Shape")
+main:26: note: Following member(s) of "Triangle" have conflicts:
+main:26: note:     Expected:
+main:26: note:         def combine(self, other: Triangle) -> Triangle
+main:26: note:     Got:
+main:26: note:         def combine(self, other: Shape) -> Shape
+main:27: error: Incompatible types in assignment (expression has type "Bad", variable has type "Shape")
+main:27: note: Following member(s) of "Bad" have conflicts:
+main:27: note:     Expected:
+main:27: note:         def combine(self, other: Bad) -> Bad
+main:27: note:     Got:
+main:27: note:         def combine(self, other: int) -> str
+main:29: error: Incompatible types in assignment (expression has type "Shape", variable has type "NonProtoShape")
+
+[case testBadVarianceInProtocols]
+from typing import Protocol, TypeVar
+
+T_co = TypeVar('T_co', covariant=True)
+T_contra = TypeVar('T_contra', contravariant=True)
+
+class Proto(Protocol[T_co, T_contra]):  # type: ignore
+    def one(self, x: T_co) -> None:  # E: Cannot use a covariant type variable as a parameter
+        pass
+    def other(self) -> T_contra:  # E: Cannot use a contravariant type variable as return type
+        pass
+
+# Check that we respect user overrides of variance after the errors are reported
+x: Proto[int, float]
+y: Proto[float, int]
+y = x # OK
+[builtins fixtures/list.pyi]
+
+[case testSubtleBadVarianceInProtocols]
+from typing import Protocol, TypeVar, Iterable, Sequence
+
+T_co = TypeVar('T_co', covariant=True)
+T_contra = TypeVar('T_contra', contravariant=True)
+
+class Proto(Protocol[T_co, T_contra]): # E: Covariant type variable 'T_co' used in protocol where contravariant one is expected \
+                                       # E: Contravariant type variable 'T_contra' used in protocol where covariant one is expected
+    def one(self, x: Iterable[T_co]) -> None:
+        pass
+    def other(self) -> Sequence[T_contra]:
+        pass
+
+# Check that we respect user overrides of variance after the errors are reported
+x: Proto[int, float]
+y: Proto[float, int]
+y = x # OK
+[builtins fixtures/list.pyi]
+
+-- Recursive protocol types
+-- ------------------------
+
+[case testRecursiveProtocols1]
+from typing import Protocol, Sequence, List, Generic, TypeVar
+
+T = TypeVar('T')
+
+class Traversable(Protocol):
+    @property
+    def leaves(self) -> Sequence[Traversable]: pass
+
+class C: pass
+
+class D(Generic[T]):
+    leaves: List[D[T]]
+
+t: Traversable
+t = D[int]() # OK
+t = C() # E: Incompatible types in assignment (expression has type "C", variable has type "Traversable")
+[builtins fixtures/list.pyi]
+
+[case testRecursiveProtocols2]
+from typing import Protocol, TypeVar
+
+T = TypeVar('T')
+class Linked(Protocol[T]):
+    val: T
+    def next(self) -> Linked[T]: pass
+
+class L:
+    val: int
+    def next(self) -> L: pass
+
+def last(seq: Linked[T]) -> T:
+    pass
+
+reveal_type(last(L())) # E: Revealed type is 'builtins.int*'
+[builtins fixtures/list.pyi]
+
+[case testRecursiveProtocolSubtleMismatch]
+from typing import Protocol, TypeVar
+
+T = TypeVar('T')
+class Linked(Protocol[T]):
+    val: T
+    def next(self) -> Linked[T]: pass
+class L:
+    val: int
+    def next(self) -> int: pass
+
+def last(seq: Linked[T]) -> T:
+    pass
+last(L()) # E: Argument 1 to "last" has incompatible type "L"; expected "Linked[<nothing>]"
+
+[case testMutuallyRecursiveProtocols]
+from typing import Protocol, Sequence, List
+
+class P1(Protocol):
+    @property
+    def attr1(self) -> Sequence[P2]: pass
+class P2(Protocol):
+    @property
+    def attr2(self) -> Sequence[P1]: pass
+
+class C: pass
+class A:
+    attr1: List[B]
+class B:
+    attr2: List[A]
+
+t: P1
+t = A() # OK
+t = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P1")
+t = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P1")
+[builtins fixtures/list.pyi]
+
+[case testMutuallyRecursiveProtocolsTypesWithSubteMismatch]
+from typing import Protocol, Sequence, List
+
+class P1(Protocol):
+    @property
+    def attr1(self) -> Sequence[P2]: pass
+class P2(Protocol):
+    @property
+    def attr2(self) -> Sequence[P1]: pass
+
+class C: pass
+class A:
+    attr1: List[B]
+class B:
+    attr2: List[C]
+
+t: P1
+t = A() # E: Incompatible types in assignment (expression has type "A", variable has type "P1") \
+        # N: Following member(s) of "A" have conflicts: \
+        # N:     attr1: expected "Sequence[P2]", got "List[B]"
+[builtins fixtures/list.pyi]
+
+[case testMutuallyRecursiveProtocolsTypesWithSubteMismatchWriteable]
+from typing import Protocol
+
+class P1(Protocol):
+    @property
+    def attr1(self) -> P2: pass
+class P2(Protocol):
+    attr2: P1
+
+class A:
+    attr1: B
+class B:
+    attr2: A
+
+x: P1 = A() # E: Incompatible types in assignment (expression has type "A", variable has type "P1") \
+            # N: Following member(s) of "A" have conflicts: \
+            # N:     attr1: expected "P2", got "B"
+[builtins fixtures/property.pyi]
+
+-- FIXME: things like this should work
+[case testWeirdRecursiveInferenceForProtocols-skip]
+from typing import Protocol, TypeVar, Generic
+T_co = TypeVar('T_co', covariant=True)
+T = TypeVar('T')
+
+class P(Protocol[T_co]):
+    def meth(self) -> P[T_co]: pass
+
+class C(Generic[T]):
+    def meth(self) -> C[T]: pass
+
+x: C[int]
+def f(arg: P[T]) -> T: pass
+reveal_type(f(x)) #E: Revealed type is 'builtins.int*'
+
+-- @property, @classmethod and @staticmethod in protocol types
+-- -----------------------------------------------------------
+
+[case testCannotInstantiateAbstractMethodExplicitProtocolSubtypes]
+from typing import Protocol
+from abc import abstractmethod
+
+class P(Protocol):
+    @abstractmethod
+    def meth(self) -> int:
+        pass
+
+class A(P):
+    pass
+
+A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'meth'
+
+class C(A):
+    def meth(self) -> int:
+        pass
+class C2(P):
+    def meth(self) -> int:
+        pass
+
+C()
+C2()
+
+[case testCannotInstantiateAbstractVariableExplicitProtocolSubtypes]
+from typing import Protocol
+
+class P(Protocol):
+    attr: int
+
+class A(P):
+    pass
+
+A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'attr'
+
+class C(A):
+    attr: int
+class C2(P):
+    def __init__(self) -> None:
+        self.attr = 1
+
+C()
+C2()
+
+class P2(Protocol):
+    attr: int = 1
+
+class B(P2): pass
+B() # OK, attr is not abstract
+
+[case testClassVarsInProtocols]
+from typing import Protocol, ClassVar
+
+class PInst(Protocol):
+   v: int
+
+class PClass(Protocol):
+   v: ClassVar[int]
+
+class CInst:
+   v: int
+
+class CClass:
+   v: ClassVar[int]
+
+x: PInst
+y: PClass
+
+x = CInst()
+x = CClass() # E: Incompatible types in assignment (expression has type "CClass", variable has type "PInst") \
+             # N: Protocol member PInst.v expected instance variable, got class variable
+y = CClass()
+y = CInst()  # E: Incompatible types in assignment (expression has type "CInst", variable has type "PClass") \
+             # N: Protocol member PClass.v expected class variable, got instance variable
+
+[case testPropertyInProtocols]
+from typing import Protocol
+
+class PP(Protocol):
+    @property
+    def attr(self) -> int:
+        pass
+
+class P(Protocol):
+    attr: int
+
+x: P
+y: PP
+y = x
+
+x2: P
+y2: PP
+x2 = y2 # E: Incompatible types in assignment (expression has type "PP", variable has type "P") \
+        # N: Protocol member P.attr expected settable variable, got read-only attribute
+[builtins fixtures/property.pyi]
+
+[case testSettablePropertyInProtocols]
+from typing import Protocol
+
+class PPS(Protocol):
+    @property
+    def attr(self) -> int:
+        pass
+    @attr.setter
+    def attr(self, x: int) -> None:
+        pass
+
+class PP(Protocol):
+    @property
+    def attr(self) -> int:
+        pass
+
+class P(Protocol):
+    attr: int
+
+x: P
+z: PPS
+z = x
+
+x2: P
+z2: PPS
+x2 = z2
+
+y3: PP
+z3: PPS
+y3 = z3
+
+y4: PP
+z4: PPS
+z4 = y4 # E: Incompatible types in assignment (expression has type "PP", variable has type "PPS") \
+        # N: Protocol member PPS.attr expected settable variable, got read-only attribute
+[builtins fixtures/property.pyi]
+
+[case testStaticAndClassMethodsInProtocols]
+from typing import Protocol, Type, TypeVar
+
+class P(Protocol):
+    def meth(self, x: int) -> str:
+        pass
+
+class PC(Protocol):
+    @classmethod
+    def meth(cls, x: int) -> str:
+        pass
+
+class B:
+    @staticmethod
+    def meth(x: int) -> str:
+        pass
+
+class C:
+    def meth(self, x: int) -> str:
+        pass
+
+x: P
+x = C()
+x = B()
+
+y: PC
+y = B()
+y = C() # E: Incompatible types in assignment (expression has type "C", variable has type "PC") \
+        # N: Protocol member PC.meth expected class or static method
+[builtins fixtures/classmethod.pyi]
+
+[case testOverloadedMethodsInProtocols]
+from typing import overload, Protocol, Union
+
+class P(Protocol):
+    @overload
+    def f(self, x: int) -> int: pass
+    @overload
+    def f(self, x: str) -> str: pass
+
+class C:
+    def f(self, x: Union[int, str]) -> None:
+        pass
+class D:
+    def f(self, x: int) -> None:
+        pass
+
+x: P = C()
+x = D()
+[out]
+main:17: error: Incompatible types in assignment (expression has type "D", variable has type "P")
+main:17: note: Following member(s) of "D" have conflicts:
+main:17: note:     Expected:
+main:17: note:         @overload
+main:17: note:         def f(self, x: int) -> int
+main:17: note:         @overload
+main:17: note:         def f(self, x: str) -> str
+main:17: note:     Got:
+main:17: note:         def f(self, x: int) -> None
+
+[case testCannotInstantiateProtocolWithOverloadedUnimplementedMethod]
+from typing import overload, Protocol
+
+class P(Protocol):
+    @overload
+    def meth(self, x: int) -> int: pass
+    @overload
+    def meth(self, x: str) -> bytes: pass
+class C(P):
+    pass
+C() # E: Cannot instantiate abstract class 'C' with abstract attribute 'meth'
+
+[case testCanUseOverloadedImplementationsInProtocols]
+from typing import overload, Protocol, Union
+class P(Protocol):
+    @overload
+    def meth(self, x: int) -> int: pass
+    @overload
+    def meth(self, x: str) -> bool: pass
+    def meth(self, x: Union[int, str]):
+        if isinstance(x, int):
+            return x
+        return True
+
+class C(P):
+    pass
+x = C()
+reveal_type(x.meth('hi')) # E: Revealed type is 'builtins.bool'
+[builtins fixtures/isinstance.pyi]
+
+[case testProtocolsWithIdenticalOverloads]
+from typing import overload, Protocol
+
+class PA(Protocol):
+    @overload
+    def meth(self, x: int) -> int: pass
+    @overload
+    def meth(self, x: str) -> bytes: pass
+class PB(Protocol): # identical to above
+    @overload
+    def meth(self, x: int) -> int: pass
+    @overload
+    def meth(self, x: str) -> bytes: pass
+
+x: PA
+y: PB
+x = y
+def fun(arg: PB) -> None: pass
+fun(x)
+
+[case testProtocolsWithIncompatibleOverloads]
+from typing import overload, Protocol
+
+class PA(Protocol):
+    @overload
+    def meth(self, x: int) -> int: pass
+    @overload
+    def meth(self, x: str) -> bytes: pass
+class PB(Protocol):
+    @overload
+    def meth(self, x: int) -> int: pass
+    @overload
+    def meth(self, x: bytes) -> str: pass
+
+x: PA
+y: PB
+x = y
+[out]
+main:16: error: Incompatible types in assignment (expression has type "PB", variable has type "PA")
+main:16: note: Following member(s) of "PB" have conflicts:
+main:16: note:     Expected:
+main:16: note:         @overload
+main:16: note:         def meth(self, x: int) -> int
+main:16: note:         @overload
+main:16: note:         def meth(self, x: str) -> bytes
+main:16: note:     Got:
+main:16: note:         @overload
+main:16: note:         def meth(self, x: int) -> int
+main:16: note:         @overload
+main:16: note:         def meth(self, x: bytes) -> str
+
+-- Join and meet with protocol types
+-- ---------------------------------
+
+[case testJoinProtocolWithProtocol]
+from typing import Protocol
+
+class P(Protocol):
+    attr: int
+class P2(Protocol):
+    attr: int
+    attr2: str
+
+x: P
+y: P2
+
+l0 = [x, x]
+l1 = [y, y]
+l = [x, y]
+reveal_type(l0) # E: Revealed type is 'builtins.list[__main__.P*]'
+reveal_type(l1) # E: Revealed type is 'builtins.list[__main__.P2*]'
+reveal_type(l) # E: Revealed type is 'builtins.list[__main__.P*]'
+[builtins fixtures/list.pyi]
+
+[case testJoinOfIncompatibleProtocols]
+from typing import Protocol
+
+class P(Protocol):
+    attr: int
+class P2(Protocol):
+    attr2: str
+
+x: P
+y: P2
+reveal_type([x, y]) # E: Revealed type is 'builtins.list[builtins.object*]'
+[builtins fixtures/list.pyi]
+
+[case testJoinProtocolWithNormal]
+from typing import Protocol
+
+class P(Protocol):
+    attr: int
+
+class C:
+    attr: int
+
+x: P
+y: C
+
+l = [x, y]
+
+reveal_type(l) # E: Revealed type is 'builtins.list[__main__.P*]'
+[builtins fixtures/list.pyi]
+
+[case testMeetProtocolWithProtocol]
+from typing import Protocol, Callable, TypeVar
+
+class P(Protocol):
+    attr: int
+class P2(Protocol):
+    attr: int
+    attr2: str
+
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: P, y: P2) -> None: pass
+reveal_type(f(g)) # E: Revealed type is '__main__.P2*'
+
+[case testMeetOfIncompatibleProtocols]
+from typing import Protocol, Callable, TypeVar
+
+class P(Protocol):
+    attr: int
+class P2(Protocol):
+    attr2: str
+
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: P, y: P2) -> None: pass
+x = f(g) # E: "f" does not return a value
+
+[case testMeetProtocolWithNormal]
+from typing import Protocol, Callable, TypeVar
+
+class P(Protocol):
+    attr: int
+class C:
+    attr: int
+
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: P, y: C) -> None: pass
+reveal_type(f(g)) # E: Revealed type is '__main__.C*'
+
+[case testInferProtocolFromProtocol]
+from typing import Protocol, Sequence, TypeVar, Generic
+
+T = TypeVar('T')
+class Box(Protocol[T]):
+    content: T
+class Linked(Protocol[T]):
+    val: T
+    def next(self) -> Linked[T]: pass
+
+class L(Generic[T]):
+    val: Box[T]
+    def next(self) -> L[T]: pass
+
+def last(seq: Linked[T]) -> T:
+    pass
+
+reveal_type(last(L[int]())) # E: Revealed type is '__main__.Box*[builtins.int*]'
+reveal_type(last(L[str]()).content) # E: Revealed type is 'builtins.str*'
+
+[case testOverloadOnProtocol]
+from typing import overload, Protocol, runtime
+
+@runtime
+class P1(Protocol):
+    attr1: int
+class P2(Protocol):
+    attr2: str
+
+class C1:
+    attr1: int
+class C2:
+    attr2: str
+class C: pass
+
+@overload
+def f(x: P1) -> int: ...
+@overload
+def f(x: P2) -> str: ...
+def f(x):
+    if isinstance(x, P1):
+        return P1.attr1
+    if isinstance(x, P2): # E: Only @runtime protocols can be used with instance and class checks
+        return P1.attr2
+
+reveal_type(f(C1())) # E: Revealed type is 'builtins.int'
+reveal_type(f(C2())) # E: Revealed type is 'builtins.str'
+class D(C1, C2): pass # Compatible with both P1 and P2
+# FIXME: the below is not right, see #1322
+reveal_type(f(D())) # E: Revealed type is 'Any'
+f(C()) # E: No overload variant of "f" matches argument types [__main__.C]
+[builtins fixtures/isinstance.pyi]
+
+-- Unions of protocol types
+-- ------------------------
+
+[case testBasicUnionsOfProtocols]
+from typing import Union, Protocol
+
+class P1(Protocol):
+    attr1: int
+class P2(Protocol):
+    attr2: int
+
+class C1:
+    attr1: int
+class C2:
+    attr2: int
+class C(C1, C2):
+    pass
+
+class B: ...
+
+x: Union[P1, P2]
+
+x = C1()
+x = C2()
+x = C()
+x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "Union[P1, P2]")
+
+[case testUnionsOfNormalClassesWithProtocols]
+from typing import Protocol, Union
+
+class P1(Protocol):
+    attr1: int
+class P2(Protocol):
+    attr2: int
+
+class C1:
+    attr1: int
+class C2:
+    attr2: int
+class C(C1, C2):
+    pass
+
+class D1:
+    attr1: int
+
+def f1(x: P1) -> None:
+    pass
+def f2(x: P2) -> None:
+    pass
+
+x: Union[C1, C2]
+y: Union[C1, D1]
+z: Union[C, D1]
+
+f1(x) # E: Argument 1 to "f1" has incompatible type "Union[C1, C2]"; expected "P1"
+f1(y)
+f1(z)
+f2(x) # E: Argument 1 to "f2" has incompatible type "Union[C1, C2]"; expected "P2"
+f2(z) # E: Argument 1 to "f2" has incompatible type "Union[C, D1]"; expected "P2"
+
+-- Type[] with protocol types
+-- --------------------------
+
+[case testInstantiationProtocolInTypeForFunctions]
+from typing import Type, Protocol
+
+class P(Protocol):
+    def m(self) -> None: pass
+class P1(Protocol):
+    def m(self) -> None: pass
+class Pbad(Protocol):
+    def mbad(self) -> int: pass
+class B(P): pass
+class C:
+    def m(self) -> None:
+        pass
+
+def f(cls: Type[P]) -> P:
+    return cls()  # OK
+def g() -> P:
+    return P()  # E: Cannot instantiate protocol class "P"
+
+f(P)  # E: Only concrete class can be given where "Type[P]" is expected
+f(B)  # OK
+f(C)  # OK
+x: Type[P1]
+xbad: Type[Pbad]
+f(x)  # OK
+f(xbad)  # E: Argument 1 to "f" has incompatible type "Type[Pbad]"; expected "Type[P]"
+
+[case testInstantiationProtocolInTypeForAliases]
+from typing import Type, Protocol
+
+class P(Protocol):
+    def m(self) -> None: pass
+class C:
+    def m(self) -> None:
+        pass
+
+def f(cls: Type[P]) -> P:
+    return cls()  # OK
+
+Alias = P
+GoodAlias = C
+Alias()  # E: Cannot instantiate protocol class "P"
+GoodAlias()
+f(Alias)  # E: Only concrete class can be given where "Type[P]" is expected
+f(GoodAlias)
+
+[case testInstantiationProtocolInTypeForVariables]
+from typing import Type, Protocol
+
+class P(Protocol):
+    def m(self) -> None: pass
+class B(P): pass
+class C:
+    def m(self) -> None:
+        pass
+
+var: Type[P]
+var()
+var = P # E: Can only assign concrete classes to a variable of type "Type[P]"
+var = B # OK
+var = C # OK
+
+var_old = None # type: Type[P] # Old syntax for variable annotations
+var_old()
+var_old = P # E: Can only assign concrete classes to a variable of type "Type[P]"
+var_old = B # OK
+var_old = C # OK
+
+[case testInstantiationProtocolInTypeForClassMethods]
+from typing import Type, Protocol
+
+class Logger:
+    @staticmethod
+    def log(a: Type[C]):
+        pass
+class C(Protocol):
+    @classmethod
+    def action(cls) -> None:
+        cls() #OK for classmethods
+        Logger.log(cls)  #OK for classmethods
+[builtins fixtures/classmethod.pyi]
+
+-- isinstance() with @runtime protocols
+-- ------------------------------------
+
+[case testSimpleRuntimeProtocolCheck]
+from typing import Protocol, runtime
+
+@runtime # E: @runtime can only be used with protocol classes
+class C:
+    pass
+
+class P(Protocol):
+    def meth(self) -> None:
+        pass
+
+@runtime
+class R(Protocol):
+    def meth(self) -> int:
+        pass
+
+x: object
+
+if isinstance(x, P):  # E: Only @runtime protocols can be used with instance and class checks
+    reveal_type(x)  # E: Revealed type is '__main__.P'
+
+if isinstance(x, R):
+    reveal_type(x)  # E: Revealed type is '__main__.R'
+    reveal_type(x.meth())  # E: Revealed type is 'builtins.int'
+[builtins fixtures/isinstance.pyi]
+
+[case testRuntimeIterableProtocolCheck]
+from typing import Iterable, List, Union
+
+x: Union[int, List[str]]
+
+if isinstance(x, Iterable):
+    reveal_type(x) # E: Revealed type is 'builtins.list[builtins.str]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testConcreteClassesInProtocolsIsInstance]
+from typing import Protocol, runtime, TypeVar, Generic
+
+T = TypeVar('T')
+
+@runtime
+class P1(Protocol):
+    def meth1(self) -> int:
+        pass
+@runtime
+class P2(Protocol):
+    def meth2(self) -> int:
+        pass
+@runtime
+class P(P1, P2, Protocol):
+    pass
+
+class C1(Generic[T]):
+    def meth1(self) -> T:
+        pass
+class C2:
+    def meth2(self) -> int:
+        pass
+class C(C1[int], C2): pass
+
+c = C()
+if isinstance(c, P1):
+    reveal_type(c) # E: Revealed type is '__main__.C'
+else:
+    reveal_type(c) # Unreachable
+if isinstance(c, P):
+    reveal_type(c) # E: Revealed type is '__main__.C'
+else:
+    reveal_type(c) # Unreachable
+
+c1i: C1[int]
+if isinstance(c1i, P1):
+    reveal_type(c1i) # E: Revealed type is '__main__.C1[builtins.int]'
+else:
+    reveal_type(c1i) # Unreachable
+if isinstance(c1i, P):
+    reveal_type(c1i) # Unreachable
+else:
+    reveal_type(c1i) # E: Revealed type is '__main__.C1[builtins.int]'
+
+c1s: C1[str]
+if isinstance(c1s, P1):
+    reveal_type(c1s) # Unreachable
+else:
+    reveal_type(c1s) # E: Revealed type is '__main__.C1[builtins.str]'
+
+c2: C2
+if isinstance(c2, P):
+    reveal_type(c2) # Unreachable
+else:
+    reveal_type(c2) # E: Revealed type is '__main__.C2'
+
+[builtins fixtures/isinstancelist.pyi]
+
+[case testConcreteClassesUnionInProtocolsIsInstance]
+from typing import Protocol, runtime, TypeVar, Generic, Union
+
+T = TypeVar('T')
+
+@runtime
+class P1(Protocol):
+    def meth1(self) -> int:
+        pass
+@runtime
+class P2(Protocol):
+    def meth2(self) -> int:
+        pass
+
+class C1(Generic[T]):
+    def meth1(self) -> T:
+        pass
+class C2:
+    def meth2(self) -> int:
+        pass
+
+x: Union[C1[int], C2]
+if isinstance(x, P1):
+    reveal_type(x) # E: Revealed type is '__main__.C1[builtins.int]'
+else:
+    reveal_type(x) # E: Revealed type is '__main__.C2'
+
+if isinstance(x, P2):
+    reveal_type(x) # E: Revealed type is '__main__.C2'
+else:
+    reveal_type(x) # E: Revealed type is '__main__.C1[builtins.int]'
+[builtins fixtures/isinstancelist.pyi]
+
+-- Non-Instances and protocol types (Callable vs __call__ etc.)
+-- ------------------------------------------------------------
+
+[case testBasicTupleStructuralSubtyping]
+from typing import Tuple, TypeVar, Protocol
+
+T = TypeVar('T', covariant=True)
+
+class MyProto(Protocol[T]):
+    def __len__(self) -> T:
+        pass
+
+t: Tuple[int, str]
+def f(x: MyProto[int]) -> None:
+    pass
+f(t)  # OK
+
+y: MyProto[str]
+y = t # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "MyProto[str]")
+[builtins fixtures/isinstancelist.pyi]
+
+[case testBasicNamedTupleStructuralSubtyping]
+from typing import NamedTuple, TypeVar, Protocol
+
+T = TypeVar('T', covariant=True)
+S = TypeVar('S', covariant=True)
+
+class P(Protocol[T, S]):
+    @property
+    def x(self) -> T: pass
+    @property
+    def y(self) -> S: pass
+
+class N(NamedTuple):
+    x: int
+    y: str
+class N2(NamedTuple):
+    x: int
+class N3(NamedTuple):
+    x: int
+    y: int
+
+z: N
+z3: N3
+
+def fun(x: P[int, str]) -> None:
+    pass
+def fun2(x: P[int, int]) -> None:
+    pass
+def fun3(x: P[T, T]) -> T:
+    return x.x
+
+fun(z)
+fun2(z) # E: Argument 1 to "fun2" has incompatible type "N"; expected "P[int, int]" \
+        # N: Following member(s) of "N" have conflicts: \
+        # N:     y: expected "int", got "str"
+
+fun(N2(1)) # E: Argument 1 to "fun" has incompatible type "N2"; expected "P[int, str]" \
+           # N: 'N2' is missing following 'P' protocol member: \
+           # N:     y
+
+reveal_type(fun3(z)) # E: Revealed type is 'builtins.object*'
+
+reveal_type(fun3(z3)) # E: Revealed type is 'builtins.int*'
+[builtins fixtures/list.pyi]
+
+[case testBasicCallableStructuralSubtyping]
+from typing import Callable, Generic, TypeVar
+
+def apply(f: Callable[[int], int], x: int) -> int:
+    return f(x)
+
+class Add5:
+    def __call__(self, x: int) -> int:
+        return x + 5
+
+apply(Add5(), 5)
+
+T = TypeVar('T')
+def apply_gen(f: Callable[[T], T]) -> T:
+    pass
+
+reveal_type(apply_gen(Add5())) # E: Revealed type is 'builtins.int*'
+def apply_str(f: Callable[[str], int], x: str) -> int:
+    return f(x)
+apply_str(Add5(), 'a') # E: Argument 1 to "apply_str" has incompatible type "Add5"; expected "Callable[[str], int]" \
+                       # N: "Add5.__call__" has type "Callable[[Arg(int, 'x')], int]"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testMoreComplexCallableStructuralSubtyping]
+from mypy_extensions import Arg, VarArg
+from typing import Protocol, Callable
+
+def call_soon(cb: Callable[[Arg(int, 'x'), VarArg(str)], int]): pass
+
+class Good:
+    def __call__(self, x: int, *rest: str) -> int: pass
+class Bad1:
+    def __call__(self, x: int, *rest: int) -> int: pass
+class Bad2:
+    def __call__(self, y: int, *rest: str) -> int: pass
+call_soon(Good())
+call_soon(Bad1()) # E: Argument 1 to "call_soon" has incompatible type "Bad1"; expected "Callable[[int, VarArg(str)], int]" \
+                  # N: "Bad1.__call__" has type "Callable[[Arg(int, 'x'), VarArg(int)], int]"
+call_soon(Bad2()) # E: Argument 1 to "call_soon" has incompatible type "Bad2"; expected "Callable[[int, VarArg(str)], int]" \
+                  # N: "Bad2.__call__" has type "Callable[[Arg(int, 'y'), VarArg(str)], int]"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testStructuralSupportForPartial]
+from typing import Callable, TypeVar, Generic, Any
+
+T = TypeVar('T')
+
+class partial(Generic[T]):
+    def __init__(self, func: Callable[..., T], *args: Any) -> None: ...
+    def __call__(self, *args: Any) -> T: ...
+
+def inc(a: int, temp: str) -> int:
+    pass
+
+def foo(f: Callable[[int], T]) -> T:
+    return f(1)
+
+reveal_type(foo(partial(inc, 'temp'))) # E: Revealed type is 'builtins.int*'
+[builtins fixtures/list.pyi]
+
+[case testStructuralInferenceForCallable]
+from typing import Callable, TypeVar, Tuple
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class Actual:
+    def __call__(self, arg: int) -> str: pass
+
+def fun(cb: Callable[[T], S]) -> Tuple[T, S]: pass
+reveal_type(fun(Actual())) # E: Revealed type is 'Tuple[builtins.int*, builtins.str*]'
+[builtins fixtures/tuple.pyi]
+
+-- Standard protocol types (SupportsInt, Sized, etc.)
+-- --------------------------------------------------
+
+-- More tests could be added for types from typing converted to protocols
+
+[case testBasicSizedProtocol]
+from typing import Sized
+
+class Foo:
+    def __len__(self) -> int:
+        return 42
+
+def bar(a: Sized) -> int:
+    return a.__len__()
+
+bar(Foo())
+bar((1, 2))
+bar(1) # E: Argument 1 to "bar" has incompatible type "int"; expected "Sized"
+
+[builtins fixtures/isinstancelist.pyi]
+
+[case testBasicSupportsIntProtocol]
+from typing import SupportsInt
+
+class Bar:
+    def __int__(self):
+        return 1
+
+def foo(a: SupportsInt):
+    pass
+
+foo(Bar())
+foo('no way') # E: Argument 1 to "foo" has incompatible type "str"; expected "SupportsInt"
+
+[builtins fixtures/isinstancelist.pyi]
+
+-- Additional tests and corner cases for protocols
+-- ----------------------------------------------
+
+[case testAnyWithProtocols]
+from typing import Protocol, Any, TypeVar
+
+T = TypeVar('T')
+
+class P1(Protocol):
+    attr1: int
+class P2(Protocol[T]):
+    attr2: T
+class P3(Protocol):
+    attr: P3
+
+def f1(x: P1) -> None: pass
+def f2(x: P2[str]) -> None: pass
+def f3(x: P3) -> None: pass
+
+class C1:
+    attr1: Any
+class C2:
+    attr2: Any
+class C3:
+    attr: Any
+
+f1(C1())
+f2(C2())
+f3(C3())
+
+f2(C3())  # E: Argument 1 to "f2" has incompatible type "C3"; expected "P2[str]"
+a: Any
+f1(a)
+f2(a)
+f3(a)
+
+[case testErrorsForProtocolsInDifferentPlaces]
+from typing import Protocol
+
+class P(Protocol):
+    attr1: int
+    attr2: str
+    attr3: int
+
+class C:
+    attr1: str
+    @property
+    def attr2(self) -> int: pass
+
+x: P = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P") \
+           # N: 'C' is missing following 'P' protocol member: \
+           # N:     attr3 \
+           # N: Following member(s) of "C" have conflicts: \
+           # N:     attr1: expected "int", got "str" \
+           # N:     attr2: expected "str", got "int" \
+           # N: Protocol member P.attr2 expected settable variable, got read-only attribute
+
+def f(x: P) -> P:
+    return C() # E: Incompatible return value type (got "C", expected "P") \
+               # N: 'C' is missing following 'P' protocol member: \
+               # N:     attr3 \
+               # N: Following member(s) of "C" have conflicts: \
+               # N:     attr1: expected "int", got "str" \
+               # N:     attr2: expected "str", got "int" \
+               # N: Protocol member P.attr2 expected settable variable, got read-only attribute
+
+f(C()) # E: Argument 1 to "f" has incompatible type "C"; expected "P" \
+       # N: 'C' is missing following 'P' protocol member: \
+       # N:     attr3 \
+       # N: Following member(s) of "C" have conflicts: \
+       # N:     attr1: expected "int", got "str" \
+       # N:     attr2: expected "str", got "int" \
+       # N: Protocol member P.attr2 expected settable variable, got read-only attribute
+[builtins fixtures/list.pyi]
+
+[case testIterableProtocolOnClass]
+from typing import TypeVar, Iterator
+T = TypeVar('T', bound='A')
+
+class A:
+    def __iter__(self: T) -> Iterator[T]: pass
+
+class B(A): pass
+
+reveal_type(list(b for b in B()))  # E: Revealed type is 'builtins.list[__main__.B*]'
+reveal_type(list(B()))  # E: Revealed type is 'builtins.list[__main__.B*]'
+[builtins fixtures/list.pyi]
+
+[case testIterableProtocolOnMetaclass]
+from typing import TypeVar, Iterator, Type
+T = TypeVar('T')
+
+class EMeta(type):
+    def __iter__(self: Type[T]) -> Iterator[T]: pass
+
+class E(metaclass=EMeta):
+    pass
+
+class C(E):
+    pass
+
+reveal_type(list(c for c in C))  # E: Revealed type is 'builtins.list[__main__.C*]'
+reveal_type(list(C))  # E: Revealed type is 'builtins.list[__main__.C*]'
+[builtins fixtures/list.pyi]
+
+[case testClassesGetattrWithProtocols]
+from typing import Protocol
+
+class P(Protocol):
+    attr: int
+
+class PP(Protocol):
+    @property
+    def attr(self) -> int:
+        pass
+
+class C:
+    def __getattr__(self, attr: str) -> int:
+        pass
+class C2(C):
+    def __setattr__(self, attr: str, val: int) -> None:
+        pass
+
+class D:
+    def __getattr__(self, attr: str) -> str:
+        pass
+
+def fun(x: P) -> None:
+    reveal_type(P.attr) # E: Revealed type is 'builtins.int'
+def fun_p(x: PP) -> None:
+    reveal_type(P.attr) # E: Revealed type is 'builtins.int'
+
+fun(C())  # E: Argument 1 to "fun" has incompatible type "C"; expected "P" \
+          # N: Protocol member P.attr expected settable variable, got read-only attribute
+fun(C2())
+fun_p(D())  # E: Argument 1 to "fun_p" has incompatible type "D"; expected "PP" \
+            # N: Following member(s) of "D" have conflicts: \
+            # N:     attr: expected "int", got "str"
+fun_p(C())  # OK
+[builtins fixtures/list.pyi]
+
+[case testImplicitTypesInProtocols]
+from typing import Protocol
+
+class P(Protocol):
+    x = 1  # E: All protocol members must have explicitly declared types
+
+class C:
+    x: int
+
+class D:
+    x: str
+
+x: P
+x = D() # E: Incompatible types in assignment (expression has type "D", variable has type "P") \
+        # N: Following member(s) of "D" have conflicts: \
+        # N:     x: expected "int", got "str"
+x = C() # OK
+[builtins fixtures/list.pyi]
+
+[case testProtocolIncompatibilityWithGenericMethod]
+from typing import Protocol, TypeVar
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class A(Protocol):
+    def f(self, x: T) -> None: pass
+class B:
+    def f(self, x: S, y: T) -> None: pass
+
+x: A = B()
+[out]
+main:11: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:11: note: Following member(s) of "B" have conflicts:
+main:11: note:     Expected:
+main:11: note:         def [T] f(self, x: T) -> None
+main:11: note:     Got:
+main:11: note:         def [S, T] f(self, x: S, y: T) -> None
+
+[case testProtocolIncompatibilityWithGenericMethodBounded]
+from typing import Protocol, TypeVar
+
+T = TypeVar('T')
+S = TypeVar('S', bound=int)
+
+class A(Protocol):
+    def f(self, x: T) -> None: pass
+class B:
+    def f(self, x: S, y: T) -> None: pass
+
+x: A = B()
+[out]
+main:11: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:11: note: Following member(s) of "B" have conflicts:
+main:11: note:     Expected:
+main:11: note:         def [T] f(self, x: T) -> None
+main:11: note:     Got:
+main:11: note:         def [S <: int, T] f(self, x: S, y: T) -> None
+
+[case testProtocolIncompatibilityWithGenericRestricted]
+from typing import Protocol, TypeVar
+
+T = TypeVar('T')
+S = TypeVar('S', int, str)
+
+class A(Protocol):
+    def f(self, x: T) -> None: pass
+class B:
+    def f(self, x: S, y: T) -> None: pass
+
+x: A = B()
+[out]
+main:11: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:11: note: Following member(s) of "B" have conflicts:
+main:11: note:     Expected:
+main:11: note:         def [T] f(self, x: T) -> None
+main:11: note:     Got:
+main:11: note:         def [S in (int, str), T] f(self, x: S, y: T) -> None
+
+[case testProtocolIncompatibilityWithManyOverloads]
+from typing import Protocol, overload
+
+class C1: pass
+class C2: pass
+class A(Protocol):
+    @overload
+    def f(self, x: int) -> int: pass
+    @overload
+    def f(self, x: str) -> str: pass
+    @overload
+    def f(self, x: C1) -> C2: pass
+    @overload
+    def f(self, x: C2) -> C1: pass
+
+class B:
+    def f(self) -> None: pass
+
+x: A = B()
+[out]
+main:18: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:18: note: Following member(s) of "B" have conflicts:
+main:18: note:     Expected:
+main:18: note:         @overload
+main:18: note:         def f(self, x: int) -> int
+main:18: note:         @overload
+main:18: note:         def f(self, x: str) -> str
+main:18: note:         <2 more overload(s) not shown>
+main:18: note:     Got:
+main:18: note:         def f(self) -> None
+
+[case testProtocolIncompatibilityWithManyConflicts]
+from typing import Protocol
+
+class A(Protocol):
+    def f(self, x: int) -> None: pass
+    def g(self, x: int) -> None: pass
+    def h(self, x: int) -> None: pass
+    def i(self, x: int) -> None: pass
+class B:
+    def f(self, x: str) -> None: pass
+    def g(self, x: str) -> None: pass
+    def h(self, x: str) -> None: pass
+    def i(self, x: str) -> None: pass
+
+x: A = B()
+[out]
+main:14: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:14: note: Following member(s) of "B" have conflicts:
+main:14: note:     Expected:
+main:14: note:         def f(self, x: int) -> None
+main:14: note:     Got:
+main:14: note:         def f(self, x: str) -> None
+main:14: note:     Expected:
+main:14: note:         def g(self, x: int) -> None
+main:14: note:     Got:
+main:14: note:         def g(self, x: str) -> None
+main:14: note:     <2 more conflict(s) not shown>
+
+[case testDontShowNotesForTupleAndIterableProtocol]
+from typing import Iterable, Sequence, Protocol, NamedTuple
+
+class N(NamedTuple):
+    x: int
+
+def f1(x: Iterable[str]) -> None: pass
+def f2(x: Sequence[str]) -> None: pass
+
+# The errors below should be short
+f1(N(1))  # E: Argument 1 to "f1" has incompatible type "N"; expected "Iterable[str]"
+f2(N(2))  # E: Argument 1 to "f2" has incompatible type "N"; expected "Sequence[str]"
+[builtins fixtures/tuple.pyi]
+
+[case testNotManyFlagConflitsShownInProtocols]
+from typing import Protocol
+
+class AllSettable(Protocol):
+    a: int
+    b: int
+    c: int
+    d: int
+
+class AllReadOnly:
+    @property
+    def a(self) -> int: pass
+    @property
+    def b(self) -> int: pass
+    @property
+    def c(self) -> int: pass
+    @property
+    def d(self) -> int: pass
+
+x: AllSettable = AllReadOnly()
+[builtins fixtures/property.pyi]
+[out]
+main:19: error: Incompatible types in assignment (expression has type "AllReadOnly", variable has type "AllSettable")
+main:19: note: Protocol member AllSettable.a expected settable variable, got read-only attribute
+main:19: note: Protocol member AllSettable.b expected settable variable, got read-only attribute
+main:19: note:     <2 more conflict(s) not shown>
+
+[case testProtocolsMoreConflictsNotShown]
+from typing_extensions import Protocol
+from typing import Generic, TypeVar
+
+T = TypeVar('T')
+
+class MockMapping(Protocol[T]):
+    def a(self, x: T) -> int: pass
+    def b(self, x: T) -> int: pass
+    def c(self, x: T) -> int: pass
+    d: T
+    e: T
+    f: T
+
+class MockDict(MockMapping[T]):
+    more: int
+
+def f(x: MockMapping[int]) -> None: pass
+x: MockDict[str]
+f(x)  # E: Argument 1 to "f" has incompatible type "MockDict[str]"; expected "MockMapping[int]"
+
+[case testProtocolNotesForComplexSignatures]
+from typing import Protocol, Optional
+
+class P(Protocol):
+    def meth(self, x: int, *args: str) -> None: pass
+    def other(self, *args, hint: Optional[str] = None, **kwargs: str) -> None: pass
+class C:
+    def meth(self) -> int: pass
+    def other(self) -> int: pass
+
+x: P = C()
+[builtins fixtures/dict.pyi]
+[out]
+main:10: error: Incompatible types in assignment (expression has type "C", variable has type "P")
+main:10: note: Following member(s) of "C" have conflicts:
+main:10: note:     Expected:
+main:10: note:         def meth(self, x: int, *args: str) -> None
+main:10: note:     Got:
+main:10: note:         def meth(self) -> int
+main:10: note:     Expected:
+main:10: note:         def other(self, *args: Any, hint: Optional[str] = ..., **kwargs: str) -> None
+main:10: note:     Got:
+main:10: note:         def other(self) -> int
+
+[case testObjectAllowedInProtocolBases]
+from typing import Protocol
+class P(Protocol, object):
+    pass
+[out]
+
+[case testNoneSubtypeOfEmptyProtocol]
+from typing import Protocol
+class P(Protocol):
+    pass
+
+x: P = None
+[out]
+
+[case testNoneSubtypeOfAllProtocolsWithoutStrictOptional]
+from typing import Protocol
+class P(Protocol):
+    attr: int
+    def meth(self, arg: str) -> str:
+        pass
+
+x: P = None
+[out]
+
+[case testNoneSubtypeOfEmptyProtocolStrict]
+# flags: --strict-optional
+from typing import Protocol
+class P(Protocol):
+    pass
+x: P = None
+
+class PBad(Protocol):
+    x: int
+y: PBad = None  # E: Incompatible types in assignment (expression has type "None", variable has type "PBad")
+[out]
+
+[case testOnlyMethodProtocolUsableWithIsSubclass]
+from typing import Protocol, runtime, Union, Type
+@runtime
+class P(Protocol):
+    def meth(self) -> int:
+        pass
+@runtime
+class PBad(Protocol):
+    x: str
+
+class C:
+    x: str
+    def meth(self) -> int:
+        pass
+class E: pass
+
+cls: Type[Union[C, E]]
+issubclass(cls, PBad)  # E: Only protocols that don't have non-method members can be used with issubclass() \
+                       # N: Protocol "PBad" has non-method member(s): x
+if issubclass(cls, P):
+    reveal_type(cls)  # E: Revealed type is 'Type[__main__.C]'
+else:
+    reveal_type(cls)  # E: Revealed type is 'Type[__main__.E]'
+[builtins fixtures/isinstance.pyi]
+[out]
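
(For readers skimming the protocol tests above: they exercise the PEP 544 structural subtyping that this release ships. A minimal standalone sketch of the feature, written against typing_extensions rather than the test fixtures -- the class and function names below are illustrative and not taken from the diff, and the decorator the tests spell @runtime was later published under the name runtime_checkable:

    from typing_extensions import Protocol, runtime_checkable

    @runtime_checkable
    class SupportsClose(Protocol):
        def close(self) -> None: ...

    class Resource:                     # note: no inheritance from SupportsClose
        def __init__(self) -> None:
            self.open = True
        def close(self) -> None:
            self.open = False

    def shutdown(r: SupportsClose) -> None:
        r.close()

    shutdown(Resource())                          # accepted: structural match on close()
    print(isinstance(Resource(), SupportsClose))  # True; the runtime check only looks for members

As the tests above pin down, isinstance()/issubclass() are rejected for protocols not marked runtime-checkable, and issubclass() additionally requires the protocol to have no non-method members.)
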
diff --git a/test-data/unit/check-python2.test b/test-data/unit/check-python2.test
index b3b899b..f106412 100644
--- a/test-data/unit/check-python2.test
+++ b/test-data/unit/check-python2.test
@@ -34,7 +34,7 @@ class A:
 print >>A(), ''
 print >>None, ''
 print >>1, '' # E: "int" has no attribute "write"
-print >>(None + ''), None # E: Unsupported left operand type for + (None)
+print >>(None + ''), None # E: Unsupported left operand type for + ("None")
 
 [case testDivision]
 class A:
@@ -77,6 +77,21 @@ except BaseException, e:
     e() # E: "BaseException" not callable
 [builtins_py2 fixtures/exception.pyi]
 
+[case testTryExceptUnsupported]
+try:
+    pass
+except BaseException, (e, f):  # E: Sorry, `except <expr>, <anything but a name>` is not supported
+    pass
+try:
+    pass
+except BaseException, [e, f, g]:  # E: Sorry, `except <expr>, <anything but a name>` is not supported
+    pass
+try:
+    pass
+except BaseException, e[0]:  # E: Sorry, `except <expr>, <anything but a name>` is not supported
+    pass
+[builtins_py2 fixtures/exception.pyi]
+
 [case testAlternateNameSuggestions]
 class Foo(object):
     def say_hello(self):
@@ -291,7 +306,7 @@ class A(object):
     __metaclass__ = M
     y = 0
 reveal_type(A.y) # E: Revealed type is 'builtins.int'
-A.x # E: Type[A] has no attribute "x"
+A.x # E: "Type[A]" has no attribute "x"
 
 [case testAnyAsBaseOfMetaclass]
 from typing import Any, Type
@@ -308,3 +323,12 @@ class A:
 [out]
 main:2: error: Invalid type for self, or extra argument type in function annotation
 main:2: note: (Hint: typically annotations omit the type for self)
+
+[case testSuper]
+class A:
+    def f(self): # type: () -> None
+        pass
+class B(A):
+    def g(self): # type: () -> None
+        super(B, self).f()
+        super().f() # E: Too few arguments for "super"
diff --git a/test-data/unit/check-serialize.test b/test-data/unit/check-serialize.test
index 2996b8b..0c01b75 100644
--- a/test-data/unit/check-serialize.test
+++ b/test-data/unit/check-serialize.test
@@ -662,7 +662,7 @@ class A: pass
 def f() -> None: pass
 [builtins fixtures/tuple.pyi]
 [out2]
-tmp/a.py:4: error: Incompatible types in assignment (expression has type Callable[[], None], variable has type "type")
+tmp/a.py:4: error: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "type")
 
 [case testSerializeOverloadedVsTypeObjectDistinction]
 import a
diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test
index 8c1f85b..cd38fc0 100644
--- a/test-data/unit/check-statements.test
+++ b/test-data/unit/check-statements.test
@@ -377,7 +377,7 @@ import typing
 assert None + None # Fail
 assert None
 [out]
-main:2: error: Unsupported left operand type for + (None)
+main:2: error: Unsupported left operand type for + ("None")
 
 
 -- Exception handling
@@ -594,14 +594,14 @@ else:
     def f3() -> None: pass
 [builtins fixtures/exception.pyi]
 [out]
-main:7: error: Incompatible redefinition (redefinition with type Callable[[], str], original type Callable[[], None])
+main:7: error: Incompatible redefinition (redefinition with type "Callable[[], str]", original type "Callable[[], None]")
 
 [case testExceptWithoutType]
 import typing
 try:
-    -None # E: Unsupported operand type for unary - (None)
+    -None # E: Unsupported operand type for unary - ("None")
 except:
-    ~None # E: Unsupported operand type for ~ (None)
+    ~None # E: Unsupported operand type for ~ ("None")
 [builtins fixtures/exception.pyi]
 
 [case testRaiseWithoutArgument]
@@ -1050,7 +1050,7 @@ while x == 5: ...  # E: Trying to read deleted variable 'x'
 from typing import Iterator
 def f() -> Iterator[int]:
     yield 1
-    yield '' # E: Incompatible types in yield (actual type "str", expected type "int")
+    yield '' # E: Incompatible types in "yield" (actual type "str", expected type "int")
 [builtins fixtures/for.pyi]
 [out]
 
@@ -1102,7 +1102,7 @@ def f() -> int: # E: The return type of a generator function should be "Generato
 from typing import List, Iterator
 def f() -> 'Iterator[List[int]]':
     yield []
-    yield [object()] # E: List item 0 has incompatible type "object"
+    yield [object()] # E: List item 0 has incompatible type "object"; expected "int"
 [builtins fixtures/for.pyi]
 [out]
 
@@ -1136,9 +1136,8 @@ def f() -> Iterator[None]:
 
 -- Yield from statement
 -- --------------------
-
--- Iterables
--- ----------
+--
+-- (It's not really a statement, but don't want to move the tests.)
 
 [case testSimpleYieldFromWithIterator]
 from typing import Iterator
@@ -1189,7 +1188,7 @@ def g() -> Iterator[List[int]]:
     yield [2, 3, 4]
 def f() -> Iterator[List[int]]:
     yield from g()
-    yield from [1, 2, 3]  # E: Incompatible types in "yield from" (actual type "int", expected type List[int])
+    yield from [1, 2, 3]  # E: Incompatible types in "yield from" (actual type "int", expected type "List[int]")
 [builtins fixtures/for.pyi]
 [out]
 
@@ -1216,6 +1215,15 @@ def f(a):
     return b
 [out]
 
+[case testYieldFromGenericCall]
+from typing import Generator, TypeVar
+T = TypeVar('T')
+def f(a: T) -> Generator[int, str, T]: pass
+def g() -> Generator[int, str, float]:
+    r = yield from f('')
+    reveal_type(r)  # E: Revealed type is 'builtins.str*'
+    return 3.14
+
 -- With statement
 -- --------------
 
@@ -1429,7 +1437,7 @@ y = 1
 from typing import List
 bs, cs = None, None # type: List[A], List[B]
 *bs, b = bs
-*bs, c = cs  # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+*bs, c = cs  # E: Incompatible types in assignment (expression has type "List[B]", variable has type "List[A]")
 *ns, c = cs
 nc = cs
 
@@ -1557,3 +1565,54 @@ reveal_type(d['weight0']) # E: Revealed type is 'builtins.float*'
 
 [builtins fixtures/floatdict.pyi]
 
+[case testForwardRefsInForStatementImplicit]
+from typing import List, NamedTuple
+lst: List[N]
+
+for i in lst:
+    reveal_type(i.x)  # E: Revealed type is 'builtins.int'
+    a: str = i[0] # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+N = NamedTuple('N', [('x', int)])
+[builtins fixtures/list.pyi]
+[out]
+
+[case testForwardRefsInForStatement]
+from typing import List, NamedTuple
+lst: List[M]
+
+for i in lst: # type: N
+    reveal_type(i.x)  # E: Revealed type is 'builtins.int'
+    a: str = i[0] # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+N = NamedTuple('N', [('x', int)])
+class M(N): pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testForwardRefsInWithStatementImplicit]
+from typing import ContextManager, Any
+from mypy_extensions import TypedDict
+cm: ContextManager[N]
+
+with cm as g:
+    a: int = g['x']
+
+N = TypedDict('N', {'x': int})
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+
+[case testForwardRefsInWithStatement]
+from typing import ContextManager, Any
+from mypy_extensions import TypedDict
+cm: ContextManager[Any]
+
+with cm as g:  # type: N
+    a: str = g['x']  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+N = TypedDict('N', {'x': int})
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+
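(A self-contained sketch of what testYieldFromGenericCall above verifies -- the value of a yield from expression is typed as the inner generator's return type. The names below are illustrative and not part of the diff:

    from typing import Generator, TypeVar

    T = TypeVar('T')

    def produce(value: T) -> Generator[int, None, T]:
        yield 1
        return value                          # the generator's return type is T

    def consume() -> Generator[int, None, float]:
        result = yield from produce('done')   # mypy infers result as str
        print(result.upper())                 # a str-only operation type-checks
        return 3.14

    print(list(consume()))                    # prints DONE, then [1]; return values travel via StopIteration

The same mechanism drives the reveal_type(r) check in that test case.)
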
diff --git a/test-data/unit/check-super.test b/test-data/unit/check-super.test
index 8853e3d..73ac92a 100644
--- a/test-data/unit/check-super.test
+++ b/test-data/unit/check-super.test
@@ -92,10 +92,6 @@ B(1)
 B(1, 'x')
 [builtins fixtures/__new__.pyi]
 
-[case testSuperOutsideMethodNoCrash]
-class C:
-    a = super().whatever  # E: super() outside of a method is not supported
-
 reveal_type(C.a)  # E: Revealed type is 'Any'
 [out]
 
@@ -115,3 +111,189 @@ class B(A):
     def foo(self):
         super(B, self).foo() # Not an error
 [out]
+
+[case testSuperWithAny]
+class B:
+    def f(self) -> None: pass
+class C(B):
+    def h(self, x) -> None:
+        reveal_type(super(x, x).f) # E: Revealed type is 'def ()'
+        reveal_type(super(C, x).f) # E: Revealed type is 'def ()'
+
+[case testSuperInUnannotatedMethod]
+class C:
+    def h(self):
+        super(C, self).xyz
+
+[case testSuperWithTypeObjects]
+from typing import Type
+
+class A:
+    def f(self) -> object: pass
+
+class B(A):
+    def f(self) -> int: pass
+
+    @classmethod
+    def g(cls, x) -> None:
+        reveal_type(super(cls, x).f) # E: Revealed type is 'def () -> builtins.object'
+
+    def h(self, t: Type[B]) -> None:
+        reveal_type(super(t, self).f) # E: Revealed type is 'def () -> builtins.object'
+[builtins fixtures/classmethod.pyi]
+
+[case testSuperWithTypeTypeAsSecondArgument]
+class B:
+    def f(self) -> None: pass
+
+class C(B):
+    def __new__(cls) -> 'C':
+        super(C, cls).f
+        return C()
+
+[case testSuperWithGenericSelf]
+from typing import TypeVar
+
+T = TypeVar('T', bound='C')
+
+class B:
+    def f(self) -> float: pass
+
+class C(B):
+    def f(self) -> int: pass
+
+    def g(self: T) -> T:
+        reveal_type(super(C, self).f) # E: Revealed type is 'def () -> builtins.float'
+        return self
+
+[case testSuperWithTypeVarValues1]
+from typing import TypeVar
+
+T = TypeVar('T', 'C', 'D')
+S = TypeVar('S', 'B', 'C')
+
+class B:
+    def f(self) -> None: pass
+
+class C(B):
+    def f(self) -> None: pass
+
+    def g(self, x: T, y: S) -> None:
+        super(C, x).f
+        super(C, y).f # E: Argument 2 for "super" not an instance of argument 1
+
+class D(C): pass
+
+[case testSuperWithTypeVarValues2]
+from typing import TypeVar, Generic
+
+T = TypeVar('T', 'C', 'D')
+S = TypeVar('S', 'B', 'C')
+
+class B:
+    def f(self) -> None: pass
+
+class C(B, Generic[T, S]):
+    def f(self) -> None: pass
+
+    def g(self, x: T, y: S) -> None:
+        super(C, x).f
+        super(C, y).f # E: Argument 2 for "super" not an instance of argument 1
+
+class D(C): pass
+
+
+-- Invalid uses of super()
+-- -----------------------
+
+
+[case testSuperOutsideMethodNoCrash]
+class C:
+    a = super().whatever  # E: super() outside of a method is not supported
+
+[case testSuperWithSingleArgument]
+class B:
+    def f(self) -> None: pass
+class C(B):
+    def __init__(self) -> None:
+        super(C).f() # E: "super" with a single argument not supported
+
+[case testSuperWithThreeArguments]
+class B:
+    def f(self) -> None: pass
+class C(B):
+    def h(self) -> None:
+        super(C, self, 1).f() # E: Too many arguments for "super"
+
+[case testSuperWithNonPositionalArguments]
+class B:
+    def f(self) -> None: pass
+class C(B):
+    def h(self) -> None:
+        super(C, x=self).f() # E: "super" only accepts positional arguments
+        super(**{}).f() # E: "super" only accepts positional arguments
+
+[case testSuperWithVarArgs]
+class B:
+    def f(self) -> None: pass
+class C(B):
+    def h(self) -> None:
+        super(*(C, self)).f() # E: Varargs not supported with "super"
+
+[case testInvalidSuperArg]
+class B:
+    def f(self) -> None: pass
+class C(B):
+    def h(self) -> None:
+        super(x, y).f # E: Name 'x' is not defined # E: Name 'y' is not defined
+
+[case testTypeErrorInSuperArg]
+class B:
+    def f(self) -> None: pass
+class C(B):
+    def h(self) -> None:
+        super(1(), self).f # E: "int" not callable
+        super(C, ''()).f  # E: "str" not callable
+
+[case testFlippedSuperArgs]
+class B:
+    def f(self) -> None: pass
+class C(B):
+    def h(self) -> None:
+        super(self, C).f # E: Argument 1 for "super" must be a type object; got a non-type instance
+
+[case testInvalidFirstSuperArg]
+class B:
+    def f(self) -> None: pass
+class C(B):
+    def h(self) -> None:
+        super(None, C).f # E: Argument 1 for "super" must be a type object; got "None"
+
+[case testInvalidSecondArgumentToSuper]
+class B:
+    def f(self) -> None: pass
+class C(B):
+    def h(self) -> None:
+        super(C, 1).f # E: Argument 2 for "super" not an instance of argument 1
+        super(C, None).f # E: Unsupported argument 2 for "super"
+
+[case testSuperInMethodWithNoArguments]
+class A:
+    def f(self) -> None: pass
+
+class B(A):
+    def g() -> None: # E: Method must have at least one argument
+        super().f() # E: super() requires one or more positional arguments in enclosing function
+    def h(self) -> None:
+        def a() -> None:
+            super().f() # E: super() requires one or more positional arguments in enclosing function
+
+[case testSuperWithUnsupportedTypeObject]
+from typing import Type
+
+class A:
+    def f(self) -> int: pass
+
+class B(A):
+    def h(self, t: Type[None]) -> None:
+        super(t, self).f # E: Unsupported argument 1 for "super"
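
(The new super() tests above mostly pin down error messages for malformed calls; the two call forms they treat as valid can be sketched as follows, under Python 3 and with illustrative names:

    class A:
        def f(self) -> str:
            return "A.f"

    class B(A):
        def g(self) -> str:
            return super().f()         # zero-argument form: only inside a method with arguments
        def h(self) -> str:
            return super(B, self).f()  # explicit form: a type object first, then an instance of it

    print(B().g(), B().h())            # A.f A.f

Everything else -- keyword arguments, varargs, a non-type first argument, or a second argument that is not an instance of the first -- is what the error cases above reject.)
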
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index 2776172..387eabc 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -91,7 +91,7 @@ from typing import Tuple
 t1 = None # type: Tuple[A, A]
 t2 = None # type: tuple
 
-t1 = t2 # E: Incompatible types in assignment (expression has type Tuple[Any, ...], variable has type "Tuple[A, A]")
+t1 = t2 # E: Incompatible types in assignment (expression has type "Tuple[Any, ...]", variable has type "Tuple[A, A]")
 t2 = t1
 
 class A: pass
@@ -388,7 +388,7 @@ aa, bb, *cc = t  # E: Need type annotation for variable
 from typing import List
 li, lo = None, None # type: List[int], List[object]
 a, b, *c = 1, 2  # type: int, int, List[int]
-c = lo  # E: Incompatible types in assignment (expression has type List[object], variable has type List[int])
+c = lo  # E: Incompatible types in assignment (expression has type "List[object]", variable has type "List[int]")
 c = li
 [builtins fixtures/list.pyi]
 
@@ -448,16 +448,16 @@ na = a  # E
 class A: pass
 [builtins fixtures/list.pyi]
 [out]
-main:6: error: List item 0 has incompatible type "A"
-main:6: error: List item 1 has incompatible type "A"
-main:9: error: Incompatible types in assignment (expression has type "A", variable has type List[A])
+main:6: error: List item 0 has incompatible type "A"; expected "int"
+main:6: error: List item 1 has incompatible type "A"; expected "int"
+main:9: error: Incompatible types in assignment (expression has type "A", variable has type "List[A]")
 
 [case testAssignmentToStarFromTupleInference]
 from typing import List
 li = None # type: List[int]
 la = None # type: List[A]
 a, *l = A(), A()
-l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = li  # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]")
 l = la
 
 class A: pass
@@ -469,7 +469,7 @@ from typing import List
 li = None # type: List[int]
 la = None # type: List[A]
 a, *l = [A(), A()]
-l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = li  # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]")
 l = la
 
 class A: pass
@@ -482,7 +482,7 @@ li = None # type: List[int]
 la = None # type: List[A]
 ta = None # type: Tuple[A, A, A]
 a, *l = ta
-l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = li  # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]")
 l = la
 
 class A: pass
@@ -494,7 +494,7 @@ from typing import List
 li = None # type: List[int]
 la = None # type: List[A]
 a, *l = la
-l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = li  # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]")
 l = la
 
 class A: pass
@@ -579,7 +579,7 @@ class LongTypeName:
     def __add__(self, x: 'LongTypeName') -> 'LongTypeName': pass
 [builtins fixtures/tuple.pyi]
 [out]
-main:3: error: Unsupported operand types for + ("LongTypeName" and tuple(length 50))
+main:3: error: Unsupported operand types for + ("LongTypeName" and <tuple: 50 items>)
 
 
 -- Tuple methods
@@ -704,18 +704,17 @@ from typing import Tuple
 class A(tuple): pass
 [out]
 
-[case testTupleBaseClass2-skip]
+[case testTupleBaseClass2]
 import m
 [file m.pyi]
-# This doesn't work correctly -- no errors are reported (#867)
 from typing import Tuple
 a = None # type: A
 class A(Tuple[int, str]): pass
 x, y = a
-x() # Expected: "int" not callable
-y() # Expected: "str" not callable
+x() # E: "int" not callable
+y() # E: "str" not callable
+[builtins fixtures/tuple.pyi]
 [out]
-(should fail)
 
 [case testGenericClassWithTupleBaseClass]
 from typing import TypeVar, Generic, Tuple
@@ -750,7 +749,7 @@ tb = () # type: Tuple[B, ...]
 fa(ta)
 fa(tb)
 fb(tb)
-fb(ta) # E: Argument 1 to "fb" has incompatible type Tuple[A, ...]; expected Tuple[B, ...]
+fb(ta) # E: Argument 1 to "fb" has incompatible type "Tuple[A, ...]"; expected "Tuple[B, ...]"
 [builtins fixtures/tuple.pyi]
 
 [case testSubtypingFixedAndVariableLengthTuples]
@@ -766,8 +765,8 @@ fa(aa)
 fa(ab)
 fa(bb)
 fb(bb)
-fb(ab) # E: Argument 1 to "fb" has incompatible type "Tuple[A, B]"; expected Tuple[B, ...]
-fb(aa) # E: Argument 1 to "fb" has incompatible type "Tuple[A, A]"; expected Tuple[B, ...]
+fb(ab) # E: Argument 1 to "fb" has incompatible type "Tuple[A, B]"; expected "Tuple[B, ...]"
+fb(aa) # E: Argument 1 to "fb" has incompatible type "Tuple[A, A]"; expected "Tuple[B, ...]"
 [builtins fixtures/tuple.pyi]
 
 [case testSubtypingTupleIsContainer]
@@ -913,7 +912,7 @@ def f(a: Tuple) -> None: pass
 f(())
 f((1,))
 f(('', ''))
-f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected Tuple[Any, ...]
+f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[Any, ...]"
 [builtins fixtures/tuple.pyi]
 
 [case testTupleSingleton]
diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test
index ab4da00..4003266 100644
--- a/test-data/unit/check-type-aliases.test
+++ b/test-data/unit/check-type-aliases.test
@@ -32,7 +32,7 @@ from typing import List
 A = List[int]
 def f(x: A) -> None: pass
 f([1])
-f(['x']) # E: List item 0 has incompatible type "str"
+f(['x']) # E: List item 0 has incompatible type "str"; expected "int"
 [builtins fixtures/list.pyi]
 [out]
 
@@ -112,3 +112,127 @@ EmptyTupleCallable = Callable[[Tuple[()]], None]
 f = None # type: EmptyTupleCallable
 reveal_type(f)  # E: Revealed type is 'def (Tuple[])'
 [builtins fixtures/list.pyi]
+
+[case testForwardTypeAlias]
+def f(p: 'Alias') -> None:
+    pass
+
+reveal_type(f) # E: Revealed type is 'def (p: builtins.int)'
+Alias = int
+[out]
+
+[case testForwardTypeAliasGeneric]
+from typing import TypeVar, Tuple
+def f(p: 'Alias[str]') -> None:
+    pass
+
+reveal_type(f) # E: Revealed type is 'def (p: Tuple[builtins.int, builtins.str])'
+T = TypeVar('T')
+Alias = Tuple[int, T]
+[out]
+
+[case testRecursiveAliasesErrors1]
+from typing import Type, Callable, Union
+
+A = Union[A, int]
+B = Callable[[B], int]
+C = Type[C]
+[out]
+main:3: error: Recursive types not fully supported yet, nested types replaced with "Any"
+main:4: error: Recursive types not fully supported yet, nested types replaced with "Any"
+main:5: error: Recursive types not fully supported yet, nested types replaced with "Any"
+
+[case testRecursiveAliasesErrors2]
+from typing import Type, Callable, Union
+
+A = Union[B, int]
+B = Callable[[C], int]
+C = Type[A]
+[out]
+main:3: error: Recursive types not fully supported yet, nested types replaced with "Any"
+main:4: error: Recursive types not fully supported yet, nested types replaced with "Any"
+main:5: error: Recursive types not fully supported yet, nested types replaced with "Any"
+
+[case testDoubleForwardAlias]
+from typing import List
+x: A
+A = List[B]
+B = List[int]
+reveal_type(x) # E: Revealed type is 'builtins.list[builtins.list[builtins.int]]'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testDoubleForwardAliasWithNamedTuple]
+from typing import List, NamedTuple
+x: A
+A = List[B]
+class B(NamedTuple):
+    x: str
+reveal_type(x[0].x) # E: Revealed type is 'builtins.str'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testJSONAliasApproximation]
+from typing import List, Union, Dict
+x: JSON
+JSON = Union[int, str, List[JSON], Dict[str, JSON]] # type: ignore
+reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any], builtins.dict[builtins.str, Any]]'
+if isinstance(x, list):
+    reveal_type(x) # E: Revealed type is 'builtins.list[Any]'
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testProhibitedForwardRefToTypeVar]
+from typing import TypeVar, List
+
+a: List[T]
+
+T = TypeVar('T')
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Invalid type "__main__.T"
+main:3: note: Forward references to type variables are prohibited
+
+[case testUnsupportedForwardRef]
+from typing import List, TypeVar
+
+T = TypeVar('T')
+
+def f(x: T) -> None:
+    y: A[T]  # E: Unsupported forward reference to "A"
+
+A = List[T]
+[builtins fixtures/list.pyi]
+[out]
+
+[case testUnsupportedForwardRef2]
+from typing import List, TypeVar
+
+def f() -> None:
+    X = List[int]
+    x: A[X]  # E: Unsupported forward reference to "A"
+
+T = TypeVar('T')
+A = List[T]
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNoneAlias]
+from typing import Union
+void = type(None)
+x: void
+reveal_type(x)  # E: Revealed type is 'builtins.None'
+y: Union[int, void]
+reveal_type(y)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[builtins fixtures/bool.pyi]
+
+[case testNoneAliasStrict]
+# flags: --strict-optional
+from typing import Optional, Union
+void = type(None)
+x: int
+y: Union[int, void]
+z: Optional[int]
+x = y  # E: Incompatible types in assignment (expression has type "Optional[int]", variable has type "int")
+y = z
+[builtins fixtures/bool.pyi]
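
(A small runnable illustration of the forward-alias support tested above: an alias may appear in a string annotation before its definition, as long as it resolves later in the module. The names are illustrative only:

    from typing import List

    def head(items: 'IntList') -> int:   # alias referenced before it is defined
        return items[0]

    IntList = List[int]                  # defined later; mypy resolves the annotation to List[int]
    print(head([10, 20]))                # 10

Recursive aliases, by contrast, are replaced with Any and flagged, which is what testRecursiveAliasesErrors1 above checks.)
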
diff --git a/test-data/unit/check-type-checks.test b/test-data/unit/check-type-checks.test
index c4905a7..dc32652 100644
--- a/test-data/unit/check-type-checks.test
+++ b/test-data/unit/check-type-checks.test
@@ -107,7 +107,7 @@ def f(x: object) -> None:
     if isinstance(x, C):
         x.f(1)
         x.f('')
-        x.g() # E: C[Any] has no attribute "g"
+        x.g() # E: "C[Any]" has no attribute "g"
     x.g() # E: "object" has no attribute "g"
 [builtins fixtures/isinstance.pyi]
 [out]
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
index 0e908e8..10823ce 100644
--- a/test-data/unit/check-typeddict.test
+++ b/test-data/unit/check-typeddict.test
@@ -295,7 +295,7 @@ from mypy_extensions import TypedDict
 from typing import Mapping
 Point = TypedDict('Point', {'x': int, 'y': int})
 def as_mapping(p: Point) -> Mapping[str, str]:
-    return p  # E: Incompatible return value type (got "Point", expected Mapping[str, str])
+    return p  # E: Incompatible return value type (got "Point", expected "Mapping[str, str]")
 [builtins fixtures/dict.pyi]
 
 [case testTypedDictAcceptsIntForFloatDuckTypes]
@@ -341,9 +341,11 @@ from mypy_extensions import TypedDict
 from typing import Dict, MutableMapping
 Point = TypedDict('Point', {'x': int, 'y': int})
 def as_dict(p: Point) -> Dict[str, int]:
-    return p  # E: Incompatible return value type (got "Point", expected Dict[str, int])
+    return p  # E: Incompatible return value type (got "Point", expected "Dict[str, int]")
 def as_mutable_mapping(p: Point) -> MutableMapping[str, int]:
-    return p  # E: Incompatible return value type (got "Point", expected MutableMapping[str, int])
+    return p  # E: Incompatible return value type (got "Point", expected "MutableMapping[str, int]") \
+              # N: 'Point' is missing following 'MutableMapping' protocol member: \
+              # N:     __setitem__
 [builtins fixtures/dict.pyi]
 
 [case testCanConvertTypedDictToAny]
@@ -367,11 +369,57 @@ c: C
 def f(a: A) -> None: pass
 
 l = [a, b]  # Join generates an anonymous TypedDict
-f(l) # E: Argument 1 to "f" has incompatible type List[TypedDict({'x': int})]; expected "A"
+f(l) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x': int})]"; expected "A"
 ll = [b, c]
-f(ll) # E: Argument 1 to "f" has incompatible type List[TypedDict({'x': int, 'z': str})]; expected "A"
+f(ll) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x': int, 'z': str})]"; expected "A"
 [builtins fixtures/dict.pyi]
 
+[case testTypedDictWithSimpleProtocol]
+from typing_extensions import Protocol
+from mypy_extensions import TypedDict
+
+class StrIntMap(Protocol):
+    def __getitem__(self, key: str) -> int: ...
+
+A = TypedDict('A', {'x': int, 'y': int})
+B = TypedDict('B', {'x': int, 'y': str})
+
+def fun(arg: StrIntMap) -> None: ...
+a: A
+b: B
+fun(a)
+fun(b)  # Error
+[builtins fixtures/dict.pyi]
+[out]
+main:14: error: Argument 1 to "fun" has incompatible type "B"; expected "StrIntMap"
+main:14: note: Following member(s) of "B" have conflicts:
+main:14: note:     Expected:
+main:14: note:         def __getitem__(self, str) -> int
+main:14: note:     Got:
+main:14: note:         def __getitem__(self, str) -> object
+
+[case testTypedDictWithSimpleProtocolInference]
+from typing_extensions import Protocol
+from mypy_extensions import TypedDict
+from typing import TypeVar
+
+T_co = TypeVar('T_co', covariant=True)
+T = TypeVar('T')
+
+class StrMap(Protocol[T_co]):
+    def __getitem__(self, key: str) -> T_co: ...
+
+A = TypedDict('A', {'x': int, 'y': int})
+B = TypedDict('B', {'x': int, 'y': str})
+
+def fun(arg: StrMap[T]) -> T:
+    return arg['whatever']
+a: A
+b: B
+reveal_type(fun(a))  # E: Revealed type is 'builtins.int*'
+reveal_type(fun(b))  # E: Revealed type is 'builtins.object*'
+[builtins fixtures/dict.pyi]
+[out]
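
The two protocol cases above check that a TypedDict is matched structurally against a __getitem__-based protocol: a TypedDict whose values are all ints satisfies an int-returning protocol, while one with mixed value types only matches an object-returning one. A minimal runnable sketch of the same idea (assuming the mypy_extensions and typing_extensions packages are installed, as the test fixtures presuppose):

from mypy_extensions import TypedDict
from typing_extensions import Protocol

class StrIntMap(Protocol):
    def __getitem__(self, key: str) -> int: ...

Point = TypedDict('Point', {'x': int, 'y': int})

def total(m: StrIntMap) -> int:
    # Point is compatible with StrIntMap because all of its values are ints;
    # a TypedDict with a str-valued key would be rejected by mypy.
    return m['x'] + m['y']

print(total(Point(x=1, y=2)))  # prints 3
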
 
 -- Join
 
@@ -693,7 +741,7 @@ class C:
         A = TypedDict('A', {'x': int})
     def g(self):
         A = TypedDict('A', {'y': int})
-C.A  # E: Type[C] has no attribute "A"
+C.A  # E: "Type[C]" has no attribute "A"
 [builtins fixtures/dict.pyi]
 
 [case testTypedDictInFunction]
@@ -847,7 +895,7 @@ class A: pass
 D = TypedDict('D', {'x': List[int], 'y': int})
 d: D
 reveal_type(d.get('x', [])) # E: Revealed type is 'builtins.list[builtins.int]'
-d.get('x', ['x']) # E: List item 0 has incompatible type "str"
+d.get('x', ['x']) # E: List item 0 has incompatible type "str"; expected "int"
 a = ['']
 reveal_type(d.get('x', a)) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str*]]'
 [builtins fixtures/dict.pyi]
@@ -1048,9 +1096,9 @@ c: C
 def f(a: A) -> None: pass
 
 l = [a, b]  # Join generates an anonymous TypedDict
-f(l) # E: Argument 1 to "f" has incompatible type List[TypedDict({'x'?: int})]; expected "A"
+f(l) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x'?: int})]"; expected "A"
 ll = [b, c]
-f(ll) # E: Argument 1 to "f" has incompatible type List[TypedDict({'x'?: int, 'z'?: str})]; expected "A"
+f(ll) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x'?: int, 'z'?: str})]"; expected "A"
 [builtins fixtures/dict.pyi]
 
 
@@ -1132,9 +1180,16 @@ def f(x: int) -> None: ...
 def f(x): pass
 
 a: A
-f(a)  # E: Argument 1 to "f" has incompatible type "A"; expected Iterable[int]
+f(a)
 [builtins fixtures/dict.pyi]
 [typing fixtures/typing-full.pyi]
+[out]
+main:13: error: Argument 1 to "f" has incompatible type "A"; expected "Iterable[int]"
+main:13: note: Following member(s) of "A" have conflicts:
+main:13: note:     Expected:
+main:13: note:         def __iter__(self) -> Iterator[int]
+main:13: note:     Got:
+main:13: note:         def __iter__(self) -> Iterator[str]
 
 [case testTypedDictOverloading3]
 from typing import overload
@@ -1229,7 +1284,7 @@ class C(B): pass
 x: X
 reveal_type(x) # E: Revealed type is 'TypedDict('__main__.X', {'b': __main__.B, 'c': __main__.C})'
 m1: Mapping[str, B] = x
-m2: Mapping[str, C] = x # E: Incompatible types in assignment (expression has type "X", variable has type Mapping[str, C])
+m2: Mapping[str, C] = x # E: Incompatible types in assignment (expression has type "X", variable has type "Mapping[str, C]")
 [builtins fixtures/dict.pyi]
 
 [case testForwardReferenceInClassTypedDict]
@@ -1243,16 +1298,64 @@ class C(B): pass
 x: X
 reveal_type(x) # E: Revealed type is 'TypedDict('__main__.X', {'b': __main__.B, 'c': __main__.C})'
 m1: Mapping[str, B] = x
-m2: Mapping[str, C] = x # E: Incompatible types in assignment (expression has type "X", variable has type Mapping[str, C])
+m2: Mapping[str, C] = x # E: Incompatible types in assignment (expression has type "X", variable has type "Mapping[str, C]")
 [builtins fixtures/dict.pyi]
 
 [case testForwardReferenceToTypedDictInTypedDict]
 from typing import Mapping
 from mypy_extensions import TypedDict
-# Forward references don't quite work yet
-X = TypedDict('X', {'a': 'A'}) # E: Invalid type "__main__.A"
+X = TypedDict('X', {'a': 'A'})
 A = TypedDict('A', {'b': int})
 x: X
 reveal_type(x) # E: Revealed type is 'TypedDict('__main__.X', {'a': TypedDict('__main__.A', {'b': builtins.int})})'
 reveal_type(x['a']['b']) # E: Revealed type is 'builtins.int'
 [builtins fixtures/dict.pyi]
+
+[case testSelfRecursiveTypedDictInheriting]
+from mypy_extensions import TypedDict
+
+class MovieBase(TypedDict):
+    name: str
+    year: int
+
+class Movie(MovieBase): # type: ignore # warning about recursive not fully supported
+    director: 'Movie'
+
+m: Movie
+reveal_type(m['director']['name']) # E: Revealed type is 'builtins.str'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testTypedDictForwardAsUpperBound]
+from typing import TypeVar, Generic
+from mypy_extensions import TypedDict
+T = TypeVar('T', bound='M')
+class G(Generic[T]):
+    x: T
+
+yb: G[int] # E: Type argument "builtins.int" of "G" must be a subtype of "TypedDict({'x': builtins.int}, fallback=typing.Mapping[builtins.str, builtins.object])"
+yg: G[M]
+z: int = G[M]().x['x']
+
+class M(TypedDict):
+    x: int
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testTypedDictWithImportCycleForward]
+import a
+[file a.py]
+from mypy_extensions import TypedDict
+from b import f
+
+N = TypedDict('N', {'a': str})
+[file b.py]
+import a
+
+def f(x: a.N) -> None:
+    reveal_type(x)
+    reveal_type(x['a'])
+[builtins fixtures/dict.pyi]
+[out]
+tmp/b.py:4: error: Revealed type is 'TypedDict('a.N', {'a': builtins.str})'
+tmp/b.py:5: error: Revealed type is 'builtins.str'
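
The cases above lift the earlier restriction on forward references inside TypedDict definitions and exercise TypedDicts across an import cycle. A small sketch of the now-supported pattern (names are illustrative; mypy_extensions is assumed to be installed):

from mypy_extensions import TypedDict

# 'Inner' is referenced by name before it is defined; mypy 0.530
# resolves the forward reference instead of reporting Invalid type.
Outer = TypedDict('Outer', {'inner': 'Inner'})
Inner = TypedDict('Inner', {'count': int})

value: Outer = {'inner': {'count': 42}}
print(value['inner']['count'])  # prints 42
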
diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test
index fa0453e..1dc5d14 100644
--- a/test-data/unit/check-typevar-values.test
+++ b/test-data/unit/check-typevar-values.test
@@ -7,7 +7,7 @@ T = TypeVar('T', int, str)
 def f(x: T) -> None: pass
 f(1)
 f('x')
-f(object()) # E: Type argument 1 of "f" has incompatible value "object"
+f(object()) # E: Value of type variable "T" of "f" cannot be "object"
 
 [case testCallGenericFunctionWithTypeVarValueRestrictionUsingContext]
 from typing import TypeVar, List
@@ -18,7 +18,7 @@ s = ['x']
 o = [object()]
 i = f(1)
 s = f('')
-o = f(1) # E: Type argument 1 of "f" has incompatible value "object"
+o = f(1) # E: Value of type variable "T" of "f" cannot be "object"
 [builtins fixtures/list.pyi]
 
 [case testCallGenericFunctionWithTypeVarValueRestrictionAndAnyArgs]
@@ -228,7 +228,7 @@ X = TypeVar('X', int, str)
 class A(Generic[X]): pass
 a = None  # type: A[int]
 b = None  # type: A[str]
-d = None  # type: A[object] # E: Type argument 1 of "A" has incompatible value "object"
+d = None  # type: A[object] # E: Value of type variable "X" of "A" cannot be "object"
 c = None  # type: A[Any]
 
 [case testConstructGenericTypeWithTypevarValuesAndTypeInference]
@@ -239,7 +239,7 @@ class A(Generic[X]):
 A(1)
 A('x')
 A(cast(Any, object()))
-A(object()) # E: Type argument 1 of "A" has incompatible value "object"
+A(object()) # E: Value of type variable "X" of "A" cannot be "object"
 
 [case testGenericTypeWithTypevarValuesAndTypevarArgument]
 from typing import TypeVar, Generic
@@ -270,7 +270,7 @@ y = C(S())
 x = y
 y = x
 c_int = C(1) # type: C[int]
-y = c_int # E: Incompatible types in assignment (expression has type C[int], variable has type C[str])
+y = c_int # E: Incompatible types in assignment (expression has type "C[int]", variable has type "C[str]")
 
 [case testGenericTypeBodyWithTypevarValues]
 from typing import TypeVar, Generic
@@ -312,6 +312,52 @@ cs = C() # type: C[str]
 cs.x = ''
 cs.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
 
+[case testAttributeInGenericTypeWithTypevarValues3]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class C(Generic[X]):
+    def f(self, x: X) -> None:
+        self.x = x  # type: X
+ci: C[int]
+cs: C[str]
+reveal_type(ci.x) # E: Revealed type is 'builtins.int*'
+reveal_type(cs.x) # E: Revealed type is 'builtins.str*'
+
+[case testAttributeInGenericTypeWithTypevarValuesUsingInference1]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class C(Generic[X]):
+    def f(self, x: X) -> None:
+        self.x = x # E: Need type annotation for variable
+ci: C[int]
+cs: C[str]
+reveal_type(ci.x) # E: Revealed type is 'Any'
+reveal_type(cs.x) # E: Revealed type is 'Any'
+
+[case testAttributeInGenericTypeWithTypevarValuesUsingInference2]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class C(Generic[X]):
+    def f(self, x: X) -> None:
+        self.x = 1
+        reveal_type(self.x) # E: Revealed type is 'builtins.int'
+ci: C[int]
+cs: C[str]
+reveal_type(ci.x) # E: Revealed type is 'builtins.int'
+reveal_type(cs.x) # E: Revealed type is 'builtins.int'
+
+[case testAttributeInGenericTypeWithTypevarValuesUsingInference3]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class C(Generic[X]):
+    x: X
+    def f(self) -> None:
+        self.y = self.x # E: Need type annotation for variable
+ci: C[int]
+cs: C[str]
+reveal_type(ci.y) # E: Revealed type is 'Any'
+reveal_type(cs.y) # E: Revealed type is 'Any'
+
 [case testInferredAttributeInGenericClassBodyWithTypevarValues]
 from typing import TypeVar, Generic
 X = TypeVar('X', int, str)
@@ -344,8 +390,8 @@ Y = TypeVar('Y', int, str)
 class C(Generic[X, Y]): pass
 a = None  # type: C[A, int]
 b = None  # type: C[B, str]
-c = None  # type: C[int, int] # E: Type argument 1 of "C" has incompatible value "int"
-d = None  # type: C[A, A]     # E: Type argument 2 of "C" has incompatible value "A"
+c = None  # type: C[int, int] # E: Value of type variable "X" of "C" cannot be "int"
+d = None  # type: C[A, A]     # E: Value of type variable "Y" of "C" cannot be "A"
 
 [case testCallGenericFunctionUsingMultipleTypevarsWithValues]
 from typing import TypeVar
@@ -356,8 +402,8 @@ Y = TypeVar('Y', int, str)
 def f(x: X, y: Y) -> None: pass
 f(A(), '')
 f(B(), 1)
-f(A(), A()) # E: Type argument 2 of "f" has incompatible value "A"
-f(1, 1) # E: Type argument 1 of "f" has incompatible value "int"
+f(A(), A())  # E: Value of type variable "Y" of "f" cannot be "A"
+f(1, 1)  # E: Value of type variable "X" of "f" cannot be "int"
 
 [case testGenericFunctionWithNormalAndRestrictedTypevar]
 from typing import TypeVar, Generic
@@ -372,7 +418,7 @@ def f(x: X, y: Y, z: int) -> None:
     z = y # Error
     y.foo # Error
 [out]
-main:8: error: Type argument 1 of "C" has incompatible value "X"
+main:8: error: Value of type variable "Y" of "C" cannot be "X"
 main:9: error: Incompatible types in assignment (expression has type "X", variable has type "int")
 main:10: error: Incompatible types in assignment (expression has type "str", variable has type "int")
 main:11: error: "int" has no attribute "foo"
@@ -403,6 +449,19 @@ c(g(1))
 main:6: error: Argument 1 to "c" has incompatible type "str"; expected "ss"
 main:7: error: Argument 1 to "c" has incompatible type "int"; expected "ss"
 
+[case testDefineAttributeInGenericMethodUsingTypeVarWithValues]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+class A:
+    def f(self, x: T) -> None:
+        self.x = x # E: Need type annotation for variable
+        self.y = [x] # E: Need type annotation for variable
+        self.z = 1
+reveal_type(A().x)  # E: Revealed type is 'Any'
+reveal_type(A().y)  # E: Revealed type is 'Any'
+reveal_type(A().z)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/list.pyi]
+
 
 -- Special cases
 -- -------------
@@ -465,7 +524,7 @@ a = f
 a = g
 b = g
 b = g
-b = f # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[U], U])
+b = f # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[U], U]")
 
 [case testInnerFunctionWithTypevarValues]
 from typing import TypeVar
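
The typevar-values changes above mostly reword the diagnostics for value-restricted type variables (now "Value of type variable ... cannot be ...") and add cases where an attribute assigned from a restricted variable needs an explicit annotation. A small sketch of the restricted-TypeVar pattern these cases exercise:

from typing import TypeVar

AnyStr = TypeVar('AnyStr', bytes, str)  # value-restricted type variable

def double(x: AnyStr) -> AnyStr:
    return x + x

print(double('ab'))    # prints abab
print(double(b'ab'))   # prints b'abab'
# double(1.5) would be rejected as:
#   Value of type variable "AnyStr" of "double" cannot be "float"
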
diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test
index 7dfd225..a43c42a 100644
--- a/test-data/unit/check-unions.test
+++ b/test-data/unit/check-unions.test
@@ -496,3 +496,22 @@ if bool():
     reveal_type(x)  # E: Revealed type is 'Any'
 reveal_type(x)  # E: Revealed type is 'Union[builtins.int, Any]'
 [builtins fixtures/bool.pyi]
+
+[case testLongUnionFormatting]
+from typing import Any, Generic, TypeVar, Union
+
+T = TypeVar('T')
+
+class ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes(Generic[T]):
+    pass
+
+x: Union[ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[int],
+         ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[object],
+         ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[float],
+         ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[str],
+         ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[Any],
+         ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[bytes]]
+
+def takes_int(arg: int) -> None: pass
+
+takes_int(x)  # E: Argument 1 to "takes_int" has incompatible type <union: 6 items>; expected "int"
diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test
index acf975b..245bcd2 100644
--- a/test-data/unit/check-unreachable-code.test
+++ b/test-data/unit/check-unreachable-code.test
@@ -84,7 +84,7 @@ main:6: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports"
 import typing
 MYPY = 0
 if MYPY:
-    None + 1 # E: Unsupported left operand type for + (None)
+    None + 1 # E: Unsupported left operand type for + ("None")
 else:
     None + ''
 [builtins fixtures/bool.pyi]
diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test
index 73dddd5..d1c41a7 100644
--- a/test-data/unit/check-varargs.test
+++ b/test-data/unit/check-varargs.test
@@ -10,8 +10,8 @@ from typing import Tuple
 def f( *b: 'B') -> None:
     ab = None # type: Tuple[B, ...]
     ac = None # type: Tuple[C, ...]
-    b = ac # E: Incompatible types in assignment (expression has type Tuple[C, ...], variable has type Tuple[B, ...])
-    ac = b # E: Incompatible types in assignment (expression has type Tuple[B, ...], variable has type Tuple[C, ...])
+    b = ac # E: Incompatible types in assignment (expression has type "Tuple[C, ...]", variable has type "Tuple[B, ...]")
+    ac = b # E: Incompatible types in assignment (expression has type "Tuple[B, ...]", variable has type "Tuple[C, ...]")
     b = ab
     ab = b
 
@@ -108,7 +108,7 @@ it1 = None  # type: Iterable[int]
 it2 = None  # type: Iterable[str]
 def f(*x: int) -> None: pass
 f(*it1)
-f(*it2) # E: Argument 1 to "f" has incompatible type *Iterable[str]; expected "int"
+f(*it2) # E: Argument 1 to "f" has incompatible type "*Iterable[str]"; expected "int"
 [builtins fixtures/for.pyi]
 
 [case testCallVarargsFunctionWithIterableAndPositional]
@@ -208,7 +208,7 @@ class A: pass
 class B: pass
 [builtins fixtures/list.pyi]
 [out]
-main:7: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
+main:7: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B"
 
 [case testCallingWithTupleVarArgs]
 
@@ -217,9 +217,9 @@ b = None # type: B
 c = None # type: C
 cc = None # type: CC
 
-f(*(a, b, b)) # E: Argument 1 to "f" has incompatible type *"Tuple[A, B, B]"; expected "C"
-f(*(b, b, c)) # E: Argument 1 to "f" has incompatible type *"Tuple[B, B, C]"; expected "A"
-f(a, *(b, b)) # E: Argument 2 to "f" has incompatible type *"Tuple[B, B]"; expected "C"
+f(*(a, b, b)) # E: Argument 1 to "f" has incompatible type "*Tuple[A, B, B]"; expected "C"
+f(*(b, b, c)) # E: Argument 1 to "f" has incompatible type "*Tuple[B, B, C]"; expected "A"
+f(a, *(b, b)) # E: Argument 2 to "f" has incompatible type "*Tuple[B, B]"; expected "C"
 f(b, *(b, c)) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
 f(*(a, b))    # E: Too few arguments for "f"
 f(*(a, b, c, c)) # E: Too many arguments for "f"
@@ -277,26 +277,26 @@ class A: pass
 class B: pass
 [builtins fixtures/list.pyi]
 [out]
-main:3: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
-main:4: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
+main:3: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B"
+main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B"
 main:5: error: Argument 1 to "f" has incompatible type "B"; expected "A"
 main:6: error: Argument 2 to "f" has incompatible type "A"; expected "B"
-main:7: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
+main:7: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B"
 main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A"
-main:9: error: Argument 1 to "g" has incompatible type *List[B]; expected "A"
+main:9: error: Argument 1 to "g" has incompatible type "*List[B]"; expected "A"
 
 [case testCallingVarArgsFunctionWithTupleVarArgs]
 
 a, b, c, cc = None, None, None, None # type: (A, B, C, CC)
 
-f(*(b, b, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[B, B, B]"; expected "A"
-f(*(a, a, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, A, B]"; expected "B"
-f(*(a, b, a))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, B, A]"; expected "B"
-f(a, *(a, b))   # E: Argument 2 to "f" has incompatible type *"Tuple[A, B]"; expected "B"
+f(*(b, b, b))   # E: Argument 1 to "f" has incompatible type "*Tuple[B, B, B]"; expected "A"
+f(*(a, a, b))   # E: Argument 1 to "f" has incompatible type "*Tuple[A, A, B]"; expected "B"
+f(*(a, b, a))   # E: Argument 1 to "f" has incompatible type "*Tuple[A, B, A]"; expected "B"
+f(a, *(a, b))   # E: Argument 2 to "f" has incompatible type "*Tuple[A, B]"; expected "B"
 f(b, *(b, b))   # E: Argument 1 to "f" has incompatible type "B"; expected "A"
 f(b, b, *(b,))  # E: Argument 1 to "f" has incompatible type "B"; expected "A"
 f(a, a, *(b,))  # E: Argument 2 to "f" has incompatible type "A"; expected "B"
-f(a, b, *(a,))  # E: Argument 3 to "f" has incompatible type *"Tuple[A]"; expected "B"
+f(a, b, *(a,))  # E: Argument 3 to "f" has incompatible type "*Tuple[A]"; expected "B"
 f(*())          # E: Too few arguments for "f"
 f(*(a, b, b))
 f(a, *(b, b))
@@ -340,7 +340,7 @@ from typing import List
 aa = None # type: List[A]
 ab = None # type: List[B]
 
-g(*aa) # E: Argument 1 to "g" has incompatible type *List[A]; expected "B"
+g(*aa) # E: Argument 1 to "g" has incompatible type "*List[A]"; expected "B"
 f(*aa)
 f(*ab)
 g(*ab)
@@ -377,10 +377,10 @@ class B: pass
 [builtins fixtures/list.pyi]
 [out]
 main:3: error: Too few arguments for "f"
-main:4: error: Argument 2 to "f" has incompatible type *List[A]; expected "Optional[B]"
-main:4: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
-main:5: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
-main:6: error: Argument 1 to "f" has incompatible type *"Tuple[A, A, B]"; expected "Optional[B]"
+main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "Optional[B]"
+main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B"
+main:5: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B"
+main:6: error: Argument 1 to "f" has incompatible type "*Tuple[A, A, B]"; expected "Optional[B]"
 
 [case testVarArgsAfterKeywordArgInCall1-skip]
 # see: mypy issue #2729
@@ -492,11 +492,11 @@ class A: pass
 class B: pass
 [builtins fixtures/list.pyi]
 [out]
-main:6: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
-main:7: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
+main:6: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B"
+main:7: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B"
 main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A"
-main:9: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
-main:10: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
+main:9: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B"
+main:10: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B"
 main:11: error: List or tuple expected as variable arguments
 main:12: error: List or tuple expected as variable arguments
 
@@ -506,9 +506,9 @@ S = TypeVar('S')
 T = TypeVar('T')
 a, b = None, None # type: (A, B)
 
-a, a = f(*(a, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, B]"; expected "A"
+a, a = f(*(a, b))   # E: Argument 1 to "f" has incompatible type "*Tuple[A, B]"; expected "A"
 b, b = f(a, *(b,))  # E: Argument 1 to "f" has incompatible type "A"; expected "B"
-a, a = f(*(a, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, B]"; expected "A"
+a, a = f(*(a, b))   # E: Argument 1 to "f" has incompatible type "*Tuple[A, B]"; expected "A"
 b, b = f(a, *(b,))  # E: Argument 1 to "f" has incompatible type "A"; expected "B"
 a, b = f(*(a, b, b)) # E: Too many arguments for "f"
 
@@ -534,8 +534,8 @@ a, aa = G().f(*[a])  # Fail
 aa, a = G().f(*[a])  # Fail
 ab, aa = G().f(*[a]) # Fail
 
-ao, ao = G().f(*[a]) # E: Incompatible types in assignment (expression has type List[<nothing>], variable has type List[object])
-aa, aa = G().f(*[a]) # E: Incompatible types in assignment (expression has type List[<nothing>], variable has type List[A])
+ao, ao = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "List[object]")
+aa, aa = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "List[A]")
 
 class G(Generic[T]):
     def f(self, *a: S) -> Tuple[List[S], List[T]]:
@@ -545,11 +545,11 @@ class A: pass
 class B: pass
 [builtins fixtures/list.pyi]
 [out]
-main:9: error: Incompatible types in assignment (expression has type List[A], variable has type "A")
-main:9: error: Incompatible types in assignment (expression has type List[<nothing>], variable has type List[A])
-main:10: error: Incompatible types in assignment (expression has type List[<nothing>], variable has type "A")
-main:11: error: Incompatible types in assignment (expression has type List[<nothing>], variable has type List[A])
-main:11: error: Argument 1 to "f" of "G" has incompatible type *List[A]; expected "B"
+main:9: error: Incompatible types in assignment (expression has type "List[A]", variable has type "A")
+main:9: error: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "List[A]")
+main:10: error: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "A")
+main:11: error: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "List[A]")
+main:11: error: Argument 1 to "f" of "G" has incompatible type "*List[A]"; expected "B"
 
 
 -- Comment signatures
@@ -577,7 +577,7 @@ x = None # type: Callable[[int], None]
 def f(*x: int) -> None: pass
 def g(*x: str) -> None: pass
 x = f
-x = g # E: Incompatible types in assignment (expression has type Callable[[VarArg(str)], None], variable has type Callable[[int], None])
+x = g # E: Incompatible types in assignment (expression has type "Callable[[VarArg(str)], None]", variable has type "Callable[[int], None]")
 [builtins fixtures/list.pyi]
 [out]
 
@@ -593,3 +593,48 @@ class C:
     def foo(self) -> None: pass
 C().foo()
 C().foo(1)  # The decorator's return type says this should be okay
+
+[case testInvariantDictArgNote]
+from typing import Dict, Sequence
+def f(x: Dict[str, Sequence[int]]) -> None: pass
+def g(x: Dict[str, float]) -> None: pass
+def h(x: Dict[str, int]) -> None: pass
+a = {'a': [1, 2]}
+b = {'b': ['c', 'd']}
+c = {'c': 1.0}
+d = {'d': 1}
+f(a) # E: Argument 1 to "f" has incompatible type "Dict[str, List[int]]"; expected "Dict[str, Sequence[int]]" \
+     # N: "Dict" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \
+     # N: Consider using "Mapping" instead, which is covariant in the value type
+f(b) # E: Argument 1 to "f" has incompatible type "Dict[str, List[str]]"; expected "Dict[str, Sequence[int]]"
+g(c)
+g(d) # E: Argument 1 to "g" has incompatible type "Dict[str, int]"; expected "Dict[str, float]" \
+     # N: "Dict" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \
+     # N: Consider using "Mapping" instead, which is covariant in the value type
+h(c) # E: Argument 1 to "h" has incompatible type "Dict[str, float]"; expected "Dict[str, int]"
+h(d)
+[builtins fixtures/dict.pyi]
+
+[case testInvariantListArgNote]
+from typing import List, Union
+def f(numbers: List[Union[int, float]]) -> None: pass
+a = [1, 2]
+f(a) # E: Argument 1 to "f" has incompatible type "List[int]"; expected "List[Union[int, float]]" \
+     # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \
+     # N: Consider using "Sequence" instead, which is covariant
+x = [1]
+y = ['a']
+x = y # E: Incompatible types in assignment (expression has type "List[str]", variable has type "List[int]")
+[builtins fixtures/list.pyi]
+
+[case testInvariantTypeConfusingNames]
+from typing import TypeVar
+class Listener: pass
+class DictReader: pass
+def f(x: Listener) -> None: pass
+def g(y: DictReader) -> None: pass
+a = [1, 2]
+b = {'b': 1}
+f(a) # E: Argument 1 to "f" has incompatible type "List[int]"; expected "Listener"
+g(b) # E: Argument 1 to "g" has incompatible type "Dict[str, int]"; expected "DictReader"
+[builtins fixtures/dict.pyi]
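
The new varargs cases above add notes explaining that Dict and List are invariant and suggest the covariant Mapping and Sequence types instead. A sketch of the suggested fix (function and variable names are invented):

from typing import List, Sequence, Union

def mean(numbers: Sequence[Union[int, float]]) -> float:
    # Sequence is covariant in its element type, so a List[int] argument
    # is accepted here; with a List[Union[int, float]] parameter mypy
    # would reject it, because List is invariant.
    return sum(numbers) / len(numbers)

ints: List[int] = [1, 2, 3]
print(mean(ints))  # prints 2.0
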
diff --git a/test-data/unit/check-warnings.test b/test-data/unit/check-warnings.test
index c95baec..ef2f683 100644
--- a/test-data/unit/check-warnings.test
+++ b/test-data/unit/check-warnings.test
@@ -141,7 +141,22 @@ from typing import Any
 def g() -> Any: pass
 def f() -> int: return g()
 [out]
-main:4: warning: Returning Any from function with declared return type "builtins.int"
+main:4: warning: Returning Any from function declared to return "int"
+
+[case testReturnAnyFromTypedFunctionWithSpecificFormatting]
+# flags: --warn-return-any
+from typing import Any, Tuple
+typ = Tuple[int, int, int, int, int, int, int, int, int, int, int, int, int,
+            int, int, int, int, int, int, int, int, int, int, int, int, int,
+            int, int, int, int, int, int, int, int, int, int, int, int, int,
+            int, int, int, int, int, int, int, int, int, int, int, int, int,
+            int, int, int, int, int, int, int, int, int, int, int, int, int,
+            int, int, int, int, int, int, int, int, int, int, int, int, int,
+            int, int, int, int, int, int, int, int, int, int, int, int, int]
+def g() -> Any: pass
+def f() -> typ: return g()
+[out]
+main:11: warning: Returning Any from function declared to return <tuple: 91 items>
 
 [case testReturnAnySilencedFromTypedFunction]
 # flags: --warn-return-any
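
The --warn-return-any message now names the declared return type in quotes and abbreviates very large types (for example <tuple: 91 items>). A minimal sketch of code that triggers the reworded warning (the file name in the comment is only an example):

# Check with: mypy --warn-return-any example.py
from typing import Any

def untyped() -> Any:
    return 42

def typed() -> int:
    # mypy 0.530 reports here:
    #   warning: Returning Any from function declared to return "int"
    return untyped()

print(typed())  # prints 42
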
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test
index 098f81d..1bd5b9c 100644
--- a/test-data/unit/cmdline.test
+++ b/test-data/unit/cmdline.test
@@ -806,7 +806,7 @@ Baz = NewType('Baz', Any)  # this error does not come from `--disallow-any=expli
 Bar = NewType('Bar', List[Any])
 
 [out]
-m.py:3: error: Argument 2 to NewType(...) must be subclassable (got Any)
+m.py:3: error: Argument 2 to NewType(...) must be subclassable (got "Any")
 m.py:4: error: Explicit "Any" is not allowed
 
 [case testDisallowAnyExplicitTypedDictSimple]
@@ -1031,3 +1031,50 @@ m.py:4: error: Missing type parameters for generic type
 m.py:5: error: Missing type parameters for generic type
 m.py:6: error: Missing type parameters for generic type
 m.py:7: error: Missing type parameters for generic type
+
+[case testDisallowSubclassingAny]
+# cmd: mypy m.py y.py
+[file mypy.ini]
+[[mypy]
+disallow_subclassing_any = True
+[[mypy-m]
+disallow_subclassing_any = False
+
+[file m.py]
+from typing import Any
+
+x = None  # type: Any
+
+class ShouldBeFine(x): ...
+
+[file y.py]
+from typing import Any
+
+x = None  # type: Any
+
+class ShouldNotBeFine(x): ...
+[out]
+y.py:5: error: Class cannot subclass 'x' (has type 'Any')
+
+[case testDeterministicSectionOrdering]
+# cmd: mypy a
+[file a/__init__.py]
+[file a/b/__init__.py]
+[file a/b/c/__init__.py]
+[file a/b/c/d/__init__.py]
+[file a/b/c/d/e/__init__.py]
+0()
+[file mypy.ini]
+[[mypy]
+[[mypy-a.*]
+ignore_errors = True
+[[mypy-a.b.*]
+ignore_errors = True
+[[mypy-a.b.c.*]
+ignore_errors = True
+[[mypy-a.b.c.d.*]
+ignore_errors = True
+[[mypy-a.b.c.d.e]
+ignore_errors = False
+[out]
+a/b/c/d/e/__init__.py:1: error: "int" not callable
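
The new cmdline cases above check per-module configuration: disallow_subclassing_any can be switched off for a single module, and overlapping [mypy-...] sections are applied in a deterministic order with the most specific section winning. A sketch of the code pattern the first case exercises (module and class names are invented):

from typing import Any

Base: Any = object  # something typed as Any, e.g. from an untyped import

class Derived(Base):  # flagged only in modules where
    pass              # disallow_subclassing_any is in effect

print(isinstance(Derived(), object))  # prints True
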
diff --git a/test-data/unit/fixtures/async_await.pyi b/test-data/unit/fixtures/async_await.pyi
index 42c1b53..dcd25de 100644
--- a/test-data/unit/fixtures/async_await.pyi
+++ b/test-data/unit/fixtures/async_await.pyi
@@ -1,7 +1,8 @@
 import typing
 
 T = typing.TypeVar('T')
-class list(typing.Generic[T], typing.Sequence[T]): pass
+U = typing.TypeVar('U')
+class list(typing.Sequence[T]): pass
 
 class object:
     def __init__(self): pass
@@ -9,9 +10,9 @@ class type: pass
 class function: pass
 class int: pass
 class str: pass
-class dict: pass
-class set: pass
-class tuple: pass
+class dict(typing.Generic[T, U]): pass
+class set(typing.Generic[T]): pass
+class tuple(typing.Generic[T]): pass
 class BaseException: pass
 class StopIteration(BaseException): pass
 class StopAsyncIteration(BaseException): pass
diff --git a/test-data/unit/fixtures/bool.pyi b/test-data/unit/fixtures/bool.pyi
index c4b4f30..a1d1b9c 100644
--- a/test-data/unit/fixtures/bool.pyi
+++ b/test-data/unit/fixtures/bool.pyi
@@ -1,10 +1,12 @@
 # builtins stub used in boolean-related test cases.
+from typing import Generic, TypeVar
+T = TypeVar('T')
 
 class object:
     def __init__(self) -> None: pass
 
 class type: pass
-class tuple: pass
+class tuple(Generic[T]): pass
 class function: pass
 class bool: pass
 class int: pass
diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi
index 4182afb..cf8b61f 100644
--- a/test-data/unit/fixtures/dict.pyi
+++ b/test-data/unit/fixtures/dict.pyi
@@ -11,11 +11,12 @@ class object:
 
 class type: pass
 
-class dict(Mapping[KT, VT], Iterable[KT], Generic[KT, VT]):
+class dict(Generic[KT, VT]):
     @overload
     def __init__(self, **kwargs: VT) -> None: pass
     @overload
     def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass
+    def __getitem__(self, key: KT) -> VT: pass
     def __setitem__(self, k: KT, v: VT) -> None: pass
     def __iter__(self) -> Iterator[KT]: pass
     def update(self, a: Mapping[KT, VT]) -> None: pass
@@ -23,6 +24,7 @@ class dict(Mapping[KT, VT], Iterable[KT], Generic[KT, VT]):
     def get(self, k: KT) -> Optional[VT]: pass
     @overload
     def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass
+    def __len__(self) -> int: ...
 
 class int: # for convenience
     def __add__(self, x: int) -> int: pass
@@ -30,15 +32,16 @@ class int: # for convenience
 class str: pass # for keyword argument key type
 class unicode: pass # needed for py2 docstrings
 
-class list(Iterable[T], Generic[T]): # needed by some test cases
+class list(Generic[T]): # needed by some test cases
     def __getitem__(self, x: int) -> T: pass
     def __iter__(self) -> Iterator[T]: pass
     def __mul__(self, x: int) -> list[T]: pass
 
-class tuple: pass
+class tuple(Generic[T]): pass
 class function: pass
 class float: pass
 class bool: pass
 
 class ellipsis: pass
+def isinstance(x: object, t: Union[type, Tuple]) -> bool: pass
 class BaseException: pass
diff --git a/test-data/unit/fixtures/exception.pyi b/test-data/unit/fixtures/exception.pyi
index 5a2482d..999a737 100644
--- a/test-data/unit/fixtures/exception.pyi
+++ b/test-data/unit/fixtures/exception.pyi
@@ -1,9 +1,11 @@
+from typing import Generic, TypeVar
+T = TypeVar('T')
 
 class object:
     def __init__(self): pass
 
 class type: pass
-class tuple: pass
+class tuple(Generic[T]): pass
 class function: pass
 class int: pass
 class str: pass
diff --git a/test-data/unit/fixtures/fine_grained.pyi b/test-data/unit/fixtures/fine_grained.pyi
index 83429cd..b2e104c 100644
--- a/test-data/unit/fixtures/fine_grained.pyi
+++ b/test-data/unit/fixtures/fine_grained.pyi
@@ -4,6 +4,9 @@
 #       enough to handle them.
 
 import types
+from typing import TypeVar, Generic
+
+T = TypeVar('T')
 
 class Any: pass
 
@@ -20,7 +23,7 @@ class str:
 
 class float: pass
 class bytes: pass
-class tuple: pass
+class tuple(Generic[T]): pass
 class function: pass
 class ellipsis: pass
-class list: pass
+class list(Generic[T]): pass
diff --git a/test-data/unit/fixtures/float.pyi b/test-data/unit/fixtures/float.pyi
index 38bdc08..1126d61 100644
--- a/test-data/unit/fixtures/float.pyi
+++ b/test-data/unit/fixtures/float.pyi
@@ -1,3 +1,6 @@
+from typing import Generic, TypeVar
+T = TypeVar('T')
+
 Any = 0
 
 class object:
@@ -12,7 +15,7 @@ class str:
 
 class bytes: pass
 
-class tuple: pass
+class tuple(Generic[T]): pass
 class function: pass
 
 class ellipsis: pass
diff --git a/test-data/unit/fixtures/floatdict.pyi b/test-data/unit/fixtures/floatdict.pyi
index 9a34f8d..54850d7 100644
--- a/test-data/unit/fixtures/floatdict.pyi
+++ b/test-data/unit/fixtures/floatdict.pyi
@@ -18,7 +18,7 @@ class str:
 
 class bytes: pass
 
-class tuple: pass
+class tuple(Generic[T]): pass
 class function: pass
 
 class ellipsis: pass
diff --git a/test-data/unit/fixtures/for.pyi b/test-data/unit/fixtures/for.pyi
index 4762806..8b8ce1c 100644
--- a/test-data/unit/fixtures/for.pyi
+++ b/test-data/unit/fixtures/for.pyi
@@ -9,7 +9,7 @@ class object:
     def __init__(self) -> None: pass
 
 class type: pass
-class tuple: pass
+class tuple(Generic[t]): pass
 class function: pass
 class bool: pass
 class int: pass # for convenience
diff --git a/test-data/unit/fixtures/isinstancelist.pyi b/test-data/unit/fixtures/isinstancelist.pyi
index 5ee49b8..99aca1b 100644
--- a/test-data/unit/fixtures/isinstancelist.pyi
+++ b/test-data/unit/fixtures/isinstancelist.pyi
@@ -1,4 +1,4 @@
-from typing import Iterable, Iterator, TypeVar, List, Mapping, overload, Tuple, Set, Union
+from typing import Iterable, Iterator, TypeVar, List, Mapping, overload, Tuple, Set, Union, Generic
 
 class object:
     def __init__(self) -> None: pass
@@ -6,7 +6,6 @@ class object:
 class type:
     def __init__(self, x) -> None: pass
 
-class tuple: pass
 class function: pass
 class ellipsis: pass
 
@@ -24,14 +23,17 @@ T = TypeVar('T')
 KT = TypeVar('KT')
 VT = TypeVar('VT')
 
-class list(Iterable[T]):
+class tuple(Generic[T]):
+    def __len__(self) -> int: pass
+
+class list(Generic[T]):
     def __iter__(self) -> Iterator[T]: pass
     def __mul__(self, x: int) -> list[T]: pass
     def __setitem__(self, x: int, v: T) -> None: pass
     def __getitem__(self, x: int) -> T: pass
     def __add__(self, x: List[T]) -> T: pass
 
-class dict(Iterable[KT], Mapping[KT, VT]):
+class dict(Mapping[KT, VT]):
     @overload
     def __init__(self, **kwargs: VT) -> None: pass
     @overload
@@ -40,7 +42,7 @@ class dict(Iterable[KT], Mapping[KT, VT]):
     def __iter__(self) -> Iterator[KT]: pass
     def update(self, a: Mapping[KT, VT]) -> None: pass
 
-class set(Iterable[T]):
+class set(Generic[T]):
     def __iter__(self) -> Iterator[T]: pass
     def add(self, x: T) -> None: pass
     def discard(self, x: T) -> None: pass
diff --git a/test-data/unit/fixtures/list.pyi b/test-data/unit/fixtures/list.pyi
index d5d1000..7b6d1db 100644
--- a/test-data/unit/fixtures/list.pyi
+++ b/test-data/unit/fixtures/list.pyi
@@ -10,7 +10,7 @@ class object:
 class type: pass
 class ellipsis: pass
 
-class list(Iterable[T], Generic[T]):
+class list(Generic[T]):
     @overload
     def __init__(self) -> None: pass
     @overload
diff --git a/test-data/unit/fixtures/module.pyi b/test-data/unit/fixtures/module.pyi
index 44a4dfe..ac1d368 100644
--- a/test-data/unit/fixtures/module.pyi
+++ b/test-data/unit/fixtures/module.pyi
@@ -13,7 +13,7 @@ class function: pass
 class int: pass
 class str: pass
 class bool: pass
-class tuple: pass
+class tuple(Generic[T]): pass
 class dict(Generic[T, S]): pass
 class ellipsis: pass
 
diff --git a/test-data/unit/fixtures/module_all.pyi b/test-data/unit/fixtures/module_all.pyi
index 2ab6bc6..87959fe 100644
--- a/test-data/unit/fixtures/module_all.pyi
+++ b/test-data/unit/fixtures/module_all.pyi
@@ -14,5 +14,5 @@ class list(Generic[_T], Sequence[_T]):
     def append(self, x: _T): pass
     def extend(self, x: Sequence[_T]): pass
     def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass
-class tuple: pass
+class tuple(Generic[_T]): pass
 class ellipsis: pass
diff --git a/test-data/unit/fixtures/module_all_python2.pyi b/test-data/unit/fixtures/module_all_python2.pyi
index 5a48e60..989333c 100644
--- a/test-data/unit/fixtures/module_all_python2.pyi
+++ b/test-data/unit/fixtures/module_all_python2.pyi
@@ -12,4 +12,4 @@ class list(Generic[_T], Sequence[_T]):
     def append(self, x: _T): pass
     def extend(self, x: Sequence[_T]): pass
     def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass
-class tuple: pass
+class tuple(Generic[_T]): pass
diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi
index 4b5611b..6cedba3 100644
--- a/test-data/unit/fixtures/primitives.pyi
+++ b/test-data/unit/fixtures/primitives.pyi
@@ -1,4 +1,6 @@
 # builtins stub with non-generic primitive types
+from typing import Generic, TypeVar
+T = TypeVar('T')
 
 class object:
     def __init__(self) -> None: pass
@@ -17,5 +19,5 @@ class str:
     def format(self, *args) -> str: pass
 class bytes: pass
 class bytearray: pass
-class tuple: pass
+class tuple(Generic[T]): pass
 class function: pass
diff --git a/test-data/unit/fixtures/set.pyi b/test-data/unit/fixtures/set.pyi
index cb8bbcf..79d53e8 100644
--- a/test-data/unit/fixtures/set.pyi
+++ b/test-data/unit/fixtures/set.pyi
@@ -8,7 +8,7 @@ class object:
     def __init__(self) -> None: pass
 
 class type: pass
-class tuple: pass
+class tuple(Generic[T]): pass
 class function: pass
 
 class int: pass
diff --git a/test-data/unit/fixtures/slice.pyi b/test-data/unit/fixtures/slice.pyi
index c01ffbb..47c1999 100644
--- a/test-data/unit/fixtures/slice.pyi
+++ b/test-data/unit/fixtures/slice.pyi
@@ -1,10 +1,12 @@
 # Builtins stub used in slicing test cases.
+from typing import Generic, TypeVar
+T = TypeVar('T')
 
 class object:
     def __init__(self): pass
 
 class type: pass
-class tuple: pass
+class tuple(Generic[T]): pass
 class function: pass
 
 class int: pass
diff --git a/test-data/unit/fixtures/type.pyi b/test-data/unit/fixtures/type.pyi
index 7600021..4a2dcac 100644
--- a/test-data/unit/fixtures/type.pyi
+++ b/test-data/unit/fixtures/type.pyi
@@ -13,7 +13,7 @@ class list(Generic[T]): pass
 class type:
     def mro(self) -> List['type']: pass
 
-class tuple: pass
+class tuple(Generic[T]): pass
 class function: pass
 class bool: pass
 class int: pass
diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi
index d43e340..62fac70 100644
--- a/test-data/unit/fixtures/typing-full.pyi
+++ b/test-data/unit/fixtures/typing-full.pyi
@@ -17,6 +17,7 @@ Union = 0
 Optional = 0
 TypeVar = 0
 Generic = 0
+Protocol = 0
 Tuple = 0
 Callable = 0
 _promote = 0
@@ -33,33 +34,42 @@ Dict = 0
 Set = 0
 
 T = TypeVar('T')
+T_co = TypeVar('T_co', covariant=True)
+T_contra = TypeVar('T_contra', contravariant=True)
 U = TypeVar('U')
 V = TypeVar('V')
 S = TypeVar('S')
 
-class Container(Generic[T]):
+# Note: definitions below are different from typeshed, variances are declared
+# to silence the protocol variance checks. Maybe it is better to use type: ignore?
+
+ at runtime
+class Container(Protocol[T_contra]):
     @abstractmethod
     # Use int because bool isn't in the default test builtins
-    def __contains__(self, arg: T) -> int: pass
+    def __contains__(self, arg: T_contra) -> int: pass
 
-class Sized:
+ at runtime
+class Sized(Protocol):
     @abstractmethod
     def __len__(self) -> int: pass
 
-class Iterable(Generic[T]):
+ at runtime
+class Iterable(Protocol[T_co]):
     @abstractmethod
-    def __iter__(self) -> 'Iterator[T]': pass
+    def __iter__(self) -> 'Iterator[T_co]': pass
 
-class Iterator(Iterable[T], Generic[T]):
+ at runtime
+class Iterator(Iterable[T_co], Protocol):
     @abstractmethod
-    def __next__(self) -> T: pass
+    def __next__(self) -> T_co: pass
 
 class Generator(Iterator[T], Generic[T, U, V]):
     @abstractmethod
     def send(self, value: U) -> T: pass
 
     @abstractmethod
-    def throw(self, typ: Any, val: Any=None, tb=None) -> None: pass
+    def throw(self, typ: Any, val: Any=None, tb: Any=None) -> None: pass
 
     @abstractmethod
     def close(self) -> None: pass
@@ -83,38 +93,52 @@ class AsyncGenerator(AsyncIterator[T], Generic[T, U]):
     @abstractmethod
     def __aiter__(self) -> 'AsyncGenerator[T, U]': pass
 
-class Awaitable(Generic[T]):
+ at runtime
+class Awaitable(Protocol[T]):
     @abstractmethod
     def __await__(self) -> Generator[Any, Any, T]: pass
 
 class AwaitableGenerator(Generator[T, U, V], Awaitable[V], Generic[T, U, V, S]):
     pass
 
-class AsyncIterable(Generic[T]):
+ at runtime
+class AsyncIterable(Protocol[T]):
     @abstractmethod
     def __aiter__(self) -> 'AsyncIterator[T]': pass
 
-class AsyncIterator(AsyncIterable[T], Generic[T]):
+ at runtime
+class AsyncIterator(AsyncIterable[T], Protocol):
     def __aiter__(self) -> 'AsyncIterator[T]': return self
     @abstractmethod
     def __anext__(self) -> Awaitable[T]: pass
 
-class Sequence(Iterable[T], Generic[T]):
+ at runtime
+class Sequence(Iterable[T_co], Protocol):
     @abstractmethod
-    def __getitem__(self, n: Any) -> T: pass
+    def __getitem__(self, n: Any) -> T_co: pass
 
-class Mapping(Iterable[T], Sized, Generic[T, U]):
+ at runtime
+class Mapping(Iterable[T], Protocol[T, T_co]):
+    def __getitem__(self, key: T) -> T_co: pass
     @overload
-    def get(self, k: T) -> Optional[U]: ...
+    def get(self, k: T) -> Optional[T_co]: pass
     @overload
-    def get(self, k: T, default: Union[U, V]) -> Union[U, V]: ...
-    def values(self) -> Iterable[U]: pass  # Approximate return type
+    def get(self, k: T, default: Union[T_co, V]) -> Union[T_co, V]: pass
+    def values(self) -> Iterable[T_co]: pass  # Approximate return type
     def __len__(self) -> int: ...
 
-class MutableMapping(Mapping[T, U]): pass
+ at runtime
+class MutableMapping(Mapping[T, U], Protocol):
+    def __setitem__(self, k: T, v: U) -> None: pass
+
+class SupportsInt(Protocol):
+    def __int__(self) -> int: pass
+
+def runtime(cls: T) -> T:
+    return cls
 
 class ContextManager(Generic[T]):
-    def __enter__(self) -> T: ...
-    def __exit__(self, exc_type, exc_value, traceback): ...
+    def __enter__(self) -> T: pass
+    def __exit__(self, exc_type, exc_value, traceback): pass
 
 TYPE_CHECKING = 1
diff --git a/test-data/unit/fixtures/union.pyi b/test-data/unit/fixtures/union.pyi
index 78a41f9..489e3dd 100644
--- a/test-data/unit/fixtures/union.pyi
+++ b/test-data/unit/fixtures/union.pyi
@@ -1,7 +1,8 @@
 # Builtins stub used in tuple-related test cases.
 
 from isinstance import isinstance
-from typing import Iterable, TypeVar
+from typing import Iterable, TypeVar, Generic
+T = TypeVar('T')
 
 class object:
     def __init__(self): pass
@@ -9,9 +10,7 @@ class object:
 class type: pass
 class function: pass
 
-# Current tuple types get special treatment in the type checker, thus there
-# is no need for type arguments here.
-class tuple: pass
+class tuple(Generic[T]): pass
 
 # We need int for indexing tuples.
 class int: pass
diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi
index 457bea0..87b50f5 100644
--- a/test-data/unit/lib-stub/builtins.pyi
+++ b/test-data/unit/lib-stub/builtins.pyi
@@ -15,7 +15,6 @@ class bytes: pass
 
 class tuple: pass
 class function: pass
-
 class ellipsis: pass
 
 # Definition of None is implicit
diff --git a/test-data/unit/lib-stub/contextlib.pyi b/test-data/unit/lib-stub/contextlib.pyi
new file mode 100644
index 0000000..fa4760c
--- /dev/null
+++ b/test-data/unit/lib-stub/contextlib.pyi
@@ -0,0 +1,10 @@
+from typing import Generic, TypeVar, Callable, Iterator
+from typing import ContextManager as ContextManager
+
+_T = TypeVar('_T')
+
+class GeneratorContextManager(ContextManager[_T], Generic[_T]):
+    def __call__(self, func: Callable[..., _T]) -> Callable[..., _T]: ...
+
+def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., GeneratorContextManager[_T]]:
+    ...
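
The new contextlib lib-stub gives the test suite a typed contextmanager decorator returning a GeneratorContextManager. A sketch of the decorator usage the stub is meant to type (the names passed in are only examples):

from contextlib import contextmanager
from typing import Iterator

@contextmanager
def announced(name: str) -> Iterator[str]:
    # The stub types the decorated callable as returning
    # GeneratorContextManager[str].
    print('enter', name)
    yield name
    print('exit', name)

with announced('demo') as value:
    print('inside', value)  # prints: inside demo
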
diff --git a/test-data/unit/lib-stub/six.pyi b/test-data/unit/lib-stub/six.pyi
index a6faa32..97dbeab 100644
--- a/test-data/unit/lib-stub/six.pyi
+++ b/test-data/unit/lib-stub/six.pyi
@@ -1,2 +1,3 @@
-from typing import Type
+from typing import Type, Callable
 def with_metaclass(mcls: Type[type], *args: type) -> type: pass
+def add_metaclass(mcls: Type[type]) -> Callable[[type], type]: pass
diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi
index 02412c7..8be4abc 100644
--- a/test-data/unit/lib-stub/typing.pyi
+++ b/test-data/unit/lib-stub/typing.pyi
@@ -12,6 +12,7 @@ Union = 0
 Optional = 0
 TypeVar = 0
 Generic = 0
+Protocol = 0  # This is not yet defined in typeshed, see PR typeshed/#1220
 Tuple = 0
 Callable = 0
 _promote = 0
@@ -28,37 +29,57 @@ Dict = 0
 Set = 0
 
 T = TypeVar('T')
+T_co = TypeVar('T_co', covariant=True)
+T_contra = TypeVar('T_contra', contravariant=True)
 U = TypeVar('U')
 V = TypeVar('V')
 S = TypeVar('S')
 
-class Container(Generic[T]):
+# Note: definitions below are different from typeshed, variances are declared
+# to silence the protocol variance checks. Maybe it is better to use type: ignore?
+
+ at runtime
+class Container(Protocol[T_contra]):
     @abstractmethod
     # Use int because bool isn't in the default test builtins
-    def __contains__(self, arg: T) -> int: pass
+    def __contains__(self, arg: T_contra) -> int: pass
 
-class Sized:
+ at runtime
+class Sized(Protocol):
     @abstractmethod
     def __len__(self) -> int: pass
 
-class Iterable(Generic[T]):
+ at runtime
+class Iterable(Protocol[T_co]):
     @abstractmethod
-    def __iter__(self) -> 'Iterator[T]': pass
+    def __iter__(self) -> 'Iterator[T_co]': pass
 
-class Iterator(Iterable[T], Generic[T]):
+ at runtime
+class Iterator(Iterable[T_co], Protocol):
     @abstractmethod
-    def __next__(self) -> T: pass
+    def __next__(self) -> T_co: pass
 
 class Generator(Iterator[T], Generic[T, U, V]):
     @abstractmethod
     def __iter__(self) -> 'Generator[T, U, V]': pass
 
-class Sequence(Iterable[T], Generic[T]):
+ at runtime
+class Sequence(Iterable[T_co], Protocol):
     @abstractmethod
-    def __getitem__(self, n: Any) -> T: pass
+    def __getitem__(self, n: Any) -> T_co: pass
+
+ at runtime
+class Mapping(Protocol[T_contra, T_co]):
+    def __getitem__(self, key: T_contra) -> T_co: pass
+
+ at runtime
+class MutableMapping(Mapping[T_contra, U], Protocol):
+    def __setitem__(self, k: T_contra, v: U) -> None: pass
 
-class Mapping(Generic[T, U]): pass
+class SupportsInt(Protocol):
+    def __int__(self) -> int: pass
 
-class MutableMapping(Generic[T, U]): pass
+def runtime(cls: T) -> T:
+    return cls
 
 TYPE_CHECKING = 1
diff --git a/test-data/unit/lib-stub/typing_extensions.pyi b/test-data/unit/lib-stub/typing_extensions.pyi
new file mode 100644
index 0000000..8c5be8f
--- /dev/null
+++ b/test-data/unit/lib-stub/typing_extensions.pyi
@@ -0,0 +1,6 @@
+from typing import TypeVar
+
+_T = TypeVar('_T')
+
+class Protocol: pass
+def runtime(x: _T) -> _T: pass
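
The new typing_extensions lib-stub exposes Protocol and the runtime decorator used throughout the protocol tests. A hedged sketch of the feature it supports (assuming the real typing_extensions package of this era, where runtime is the decorator later renamed runtime_checkable):

from typing_extensions import Protocol, runtime

@runtime
class SupportsClose(Protocol):
    def close(self) -> None: ...

class Resource:
    def close(self) -> None:
        print('closed')

r: SupportsClose = Resource()        # structural match, no inheritance needed
print(isinstance(r, SupportsClose))  # prints True thanks to @runtime
r.close()                            # prints closed
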
diff --git a/test-data/unit/parse.test b/test-data/unit/parse.test
index 39cbd58..417d37c 100644
--- a/test-data/unit/parse.test
+++ b/test-data/unit/parse.test
@@ -2480,7 +2480,7 @@ class Foo(metaclass=Bar): pass
 MypyFile:1(
   ClassDef:1(
     Foo
-    Metaclass(Bar)
+    Metaclass(NameExpr(Bar))
     PassStmt:1()))
 
 [case testQualifiedMetaclass]
@@ -2489,7 +2489,9 @@ class Foo(metaclass=foo.Bar): pass
 MypyFile:1(
   ClassDef:1(
     Foo
-    Metaclass(foo.Bar)
+    Metaclass(MemberExpr:1(
+      NameExpr(foo)
+      Bar))
     PassStmt:1()))
 
 [case testBaseAndMetaclass]
@@ -2498,7 +2500,7 @@ class Foo(foo.bar[x], metaclass=Bar): pass
 MypyFile:1(
   ClassDef:1(
     Foo
-    Metaclass(Bar)
+    Metaclass(NameExpr(Bar))
     BaseTypeExpr(
       IndexExpr:1(
         MemberExpr:1(
@@ -2521,7 +2523,7 @@ class Foo(_root=None, metaclass=Bar): pass
 MypyFile:1(
   ClassDef:1(
     Foo
-    Metaclass(Bar)
+    Metaclass(NameExpr(Bar))
     PassStmt:1()))
 
 [case testClassKeywordArgsAfterMeta]
@@ -2530,7 +2532,7 @@ class Foo(metaclass=Bar, _root=None): pass
 MypyFile:1(
   ClassDef:1(
     Foo
-    Metaclass(Bar)
+    Metaclass(NameExpr(Bar))
     PassStmt:1()))
 
 [case testNamesThatAreNoLongerKeywords]
diff --git a/test-data/unit/plugins/type_anal_hook.py b/test-data/unit/plugins/type_anal_hook.py
index 0e7a0ee..06f2e60 100644
--- a/test-data/unit/plugins/type_anal_hook.py
+++ b/test-data/unit/plugins/type_anal_hook.py
@@ -1,7 +1,7 @@
 from typing import Optional, Callable
 
 from mypy.plugin import Plugin, AnalyzeTypeContext
-from mypy.types import Type, UnboundType, TypeList, AnyType, NoneTyp, CallableType
+from mypy.types import Type, UnboundType, TypeList, AnyType, NoneTyp, CallableType, TypeOfAny
 
 
 class TypeAnalyzePlugin(Plugin):
@@ -16,13 +16,13 @@ def signal_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:
     if (len(ctx.type.args) != 1
             or not isinstance(ctx.type.args[0], TypeList)):
         ctx.api.fail('Invalid "Signal" type (expected "Signal[[t, ...]]")', ctx.context)
-        return AnyType()
+        return AnyType(TypeOfAny.from_error)
 
     args = ctx.type.args[0]
     assert isinstance(args, TypeList)
     analyzed = ctx.api.analyze_callable_args(args)
     if analyzed is None:
-        return AnyType()  # Error generated elsewhere
+        return AnyType(TypeOfAny.from_error)  # Error generated elsewhere
     arg_types, arg_kinds, arg_names = analyzed
     arg_types = [ctx.api.analyze_type(arg) for arg in arg_types]
     type_arg = CallableType(arg_types,
diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test
index 6d16903..b7f78be 100644
--- a/test-data/unit/pythoneval-asyncio.test
+++ b/test-data/unit/pythoneval-asyncio.test
@@ -339,7 +339,7 @@ loop.run_until_complete(future)
 print(future.result())
 loop.close()
 [out]
-_program.py:12: error: Argument 1 to "slow_operation" has incompatible type Future[str]; expected Future[int]
+_program.py:12: error: Argument 1 to "slow_operation" has incompatible type "Future[str]"; expected "Future[int]"
 
 [case testErrorUsingDifferentFutureTypeAndSetFutureDifferentInternalType]
 from typing import Generator, Any
@@ -359,7 +359,7 @@ print(future.result())
 loop.close()
 [out]
 _program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "str"; expected "int"
-_program.py:12: error: Argument 1 to "slow_operation" has incompatible type Future[str]; expected Future[int]
+_program.py:12: error: Argument 1 to "slow_operation" has incompatible type "Future[str]"; expected "Future[int]"
 
 [case testErrorSettingCallbackWithDifferentFutureType]
 import typing
@@ -386,7 +386,7 @@ try:
 finally:
     loop.close()
 [out]
-_program.py:18: error: Argument 1 to "add_done_callback" of "Future" has incompatible type Callable[[Future[int]], None]; expected Callable[[Future[str]], Any]
+_program.py:18: error: Argument 1 to "add_done_callback" of "Future" has incompatible type "Callable[[Future[int]], None]"; expected "Callable[[Future[str]], Any]"
 
 [case testErrorOneMoreFutureInReturnType]
 import typing
@@ -422,7 +422,7 @@ loop = asyncio.get_event_loop()
 loop.run_until_complete(h())
 loop.close()
 [out]
-_program.py:18: error: Incompatible return value type (got Future[Future[int]], expected Future[Future[Future[int]]])
+_program.py:18: error: Incompatible return value type (got "Future[Future[int]]", expected "Future[Future[Future[int]]]")
 
 [case testErrorOneLessFutureInReturnType]
 import typing
@@ -456,7 +456,7 @@ loop = asyncio.get_event_loop()
 loop.run_until_complete(h())
 loop.close()
 [out]
-_program.py:18: error: Incompatible return value type (got Future[Future[int]], expected Future[int])
+_program.py:18: error: Incompatible return value type (got "Future[Future[int]]", expected "Future[int]")
 
 [case testErrorAssignmentDifferentType]
 import typing
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
index 2287e0e..80bbaee 100644
--- a/test-data/unit/pythoneval.test
+++ b/test-data/unit/pythoneval.test
@@ -391,7 +391,7 @@ txt(sys.stdout)
 bin(sys.stdout)
 [out]
 _program.py:5: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str"
-_program.py:10: error: Argument 1 to "bin" has incompatible type "TextIO"; expected IO[bytes]
+_program.py:10: error: Argument 1 to "bin" has incompatible type "TextIO"; expected "IO[bytes]"
 
 [case testBuiltinOpen]
 f = open('x')
@@ -603,7 +603,7 @@ print(tuple(a))
 import typing
 [1] + iter([2, 3])
 [out]
-_program.py:2: error: Unsupported operand types for + (List[int] and Iterator[int])
+_program.py:2: error: Unsupported operand types for + ("List[int]" and "Iterator[int]")
 
 [case testInferHeterogeneousListOfIterables]
 from typing import Sequence
@@ -784,7 +784,7 @@ def f(*args: str) -> str: return args[0]
 map(f, ['x'])
 map(f, [1])
 [out]
-_program.py:4: error: Argument 1 to "map" has incompatible type Callable[[VarArg(str)], str]; expected Callable[[int], str]
+_program.py:4: error: Argument 1 to "map" has incompatible type "Callable[[VarArg(str)], str]"; expected "Callable[[int], str]"
 
 [case testMapStr]
 import typing
@@ -792,7 +792,7 @@ x = range(3)
 a = list(map(str, x))
 a + 1
 [out]
-_program.py:4: error: Unsupported operand types for + (List[str] and "int")
+_program.py:4: error: Unsupported operand types for + ("List[str]" and "int")
 
 [case testNamedTuple]
 import typing
@@ -994,7 +994,7 @@ def f(*x: int) -> None:
     x.append(1)
 f(1)
 [out]
-_program.py:3: error: Tuple[int, ...] has no attribute "append"
+_program.py:3: error: "Tuple[int, ...]" has no attribute "append"
 
 [case testExit]
 print('a')
@@ -1075,14 +1075,14 @@ n = 4
 t = ('',) * n
 t + 1
 [out]
-_program.py:3: error: Unsupported operand types for + (Tuple[str, ...] and "int")
+_program.py:3: error: Unsupported operand types for + ("Tuple[str, ...]" and "int")
 
 [case testMultiplyTupleByIntegerReverse]
 n = 4
 t = n * ('',)
 t + 1
 [out]
-_program.py:3: error: Unsupported operand types for + (Tuple[str, ...] and "int")
+_program.py:3: error: Unsupported operand types for + ("Tuple[str, ...]" and "int")
 
 [case testDictWithKeywordArgs]
 from typing import Dict, Any, List
@@ -1093,8 +1093,8 @@ d3.xyz # E
 d4 = dict(a=1, b='') # type: Dict[str, Any]
 result = dict(x=[], y=[]) # type: Dict[str, List[str]]
 [out]
-_program.py:3: error: Dict entry 1 has incompatible type "str": "str"
-_program.py:5: error: Dict[str, int] has no attribute "xyz"
+_program.py:3: error: Dict entry 1 has incompatible type "str": "str"; expected "str": "int"
+_program.py:5: error: "Dict[str, int]" has no attribute "xyz"
 
 [case testDefaultDict]
 import typing as t
@@ -1122,11 +1122,11 @@ class MyDDict(t.DefaultDict[int,T], t.Generic[T]):
 MyDDict(dict)['0']
 MyDDict(dict)[0]
 [out]
-_program.py:6: error: Argument 1 to "defaultdict" has incompatible type Type[List[Any]]; expected Callable[[], str]
-_program.py:9: error: Invalid index type "str" for defaultdict[int, str]; expected type "int"
+_program.py:6: error: Argument 1 to "defaultdict" has incompatible type "Type[List[Any]]"; expected "Callable[[], str]"
+_program.py:9: error: Invalid index type "str" for "defaultdict[int, str]"; expected type "int"
 _program.py:9: error: Incompatible types in assignment (expression has type "int", target has type "str")
-_program.py:19: error: Dict entry 0 has incompatible type "str": List[<nothing>]
-_program.py:23: error: Invalid index type "str" for MyDDict[Dict[_KT, _VT]]; expected type "int"
+_program.py:19: error: Dict entry 0 has incompatible type "str": "List[<nothing>]"; expected "int": "List[<nothing>]"
+_program.py:23: error: Invalid index type "str" for "MyDDict[Dict[_KT, _VT]]"; expected type "int"
 
 [case testNoSubcriptionOfStdlibCollections]
 import collections
@@ -1148,7 +1148,7 @@ def f(d: collections.defaultdict[int, str]) -> None:
 _program.py:5: error: "defaultdict" is not subscriptable
 _program.py:6: error: "Counter" is not subscriptable
 _program.py:9: error: "defaultdict" is not subscriptable
-_program.py:12: error: Invalid index type "int" for defaultdict[str, int]; expected type "str"
+_program.py:12: error: Invalid index type "int" for "defaultdict[str, int]"; expected type "str"
 _program.py:14: error: "defaultdict" is not subscriptable, use "typing.DefaultDict" instead
 
 [case testCollectionsAliases]
@@ -1176,7 +1176,7 @@ reveal_type(o6)
 
 [out]
 _testCollectionsAliases.py:5: error: Revealed type is 'collections.Counter[builtins.int]'
-_testCollectionsAliases.py:6: error: Invalid index type "str" for Counter[int]; expected type "int"
+_testCollectionsAliases.py:6: error: Invalid index type "str" for "Counter[int]"; expected type "int"
 _testCollectionsAliases.py:9: error: Revealed type is 'collections.ChainMap[builtins.int, builtins.str]'
 _testCollectionsAliases.py:12: error: Revealed type is 'collections.deque[builtins.int]'
 _testCollectionsAliases.py:15: error: Revealed type is 'collections.Counter[builtins.int*]'
@@ -1277,7 +1277,7 @@ re.subn(bpat, b'', b'')[0] + b''
 re.subn(bre, lambda m: b'', b'')[0] + b''
 re.subn(bpat, lambda m: b'', b'')[0] + b''
 [out]
-_program.py:7: error: Type argument 1 of "search" has incompatible value "object"
+_program.py:7: error: Value of type variable "AnyStr" of "search" cannot be "object"
 _program.py:9: error: Cannot infer type argument 1 of "search"
 
 [case testReModuleString]
@@ -1301,7 +1301,7 @@ re.subn(spat, '', '')[0] + ''
 re.subn(sre, lambda m: '', '')[0] + ''
 re.subn(spat, lambda m: '', '')[0] + ''
 [out]
-_program.py:7: error: Type argument 1 of "search" has incompatible value "object"
+_program.py:7: error: Value of type variable "AnyStr" of "search" cannot be "object"
 _program.py:9: error: Cannot infer type argument 1 of "search"
 
 [case testListSetitemTuple]
@@ -1380,6 +1380,14 @@ _testTypedDictMappingMethods.py:9: error: Revealed type is 'typing.AbstractSet[b
 _testTypedDictMappingMethods.py:10: error: Revealed type is 'typing.AbstractSet[Tuple[builtins.str*, builtins.int*]]'
 _testTypedDictMappingMethods.py:11: error: Revealed type is 'typing.ValuesView[builtins.int*]'
 
+[case testCrashOnComplexCheckWithNamedTupleNext]
+from typing import NamedTuple
+
+MyNamedTuple = NamedTuple('MyNamedTuple', [('parent', 'MyNamedTuple')]) # type: ignore
+def foo(mymap) -> MyNamedTuple:
+    return next((mymap[key] for key in mymap), None)
+[out]
+
 [case testCanConvertTypedDictToAnySuperclassOfMapping]
 from mypy_extensions import TypedDict
 from typing import Sized, Iterable, Container
@@ -1393,4 +1401,4 @@ c: Container[str] = p
 o: object = p
 it2: Iterable[int] = p
 [out]
-_testCanConvertTypedDictToAnySuperclassOfMapping.py:11: error: Incompatible types in assignment (expression has type "Point", variable has type Iterable[int])
+_testCanConvertTypedDictToAnySuperclassOfMapping.py:11: error: Incompatible types in assignment (expression has type "Point", variable has type "Iterable[int]")
diff --git a/test-data/unit/reports.test b/test-data/unit/reports.test
new file mode 100644
index 0000000..27ad06d
--- /dev/null
+++ b/test-data/unit/reports.test
@@ -0,0 +1,338 @@
+-- Tests for reports
+-- ------------------------------
+--
+-- This file follows syntax of cmdline.test
+
+-- ----------------------------------------
+
+[case testConfigErrorUnknownReport]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+bad_report = .
+[out]
+mypy.ini: [mypy]: Unrecognized report type: bad_report
+
+[case testCoberturaParser]
+# cmd: mypy --cobertura-xml-report build pkg
+[file pkg/__init__.py]
+[file pkg/a.py]
+from typing import Dict
+
+def foo() -> Dict:
+  z = {'hello': 'world'}
+  return z
+[file pkg/subpkg/__init__.py]
+[file pkg/subpkg/a.py]
+def bar() -> str:
+  return 'world'
+def untyped_function():
+  return 42
+[outfile build/cobertura.xml]
+<coverage timestamp="$TIMESTAMP" version="$VERSION" line-rate="0.8000" branch-rate="0">
+  <sources>
+    <source>$PWD</source>
+  </sources>
+  <packages>
+    <package complexity="1.0" name="pkg" branch-rate="0" line-rate="1.0000">
+      <classes>
+        <class complexity="1.0" filename="pkg/__init__.py" name="__init__.py" branch-rate="0" line-rate="1.0">
+          <methods/>
+          <lines/>
+        </class>
+        <class complexity="1.0" filename="pkg/a.py" name="a.py" branch-rate="0" line-rate="1.0000">
+          <methods/>
+          <lines>
+            <line branch="true" hits="1" number="3" precision="imprecise" condition-coverage="50% (1/2)"/>
+            <line branch="false" hits="1" number="4" precision="precise"/>
+            <line branch="false" hits="1" number="5" precision="precise"/>
+          </lines>
+        </class>
+      </classes>
+    </package>
+    <package complexity="1.0" name="pkg.subpkg" branch-rate="0" line-rate="0.5000">
+      <classes>
+        <class complexity="1.0" filename="pkg/subpkg/__init__.py" name="__init__.py" branch-rate="0" line-rate="1.0">
+          <methods/>
+          <lines/>
+        </class>
+        <class complexity="1.0" filename="pkg/subpkg/a.py" name="a.py" branch-rate="0" line-rate="0.5000">
+          <methods/>
+          <lines>
+            <line branch="false" hits="1" number="1" precision="precise"/>
+            <line branch="false" hits="0" number="3" precision="any"/>
+          </lines>
+        </class>
+      </classes>
+    </package>
+  </packages>
+</coverage>
+
+
+[case testAnyExprReportDivisionByZero]
+# cmd: mypy --any-exprs-report=out -c 'pass'
+
+[case testClassDefIsTreatedAsEmpty]
+# cmd: mypy --html-report report n.py
+[file n.py]
+class A(object):
+    pass
+
+[file report/mypy-html.css]
+[file report/index.html]
+[outfile report/html/n.py.html]
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<link rel="stylesheet" type="text/css" href="../mypy-html.css">
+</head>
+<body>
+<h2>n</h2>
+<table>
+<caption>n.py</caption>
+<tbody><tr>
+<td class="table-lines"><pre><span id="L1" class="lineno"><a class="lineno" href="#L1">1</a></span>
+<span id="L2" class="lineno"><a class="lineno" href="#L2">2</a></span>
+</pre></td>
+<td class="table-code"><pre><span class="line-empty" title="No Anys on this line!">class A(object):</span>
+<span class="line-empty" title="No Anys on this line!">    pass</span>
+</pre></td>
+</tr></tbody>
+</table>
+</body>
+</html>
+
+[case testTypeVarTreatedAsEmptyLine]
+# cmd: mypy --html-report report n.py
+
+[file n.py]
+from typing import TypeVar
+
+T = TypeVar('T')
+
+[file report/mypy-html.css]
+[file report/index.html]
+[outfile report/html/n.py.html]
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<link rel="stylesheet" type="text/css" href="../mypy-html.css">
+</head>
+<body>
+<h2>n</h2>
+<table>
+<caption>n.py</caption>
+<tbody><tr>
+<td class="table-lines"><pre><span id="L1" class="lineno"><a class="lineno" href="#L1">1</a></span>
+<span id="L2" class="lineno"><a class="lineno" href="#L2">2</a></span>
+<span id="L3" class="lineno"><a class="lineno" href="#L3">3</a></span>
+</pre></td>
+<td class="table-code"><pre><span class="line-empty" title="No Anys on this line!">from typing import TypeVar</span>
+<span class="line-empty" title="No Anys on this line!"></span>
+<span class="line-empty" title="No Anys on this line!">T = TypeVar('T')</span>
+</pre></td>
+</tr></tbody>
+</table>
+</body>
+</html>
+
+[case testUnreachableCodeMarkedAsAny]
+# cmd: mypy --html-report report n.py
+
+[file n.py]
+def bar(x):
+    # type: (str) -> None
+    print(x)
+    assert False
+    print(x)
+
+[file report/mypy-html.css]
+[file report/index.html]
+[outfile report/html/n.py.html]
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<link rel="stylesheet" type="text/css" href="../mypy-html.css">
+</head>
+<body>
+<h2>n</h2>
+<table>
+<caption>n.py</caption>
+<tbody><tr>
+<td class="table-lines"><pre><span id="L1" class="lineno"><a class="lineno" href="#L1">1</a></span>
+<span id="L2" class="lineno"><a class="lineno" href="#L2">2</a></span>
+<span id="L3" class="lineno"><a class="lineno" href="#L3">3</a></span>
+<span id="L4" class="lineno"><a class="lineno" href="#L4">4</a></span>
+<span id="L5" class="lineno"><a class="lineno" href="#L5">5</a></span>
+</pre></td>
+<td class="table-code"><pre><span class="line-precise" title="No Anys on this line!">def bar(x):</span>
+<span class="line-empty" title="No Anys on this line!">    # type: (str) -> None</span>
+<span class="line-precise" title="Any Types on this line:
+Explicit (x1)">    print(x)</span>
+<span class="line-empty" title="No Anys on this line!">    assert False</span>
+<span class="line-unanalyzed" title="No Anys on this line!">    print(x)</span>
+</pre></td>
+</tr></tbody>
+</table>
+</body>
+</html>
+
+[case testHtmlReportMemberExprNoUnanalyzed]
+# cmd: mypy --html-report report n.py
+
+[file n.py]
+import sys
+
+old_stdout = sys.stdout
+
+[file report/mypy-html.css]
+[file report/index.html]
+[outfile report/html/n.py.html]
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<link rel="stylesheet" type="text/css" href="../mypy-html.css">
+</head>
+<body>
+<h2>n</h2>
+<table>
+<caption>n.py</caption>
+<tbody><tr>
+<td class="table-lines"><pre><span id="L1" class="lineno"><a class="lineno" href="#L1">1</a></span>
+<span id="L2" class="lineno"><a class="lineno" href="#L2">2</a></span>
+<span id="L3" class="lineno"><a class="lineno" href="#L3">3</a></span>
+</pre></td>
+<td class="table-code"><pre><span class="line-empty" title="No Anys on this line!">import sys</span>
+<span class="line-empty" title="No Anys on this line!"></span>
+<span class="line-precise" title="No Anys on this line!">old_stdout = sys.stdout</span>
+</pre></td>
+</tr></tbody>
+</table>
+</body>
+</html>
+
+[case testAnyExprReportIncludesDeadCode]
+# cmd: mypy --any-exprs-report report i.py j.py
+
+[file i.py]
+def bar(x):
+    # type: (str) -> None
+    print(x)
+    assert False
+    print(x)  # dead code!
+
+[file j.py]
+def bar(x):
+    # type: (str) -> None
+    print(x)
+    assert False
+
+[file report/types-of-anys.txt]
+[outfile report/any-exprs.txt]
+ Name   Anys   Exprs   Coverage
+---------------------------------
+    i      1       7     85.71%
+    j      0       6    100.00%
+---------------------------------
+Total      1      13     92.31%
+
+[case testAnyExprReportHigherKindedTypesAreNotAny]
+# cmd: mypy --any-exprs-report report i.py
+
+[file i.py]
+from enum import Enum
+from mypy_extensions import TypedDict
+from typing import NewType, NamedTuple, TypeVar
+
+from typing import TypeVar
+
+T = TypeVar('T')  # no error
+
+def f(t: T) -> T:
+    return t
+
+Point = NamedTuple('Point', [('x', int), ('y', int)])  # no error
+
+def origin() -> Point:
+    return Point(x=0, y=0)
+
+NT = NewType('NT', int)  # no error
+
+def nt() -> NT:
+    return NT(1)
+
+E = Enum('E', '1, 2, 3')  # no error
+
+def k(s: E) -> None: pass
+
+Movie = TypedDict('Movie', {'name': str, 'year': int})
+
+def g(m: Movie) -> Movie:
+    return m
+
+[file report/types-of-anys.txt]
+[outfile report/any-exprs.txt]
+ Name   Anys   Exprs   Coverage
+---------------------------------
+    i      0      16    100.00%
+---------------------------------
+Total      0      16    100.00%
+
+
+[case testAnyExpressionsReportTypesOfAny]
+# cmd: mypy --any-exprs-report report n.py
+
+[file n.py]
+from typing import Any, List
+from nonexistent import C  # type: ignore
+
+def a(x) -> None:  # Unannotated
+    print(x)
+
+x: Any = 2  # Explicit
+y: C = None  # Unimported
+
+def b() -> List:  # Omitted Generics
+    return [1, 2, 3]
+
+g = 1
+z = g.does_not_exist()  # type: ignore  # Error
+
+
+[file report/any-exprs.txt]
+[outfile report/types-of-anys.txt]
+ Name   Unannotated   Explicit   Unimported   Omitted Generics   Error   Special Form
+---------------------------------------------------------------------------------------
+    n             2          3            2                  1       3              0
+---------------------------------------------------------------------------------------
+Total             2          3            2                  1       3              0
+
+[case testAnyExpressionsReportUnqualifiedError]
+# cmd: mypy --any-exprs-report report n.py
+
+[file n.py]
+z = does_not_exist()  # type: ignore  # Error
+
+[file report/any-exprs.txt]
+[outfile report/types-of-anys.txt]
+ Name   Unannotated   Explicit   Unimported   Omitted Generics   Error   Special Form
+---------------------------------------------------------------------------------------
+    n             0          0            0                  0       3              0
+---------------------------------------------------------------------------------------
+Total             0          0            0                  0       3              0
+
+[case testAnyExpressionsReportUntypedDef]
+# cmd: mypy --any-exprs-report report n.py
+
+[file n.py]
+def foo():
+    x = 0
+    f = 0
+
+[file report/any-exprs.txt]
+[outfile report/types-of-anys.txt]
+ Name   Unannotated   Explicit   Unimported   Omitted Generics   Error   Special Form
+---------------------------------------------------------------------------------------
+    n             0          0            0                  0       0              0
+---------------------------------------------------------------------------------------
+Total             0          0            0                  0       0              0
diff --git a/test-data/unit/semanal-abstractclasses.test b/test-data/unit/semanal-abstractclasses.test
index b5147bd..dfd5dee 100644
--- a/test-data/unit/semanal-abstractclasses.test
+++ b/test-data/unit/semanal-abstractclasses.test
@@ -13,7 +13,7 @@ MypyFile:1(
   Import:2(typing)
   ClassDef:4(
     A
-    Metaclass(ABCMeta)
+    Metaclass(NameExpr(ABCMeta [abc.ABCMeta]))
     Decorator:5(
       Var(g)
       FuncDef:6(
@@ -49,11 +49,11 @@ MypyFile:1(
   Import:2(typing)
   ClassDef:4(
     A
-    Metaclass(ABCMeta)
+    Metaclass(NameExpr(ABCMeta [abc.ABCMeta]))
     PassStmt:4())
   ClassDef:5(
     B
-    Metaclass(ABCMeta)
+    Metaclass(NameExpr(ABCMeta [abc.ABCMeta]))
     PassStmt:5())
   ClassDef:6(
     C
@@ -106,7 +106,7 @@ MypyFile:1(
   Import:3(typing)
   ClassDef:5(
     A
-    Metaclass(ABCMeta)
+    Metaclass(NameExpr(ABCMeta [abc.ABCMeta]))
     Decorator:6(
       Var(g)
       FuncDef:7(
diff --git a/test-data/unit/semanal-classes.test b/test-data/unit/semanal-classes.test
index 22beb8d..d68aa17 100644
--- a/test-data/unit/semanal-classes.test
+++ b/test-data/unit/semanal-classes.test
@@ -411,7 +411,9 @@ MypyFile:1(
   Import:1(abc)
   ClassDef:2(
     A
-    Metaclass(abc.ABCMeta)
+    Metaclass(MemberExpr:2(
+      NameExpr(abc)
+      ABCMeta [abc.ABCMeta]))
     PassStmt:2()))
 
 [case testStaticMethod]
diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test
index 2192bce..ccd13f1 100644
--- a/test-data/unit/semanal-errors.test
+++ b/test-data/unit/semanal-errors.test
@@ -953,7 +953,7 @@ from typing import Generic, TypeVar
 T = TypeVar('T')
 S = TypeVar('S')
 class A(Generic[T], Generic[S]): pass \
-      # E: Duplicate Generic in bases
+      # E: Only single Generic[...] or Protocol[...] can be in bases
 [out]
 
 [case testInvalidMetaclass]
@@ -1401,3 +1401,20 @@ class A: ... # E: Name 'A' already defined on line 2
 
 [builtins fixtures/list.pyi]
 [out]
+
+[case testNoInvalidTypeInDynamicFunctions]
+from typing import Dict, TypeVar
+T = TypeVar('T')
+
+def f():  # Note no annotation
+    x: Dict[str, T] = {}
+    y: T
+    z: x
+    def nested(): pass
+    t: nested
+
+def g() -> None:
+    x: Dict[str, T] = {}  # E: Invalid type "__main__.T"
+
+[builtins fixtures/dict.pyi]
+[out]
diff --git a/test-data/unit/semanal-typeddict.test b/test-data/unit/semanal-typeddict.test
index 9c1454e..4261c72 100644
--- a/test-data/unit/semanal-typeddict.test
+++ b/test-data/unit/semanal-typeddict.test
@@ -34,3 +34,24 @@ MypyFile:1(
   AssignmentStmt:2(
     NameExpr(Point* [__main__.Point])
     TypedDictExpr:2(Point)))
+
+[case testTypedDictWithDocString]
+from mypy_extensions import TypedDict
+class A(TypedDict):
+    """foo"""
+    x: str
+[builtins fixtures/dict.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(mypy_extensions, [TypedDict])
+  ClassDef:2(
+    A
+    BaseTypeExpr(
+      NameExpr(TypedDict [mypy_extensions.TypedDict]))
+    ExpressionStmt:3(
+      StrExpr(foo))
+    AssignmentStmt:4(
+      NameExpr(x)
+      TempNode:-1(
+        Any)
+      str?)))
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
index 2795bb3..6bb0724 100644
--- a/test-data/unit/stubgen.test
+++ b/test-data/unit/stubgen.test
@@ -57,6 +57,21 @@ from typing import Any
 
 def f(x: Any = ...): ...
 
+[case testPreserveFunctionAnnotation]
+def f(x: Foo) -> Bar: ...
+[out]
+def f(x: Foo) -> Bar: ...
+
+[case testPreserveVarAnnotation]
+x: Foo
+[out]
+x: Foo
+
+[case testPreserveVarAnnotationWithoutQuotes]
+x: 'Foo'
+[out]
+x: Foo
+
 [case testVarArgs]
 def f(x, *y): ...
 [out]
@@ -81,20 +96,45 @@ def g(): ...
 [case testVariable]
 x = 1
 [out]
-x = ...  # type: int
+x: int
+
+[case testAnnotatedVariable]
+x: int = 1
+[out]
+x: int
+
+[case testAnnotatedVariableGeneric]
+x: Foo[int, str] = ...
+[out]
+x: Foo[int, str]
+
+[case testAnnotatedVariableOldSyntax]
+x = 1  # type: int
+[out]
+x: int
+
+[case testAnnotatedVariableNone]
+x: None
+[out]
+x: None
+
+[case testAnnotatedVariableNoneOldSyntax]
+x = None  # type: None
+[out]
+x: None
 
 [case testMultipleVariable]
 x = y = 1
 [out]
-x = ...  # type: int
-y = ...  # type: int
+x: int
+y: int
 
 [case testClassVariable]
 class C:
     x = 1
 [out]
 class C:
-    x = ...  # type: int
+    x: int = ...
 
 [case testSelfAssignment]
 class C:
@@ -103,7 +143,7 @@ class C:
         x.y = 2
 [out]
 class C:
-    x = ...  # type: int
+    x: int = ...
     def __init__(self) -> None: ...
 
 [case testSelfAndClassBodyAssignment]
@@ -114,10 +154,10 @@ class C:
         self.x = 1
         self.x = 1
 [out]
-x = ...  # type: int
+x: int
 
 class C:
-    x = ...  # type: int
+    x: int = ...
     def __init__(self) -> None: ...
 
 [case testEmptyClass]
@@ -166,10 +206,10 @@ _x = 1
 class A:
     _y = 1
 [out]
-_x = ...  # type: int
+_x: int
 
 class A:
-    _y = ...  # type: int
+    _y: int = ...
 
 [case testSpecialInternalVar]
 __all__ = []
@@ -195,16 +235,22 @@ x, y = 1, 2
 [out]
 from typing import Any
 
-x = ...  # type: Any
-y = ...  # type: Any
+x: Any
+y: Any
+
+[case testMultipleAssignmentAnnotated]
+x, y = 1, "2" # type: int, str
+[out]
+x: int
+y: str
 
 [case testMultipleAssignment2]
 [x, y] = 1, 2
 [out]
 from typing import Any
 
-x = ...  # type: Any
-y = ...  # type: Any
+x: Any
+y: Any
 
 [case testKeywordOnlyArg]
 def f(x, *, y=1): ...
@@ -333,7 +379,7 @@ class A:
     def f(self): ...
 [out]
 class A:
-    x = ...  # type: int
+    x: int = ...
     def f(self): ...
 
 [case testSkipMultiplePrivateDefs]
@@ -356,9 +402,9 @@ class C: ...
 [out]
 class A: ...
 
-_x = ...  # type: int
-_y = ...  # type: int
-_z = ...  # type: int
+_x: int
+_y: int
+_z: int
 
 class C: ...
 
@@ -369,7 +415,17 @@ x = 1
 [out]
 from re import match as match, sub as sub
 
-x = ...  # type: int
+x: int
+
+[case testExportModule_import]
+import re
+__all__ = ['re', 'x']
+x = 1
+y = 2
+[out]
+import re as re
+
+x: int
 
 [case testExportModule_import]
 import re
@@ -379,7 +435,7 @@ y = 2
 [out]
 import re as re
 
-x = ...  # type: int
+x: int
 
 [case testExportModuleAs_import]
 import re as rex
@@ -389,7 +445,7 @@ y = 2
 [out]
 import re as rex
 
-x = ...  # type: int
+x: int
 
 [case testExportModuleInPackage_import]
 import urllib.parse as p
@@ -397,12 +453,12 @@ __all__ = ['p']
 [out]
 import urllib.parse as p
 
-[case testExportModuleInPackageUnsupported_import]
+[case testExportPackageOfAModule_import]
 import urllib.parse
 __all__ = ['urllib']
+
 [out]
-# Names in __all__ with no definition:
-#   urllib
+import urllib as urllib
 
 [case testRelativeImportAll]
 from .x import *
@@ -418,7 +474,7 @@ class C:
 [out]
 def f(): ...
 
-x = ...  # type: int
+x: int
 
 class C:
     def g(self): ...
@@ -494,7 +550,6 @@ def f(): ...
 X = _namedtuple('X', 'a b')
 def g(): ...
 [out]
-from collections import namedtuple as _namedtuple
 from collections import namedtuple
 
 def f(): ...
@@ -551,6 +606,11 @@ from x import X as _X
 
 class A(_X): ...
 
+[case testGenericClass]
+class D(Generic[T]): ...
+[out]
+class D(Generic[T]): ...
+
 [case testObjectBaseClass]
 class A(object): ...
 [out]
@@ -577,19 +637,19 @@ class A:
 [out]
 class A:
     class B:
-        x = ...  # type: int
+        x: int = ...
         def f(self): ...
     def g(self): ...
 
 [case testExportViaRelativeImport]
 from .api import get
 [out]
-from .api import get as get
+from .api import get
 
 [case testExportViaRelativePackageImport]
 from .packages.urllib3.contrib import parse
 [out]
-from .packages.urllib3.contrib import parse as parse
+from .packages.urllib3.contrib import parse
 
 [case testNoExportViaRelativeImport]
 from . import get
@@ -600,7 +660,7 @@ from .x import X
 class A(X):
      pass
 [out]
-from .x import X as X
+from .x import X
 
 class A(X): ...
 
@@ -619,14 +679,163 @@ def f(a): ...
 [case testInferOptionalOnlyFunc]
 class A:
     x = None
-    def __init__(self, a=None) -> None:
+    def __init__(self, a=None):
+        self.x = []
+    def method(self, a=None):
         self.x = []
 [out]
 from typing import Any, Optional
 
 class A:
-    x = ...  # type: Any
+    x: Any = ...
     def __init__(self, a: Optional[Any] = ...) -> None: ...
+    def method(self, a: Optional[Any] = ...): ...
+
+[case testAnnotationImportsFrom]
+import foo
+from collection import defaultdict
+x: defaultdict
+
+[out]
+from collection import defaultdict
+
+x: defaultdict
+
+[case testAnnotationImports]
+import foo
+import collection
+x: collection.defaultdict
+
+[out]
+import collection
+
+x: collection.defaultdict
+
+
+[case testAnnotationImports]
+from typing import List
+import collection
+x: List[collection.defaultdict]
+
+[out]
+import collection
+from typing import List
+
+x: List[collection.defaultdict]
+
+
+[case testAnnotationFwRefs]
+x: C
+
+class C:
+    attr: C
+
+y: C
+[out]
+x: C
+
+class C:
+    attr: C
+
+y: C
+
+[case testTypeVarPreserved]
+tv = TypeVar('tv')
+
+[out]
+from typing import TypeVar
+
+tv = TypeVar('tv')
+
+[case testTypeVarArgsPreserved]
+tv = TypeVar('tv', int, str)
+
+[out]
+from typing import TypeVar
+
+tv = TypeVar('tv', int, str)
+
+[case testTypeVarNamedArgsPreserved]
+tv = TypeVar('tv', bound=bool, covariant=True)
+
+[out]
+from typing import TypeVar
+
+tv = TypeVar('tv', bound=bool, covariant=True)
+
+[case testTypeAliasPreserved]
+alias = str
+
+[out]
+alias = str
+
+[case testDeepTypeAliasPreserved]
+
+alias = Dict[str, List[str]]
+
+[out]
+alias = Dict[str, List[str]]
+
+[case testDeepGenericTypeAliasPreserved]
+from typing import TypeVar
+
+T = TypeVar('T')
+alias = Union[T, List[T]]
+
+[out]
+from typing import TypeVar
+
+T = TypeVar('T')
+alias = Union[T, List[T]]
+
+[case testEllipsisAliasPreserved]
+
+alias = Tuple[int, ...]
+
+[out]
+alias = Tuple[int, ...]
+
+[case testCallableAliasPreserved]
+
+alias1 = Callable[..., int]
+alias2 = Callable[[str, bool], None]
+
+[out]
+alias1 = Callable[..., int]
+alias2 = Callable[[str, bool], None]
+
+[case testAliasPullsImport]
+from module import Container
+
+alias = Container[Any]
+
+[out]
+from module import Container
+from typing import Any
+
+alias = Container[Any]
+
+[case testAliasOnlyToplevel]
+class Foo:
+    alias = str
+
+[out]
+from typing import Any
+
+class Foo:
+    alias: Any = ...
+
+[case testAliasExceptions]
+noalias1 = None
+noalias2 = ...
+noalias3 = True
+
+[out]
+from typing import Any
+
+noalias1: Any
+noalias2: Any
+noalias3: bool
 
 -- More features/fixes:
 --   do not export deleted names
diff --git a/test-requirements.txt b/test-requirements.txt
deleted file mode 100644
index 62e2f4c..0000000
--- a/test-requirements.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-flake8
-flake8-bugbear; python_version >= '3.5'
-flake8-pyi; python_version >= '3.6'
-lxml; sys_platform != 'win32' or python_version == '3.5' or python_version == '3.6'
-typed-ast>=1.0.4,<1.1.0; sys_platform != 'win32' or python_version >= '3.5'
-pytest>=2.8
-pytest-xdist>=1.13
-pytest-cov>=2.4.0
-typing>=3.5.2; python_version < '3.5'
diff --git a/tmp-test-dirs/.gitignore b/tmp-test-dirs/.gitignore
deleted file mode 100644
index e6579d8..0000000
--- a/tmp-test-dirs/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-# This directory is used to store temporary directories for the testsuite.
-# If anything manages to exist here, it means python crashed instead of
-# calling tempfile.TemporaryDirectory's cleanup while unwinding.
-# Therefore, don't actually provide any ignore patterns.
diff --git a/typeshed/stdlib/2/ConfigParser.pyi b/typeshed/stdlib/2/ConfigParser.pyi
index 5191e99..b1cd517 100644
--- a/typeshed/stdlib/2/ConfigParser.pyi
+++ b/typeshed/stdlib/2/ConfigParser.pyi
@@ -78,10 +78,10 @@ class RawConfigParser:
     def optionxform(self, optionstr: str) -> str: ...
     def has_option(self, section: str, option: str) -> bool: ...
     def set(self, section: str, option: str, value: Any = ...) -> None: ...
-    def write(self, fp: file) -> None: ...
+    def write(self, fp: IO[str]) -> None: ...
     def remove_option(self, section: str, option: Any) -> bool: ...
     def remove_section(self, section: str) -> bool: ...
-    def _read(self, fp: file, fpname: str) -> None: ...
+    def _read(self, fp: IO[str], fpname: str) -> None: ...
 
 class ConfigParser(RawConfigParser):
     _KEYCRE = ...  # type: Any
diff --git a/typeshed/stdlib/2/__builtin__.pyi b/typeshed/stdlib/2/__builtin__.pyi
index a681ab6..b781292 100644
--- a/typeshed/stdlib/2/__builtin__.pyi
+++ b/typeshed/stdlib/2/__builtin__.pyi
@@ -600,9 +600,9 @@ class set(MutableSet[_T], Generic[_T]):
     def discard(self, element: _T) -> None: ...
     def intersection(self, *s: Iterable[Any]) -> Set[_T]: ...
     def intersection_update(self, *s: Iterable[Any]) -> None: ...
-    def isdisjoint(self, s: Iterable[Any]) -> bool: ...
-    def issubset(self, s: Iterable[Any]) -> bool: ...
-    def issuperset(self, s: Iterable[Any]) -> bool: ...
+    def isdisjoint(self, s: Iterable[object]) -> bool: ...
+    def issubset(self, s: Iterable[object]) -> bool: ...
+    def issuperset(self, s: Iterable[object]) -> bool: ...
     def pop(self) -> _T: ...
     def remove(self, element: _T) -> None: ...
     def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ...
@@ -613,18 +613,18 @@ class set(MutableSet[_T], Generic[_T]):
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_T]: ...
     def __str__(self) -> str: ...
-    def __and__(self, s: AbstractSet[Any]) -> Set[_T]: ...
-    def __iand__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+    def __and__(self, s: AbstractSet[object]) -> Set[_T]: ...
+    def __iand__(self, s: AbstractSet[object]) -> Set[_T]: ...
     def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
     def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
-    def __sub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
-    def __isub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+    def __sub__(self, s: AbstractSet[object]) -> Set[_T]: ...
+    def __isub__(self, s: AbstractSet[object]) -> Set[_T]: ...
     def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
     def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
-    def __le__(self, s: AbstractSet[Any]) -> bool: ...
-    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
-    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
-    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __le__(self, s: AbstractSet[object]) -> bool: ...
+    def __lt__(self, s: AbstractSet[object]) -> bool: ...
+    def __ge__(self, s: AbstractSet[object]) -> bool: ...
+    def __gt__(self, s: AbstractSet[object]) -> bool: ...
     # TODO more set operations
 
 class frozenset(AbstractSet[_T], Generic[_T]):
@@ -633,11 +633,11 @@ class frozenset(AbstractSet[_T], Generic[_T]):
     @overload
     def __init__(self, iterable: Iterable[_T]) -> None: ...
     def copy(self) -> FrozenSet[_T]: ...
-    def difference(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
-    def intersection(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
+    def difference(self, *s: Iterable[object]) -> FrozenSet[_T]: ...
+    def intersection(self, *s: Iterable[object]) -> FrozenSet[_T]: ...
     def isdisjoint(self, s: Iterable[_T]) -> bool: ...
-    def issubset(self, s: Iterable[Any]) -> bool: ...
-    def issuperset(self, s: Iterable[Any]) -> bool: ...
+    def issubset(self, s: Iterable[object]) -> bool: ...
+    def issuperset(self, s: Iterable[object]) -> bool: ...
     def symmetric_difference(self, s: Iterable[_T]) -> FrozenSet[_T]: ...
     def union(self, *s: Iterable[_T]) -> FrozenSet[_T]: ...
     def __len__(self) -> int: ...
@@ -648,10 +648,10 @@ class frozenset(AbstractSet[_T], Generic[_T]):
     def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
     def __sub__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ...
     def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
-    def __le__(self, s: AbstractSet[Any]) -> bool: ...
-    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
-    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
-    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __le__(self, s: AbstractSet[object]) -> bool: ...
+    def __lt__(self, s: AbstractSet[object]) -> bool: ...
+    def __ge__(self, s: AbstractSet[object]) -> bool: ...
+    def __gt__(self, s: AbstractSet[object]) -> bool: ...
 
 class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
     def __init__(self, iterable: Iterable[_T], start: int = 0) -> None: ...
@@ -689,8 +689,8 @@ bytes = str
 NotImplemented = ...  # type: Any
 
 def abs(n: SupportsAbs[_T]) -> _T: ...
-def all(i: Iterable) -> bool: ...
-def any(i: Iterable) -> bool: ...
+def all(i: Iterable[object]) -> bool: ...
+def any(i: Iterable[object]) -> bool: ...
 def bin(number: int) -> str: ...
 def callable(o: object) -> bool: ...
 def chr(code: int) -> str: ...
diff --git a/typeshed/stdlib/2/_io.pyi b/typeshed/stdlib/2/_io.pyi
index 0a0b6d0..3f1baab 100644
--- a/typeshed/stdlib/2/_io.pyi
+++ b/typeshed/stdlib/2/_io.pyi
@@ -92,7 +92,7 @@ class _RawIOBase(_IOBase):
 class FileIO(_RawIOBase, BytesIO):  # type: ignore  # for __enter__
     mode = ...  # type: str
     closefd = ...  # type: bool
-    def __init__(self, file: str, mode: str = ...) -> None: ...
+    def __init__(self, file: str, mode: str = ..., closefd: bool = ...) -> None: ...
     def readinto(self, buffer: bytearray)-> int: ...
     def write(self, pbuf: str) -> int: ...
 
diff --git a/typeshed/stdlib/2/ast.pyi b/typeshed/stdlib/2/ast.pyi
index d22a6f8..e4ba3b6 100644
--- a/typeshed/stdlib/2/ast.pyi
+++ b/typeshed/stdlib/2/ast.pyi
@@ -3,19 +3,8 @@
 import typing
 from typing import Any, Iterator, Union
 
-from _ast import (
-    Add, alias, And, arguments, Assert, Assign, AST, Attribute, AugAssign,
-    AugLoad, AugStore, BinOp, BitAnd, BitOr, BitXor, BoolOp, boolop, Break,
-    Call, ClassDef, cmpop, Compare, comprehension, Continue, Del, Delete, Dict,
-    DictComp, Div, Ellipsis, Eq, ExceptHandler, Exec, Expr, expr, Expression,
-    expr_context, ExtSlice, FloorDiv, For, FunctionDef, GeneratorExp, Global,
-    Gt, GtE, If, IfExp, Import, ImportFrom, In, Index, Interactive, Invert, Is,
-    IsNot, keyword, Lambda, List, ListComp, Load, LShift, Lt, LtE, Mod, mod,
-    Module, Mult, Name, Not, NotEq, NotIn, Num, operator, Or, Param, Pass, Pow,
-    Print, Raise, Repr, Return, RShift, Set, SetComp, Slice, slice, stmt,
-    Store, Str, Sub, Subscript, Suite, TryExcept, TryFinally, Tuple, UAdd,
-    UnaryOp, unaryop, USub, While, With, Yield
-)
+from _ast import *
+from _ast import AST, Module
 
 __version__ = ...  # type: str
 PyCF_ONLY_AST = ...  # type: int
diff --git a/typeshed/stdlib/2/builtins.pyi b/typeshed/stdlib/2/builtins.pyi
index a681ab6..b781292 100644
--- a/typeshed/stdlib/2/builtins.pyi
+++ b/typeshed/stdlib/2/builtins.pyi
@@ -600,9 +600,9 @@ class set(MutableSet[_T], Generic[_T]):
     def discard(self, element: _T) -> None: ...
     def intersection(self, *s: Iterable[Any]) -> Set[_T]: ...
     def intersection_update(self, *s: Iterable[Any]) -> None: ...
-    def isdisjoint(self, s: Iterable[Any]) -> bool: ...
-    def issubset(self, s: Iterable[Any]) -> bool: ...
-    def issuperset(self, s: Iterable[Any]) -> bool: ...
+    def isdisjoint(self, s: Iterable[object]) -> bool: ...
+    def issubset(self, s: Iterable[object]) -> bool: ...
+    def issuperset(self, s: Iterable[object]) -> bool: ...
     def pop(self) -> _T: ...
     def remove(self, element: _T) -> None: ...
     def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ...
@@ -613,18 +613,18 @@ class set(MutableSet[_T], Generic[_T]):
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_T]: ...
     def __str__(self) -> str: ...
-    def __and__(self, s: AbstractSet[Any]) -> Set[_T]: ...
-    def __iand__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+    def __and__(self, s: AbstractSet[object]) -> Set[_T]: ...
+    def __iand__(self, s: AbstractSet[object]) -> Set[_T]: ...
     def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
     def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
-    def __sub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
-    def __isub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+    def __sub__(self, s: AbstractSet[object]) -> Set[_T]: ...
+    def __isub__(self, s: AbstractSet[object]) -> Set[_T]: ...
     def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
     def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
-    def __le__(self, s: AbstractSet[Any]) -> bool: ...
-    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
-    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
-    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __le__(self, s: AbstractSet[object]) -> bool: ...
+    def __lt__(self, s: AbstractSet[object]) -> bool: ...
+    def __ge__(self, s: AbstractSet[object]) -> bool: ...
+    def __gt__(self, s: AbstractSet[object]) -> bool: ...
     # TODO more set operations
 
 class frozenset(AbstractSet[_T], Generic[_T]):
@@ -633,11 +633,11 @@ class frozenset(AbstractSet[_T], Generic[_T]):
     @overload
     def __init__(self, iterable: Iterable[_T]) -> None: ...
     def copy(self) -> FrozenSet[_T]: ...
-    def difference(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
-    def intersection(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
+    def difference(self, *s: Iterable[object]) -> FrozenSet[_T]: ...
+    def intersection(self, *s: Iterable[object]) -> FrozenSet[_T]: ...
     def isdisjoint(self, s: Iterable[_T]) -> bool: ...
-    def issubset(self, s: Iterable[Any]) -> bool: ...
-    def issuperset(self, s: Iterable[Any]) -> bool: ...
+    def issubset(self, s: Iterable[object]) -> bool: ...
+    def issuperset(self, s: Iterable[object]) -> bool: ...
     def symmetric_difference(self, s: Iterable[_T]) -> FrozenSet[_T]: ...
     def union(self, *s: Iterable[_T]) -> FrozenSet[_T]: ...
     def __len__(self) -> int: ...
@@ -648,10 +648,10 @@ class frozenset(AbstractSet[_T], Generic[_T]):
     def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
     def __sub__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ...
     def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
-    def __le__(self, s: AbstractSet[Any]) -> bool: ...
-    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
-    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
-    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __le__(self, s: AbstractSet[object]) -> bool: ...
+    def __lt__(self, s: AbstractSet[object]) -> bool: ...
+    def __ge__(self, s: AbstractSet[object]) -> bool: ...
+    def __gt__(self, s: AbstractSet[object]) -> bool: ...
 
 class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
     def __init__(self, iterable: Iterable[_T], start: int = 0) -> None: ...
@@ -689,8 +689,8 @@ bytes = str
 NotImplemented = ...  # type: Any
 
 def abs(n: SupportsAbs[_T]) -> _T: ...
-def all(i: Iterable) -> bool: ...
-def any(i: Iterable) -> bool: ...
+def all(i: Iterable[object]) -> bool: ...
+def any(i: Iterable[object]) -> bool: ...
 def bin(number: int) -> str: ...
 def callable(o: object) -> bool: ...
 def chr(code: int) -> str: ...
diff --git a/typeshed/stdlib/2/collections.pyi b/typeshed/stdlib/2/collections.pyi
index 1fab274..e0d983e 100644
--- a/typeshed/stdlib/2/collections.pyi
+++ b/typeshed/stdlib/2/collections.pyi
@@ -67,7 +67,7 @@ class Counter(Dict[_T, int], Generic[_T]):
     @overload
     def __init__(self, iterable: Iterable[_T]) -> None: ...
     def elements(self) -> Iterator[_T]: ...
-    def most_common(self, n: int = ...) -> List[_T]: ...
+    def most_common(self, n: Optional[int] = ...) -> List[Tuple[_T, int]]: ...
     @overload
     def subtract(self, __mapping: Mapping[_T, int]) -> None: ...
     @overload
diff --git a/typeshed/stdlib/2/email/mime/application.pyi b/typeshed/stdlib/2/email/mime/application.pyi
new file mode 100644
index 0000000..99da672
--- /dev/null
+++ b/typeshed/stdlib/2/email/mime/application.pyi
@@ -0,0 +1,11 @@
+# Stubs for email.mime.application
+
+from typing import Callable, Optional, Tuple, Union
+from email.mime.nonmultipart import MIMENonMultipart
+
+_ParamsType = Union[str, None, Tuple[str, Optional[str], str]]
+
+class MIMEApplication(MIMENonMultipart):
+    def __init__(self, _data: bytes, _subtype: str = ...,
+                 _encoder: Callable[[MIMEApplication], None] = ...,
+                 **_params: _ParamsType) -> None: ...
diff --git a/typeshed/stdlib/2/encodings/utf_8.pyi b/typeshed/stdlib/2/encodings/utf_8.pyi
index 0111184..d38bd58 100644
--- a/typeshed/stdlib/2/encodings/utf_8.pyi
+++ b/typeshed/stdlib/2/encodings/utf_8.pyi
@@ -1,14 +1,15 @@
 import codecs
+from typing import Text, Tuple
 
 class IncrementalEncoder(codecs.IncrementalEncoder):
-    pass
+    def encode(self, input: Text, final: bool = ...) -> bytes: ...
+
 class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
-    pass
-class StreamWriter(codecs.StreamWriter):
-    pass
-class StreamReader(codecs.StreamReader):
-    pass
+    def _buffer_decode(self, input: bytes, errors: str, final: bool) -> Tuple[Text, int]: ...
+
+class StreamWriter(codecs.StreamWriter): ...
+class StreamReader(codecs.StreamReader): ...
 
 def getregentry() -> codecs.CodecInfo: ...
-def encode(input: str, errors: str = ...) -> bytes: ...
-def decode(input: bytes, errors: str = ...) -> str: ...
+def encode(input: Text, errors: Text = ...) -> bytes: ...
+def decode(input: bytes, errors: Text = ...) -> Text: ...
diff --git a/typeshed/stdlib/2/exceptions.pyi b/typeshed/stdlib/2/exceptions.pyi
index 9b00e34..6e4bafc 100644
--- a/typeshed/stdlib/2/exceptions.pyi
+++ b/typeshed/stdlib/2/exceptions.pyi
@@ -1,80 +1,48 @@
-from typing import Any, Tuple, Optional
-
-class StandardError(Exception): ...
-class ArithmeticError(StandardError): ...
-class AssertionError(StandardError): ...
-class AttributeError(StandardError): ...
-class BaseException(object):
-    args = ...  # type: Tuple[Any, ...]
-    message = ...  # type: str
-    def __getslice__(self, start, end) -> Any: ...
-    def __getitem__(self, start, end) -> Any: ...
-    def __unicode__(self) -> unicode: ...
-class BufferError(StandardError): ...
-class BytesWarning(Warning): ...
-class DeprecationWarning(Warning): ...
-class EOFError(StandardError): ...
-class EnvironmentError(StandardError):
-    errno = ...  # type: int
-    strerror = ...  # type: str
-    filename = ...  # type: str
-class Exception(BaseException): ...
-class FloatingPointError(ArithmeticError): ...
-class FutureWarning(Warning): ...
-class GeneratorExit(BaseException): ...
-class IOError(EnvironmentError): ...
-class ImportError(StandardError): ...
-class ImportWarning(Warning): ...
-class IndentationError(SyntaxError): ...
-class IndexError(LookupError): ...
-class KeyError(LookupError): ...
-class KeyboardInterrupt(BaseException): ...
-class LookupError(StandardError): ...
-class MemoryError(StandardError): ...
-class NameError(StandardError): ...
-class NotImplementedError(RuntimeError): ...
-class OSError(EnvironmentError): ...
-class OverflowError(ArithmeticError): ...
-class PendingDeprecationWarning(Warning): ...
-class ReferenceError(StandardError): ...
-class RuntimeError(StandardError): ...
-class RuntimeWarning(Warning): ...
-class StopIteration(Exception): ...
-class SyntaxError(StandardError):
-    text = ...  # type: str
-    print_file_and_line = ...  # type: Optional[str]
-    filename = ...  # type: str
-    lineno = ...  # type: int
-    offset = ...  # type: int
-    msg = ...  # type: str
-class SyntaxWarning(Warning): ...
-class SystemError(StandardError): ...
-class SystemExit(BaseException):
-    code = ...  # type: int
-class TabError(IndentationError): ...
-class TypeError(StandardError): ...
-class UnboundLocalError(NameError): ...
-class UnicodeError(ValueError): ...
-class UnicodeDecodeError(UnicodeError):
-    start = ...  # type: int
-    reason = ...  # type: str
-    object = ...  # type: str
-    end = ...  # type: int
-    encoding = ...  # type: str
-class UnicodeEncodeError(UnicodeError):
-    start = ...  # type: int
-    reason = ...  # type: str
-    object = ...  # type: unicode
-    end = ...  # type: int
-    encoding = ...  # type: str
-class UnicodeTranslateError(UnicodeError):
-    start = ...  # type: int
-    reason = ...  # type: str
-    object = ...  # type: Any
-    end = ...  # type: int
-    encoding = ...  # type: str
-class UnicodeWarning(Warning): ...
-class UserWarning(Warning): ...
-class ValueError(StandardError): ...
-class Warning(Exception): ...
-class ZeroDivisionError(ArithmeticError): ...
+from __builtin__ import ArithmeticError as ArithmeticError
+from __builtin__ import AssertionError as AssertionError
+from __builtin__ import AttributeError as AttributeError
+from __builtin__ import BaseException as BaseException
+from __builtin__ import BufferError as BufferError
+from __builtin__ import BytesWarning as BytesWarning
+from __builtin__ import DeprecationWarning as DeprecationWarning
+from __builtin__ import EOFError as EOFError
+from __builtin__ import EnvironmentError as EnvironmentError
+from __builtin__ import Exception as Exception
+from __builtin__ import FloatingPointError as FloatingPointError
+from __builtin__ import FutureWarning as FutureWarning
+from __builtin__ import GeneratorExit as GeneratorExit
+from __builtin__ import IOError as IOError
+from __builtin__ import ImportError as ImportError
+from __builtin__ import ImportWarning as ImportWarning
+from __builtin__ import IndentationError as IndentationError
+from __builtin__ import IndexError as IndexError
+from __builtin__ import KeyError as KeyError
+from __builtin__ import KeyboardInterrupt as KeyboardInterrupt
+from __builtin__ import LookupError as LookupError
+from __builtin__ import MemoryError as MemoryError
+from __builtin__ import NameError as NameError
+from __builtin__ import NotImplementedError as NotImplementedError
+from __builtin__ import OSError as OSError
+from __builtin__ import OverflowError as OverflowError
+from __builtin__ import PendingDeprecationWarning as PendingDeprecationWarning
+from __builtin__ import ReferenceError as ReferenceError
+from __builtin__ import RuntimeError as RuntimeError
+from __builtin__ import RuntimeWarning as RuntimeWarning
+from __builtin__ import StandardError as StandardError
+from __builtin__ import StopIteration as StopIteration
+from __builtin__ import SyntaxError as SyntaxError
+from __builtin__ import SyntaxWarning as SyntaxWarning
+from __builtin__ import SystemError as SystemError
+from __builtin__ import SystemExit as SystemExit
+from __builtin__ import TabError as TabError
+from __builtin__ import TypeError as TypeError
+from __builtin__ import UnboundLocalError as UnboundLocalError
+from __builtin__ import UnicodeError as UnicodeError
+from __builtin__ import UnicodeDecodeError as UnicodeDecodeError
+from __builtin__ import UnicodeEncodeError as UnicodeEncodeError
+from __builtin__ import UnicodeTranslateError as UnicodeTranslateError
+from __builtin__ import UnicodeWarning as UnicodeWarning
+from __builtin__ import UserWarning as UserWarning
+from __builtin__ import ValueError as ValueError
+from __builtin__ import Warning as Warning
+from __builtin__ import ZeroDivisionError as ZeroDivisionError
diff --git a/typeshed/stdlib/2/fcntl.pyi b/typeshed/stdlib/2/fcntl.pyi
index 5e7da7f..c7ff895 100644
--- a/typeshed/stdlib/2/fcntl.pyi
+++ b/typeshed/stdlib/2/fcntl.pyi
@@ -1,4 +1,4 @@
-from typing import Any, Union
+from typing import Any, Union, IO
 import io
 
 FASYNC = ...  # type: int
@@ -72,7 +72,7 @@ LOCK_SH = ...  # type: int
 LOCK_UN = ...  # type: int
 LOCK_WRITE = ...  # type: int
 
-_ANYFILE = Union[int, io.IOBase]
+_ANYFILE = Union[int, IO]
 
 # TODO All these return either int or bytes depending on the value of
 # cmd (not on the type of arg).
diff --git a/typeshed/stdlib/2/functools.pyi b/typeshed/stdlib/2/functools.pyi
index f3bcc90..a0e1a8f 100644
--- a/typeshed/stdlib/2/functools.pyi
+++ b/typeshed/stdlib/2/functools.pyi
@@ -21,7 +21,7 @@ WRAPPER_ASSIGNMENTS = ...  # type: Sequence[str]
 WRAPPER_UPDATES = ...  # type: Sequence[str]
 
 def update_wrapper(wrapper: _AnyCallable, wrapped: _AnyCallable, assigned: Sequence[str] = ...,
-                   updated: Sequence[str] = ...) -> None: ...
+                   updated: Sequence[str] = ...) -> _AnyCallable: ...
 def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> Callable[[_AnyCallable], _AnyCallable]: ...
 def total_ordering(cls: type) -> type: ...
 def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], Any]: ...
diff --git a/typeshed/stdlib/2/heapq.pyi b/typeshed/stdlib/2/heapq.pyi
index 4a7a65f..00abb31 100644
--- a/typeshed/stdlib/2/heapq.pyi
+++ b/typeshed/stdlib/2/heapq.pyi
@@ -1,4 +1,4 @@
-from typing import TypeVar, List, Iterable, Any, Callable
+from typing import TypeVar, List, Iterable, Any, Callable, Optional
 
 _T = TypeVar('_T')
 
@@ -11,5 +11,6 @@ def heapify(x: List[_T]) -> None: ...
 def heapreplace(heap: List[_T], item: _T) -> _T:
     raise IndexError()  # if heap is empty
 def merge(*iterables: Iterable[_T]) -> Iterable[_T]: ...
-def nlargest(n: int, iterable: Iterable[_T]) -> List[_T]: ...
+def nlargest(n: int, iterable: Iterable[_T],
+             key: Optional[Callable[[_T], Any]] = ...) -> List[_T]: ...
 def nsmallest(n: int, iterable: Iterable[_T]) -> List[_T]: ...
diff --git a/typeshed/stdlib/2/io.pyi b/typeshed/stdlib/2/io.pyi
index 0c01b2b..e78ec50 100644
--- a/typeshed/stdlib/2/io.pyi
+++ b/typeshed/stdlib/2/io.pyi
@@ -39,5 +39,7 @@ class RawIOBase(_io._RawIOBase, IOBase):
 class BufferedIOBase(_io._BufferedIOBase, IOBase):
     pass
 
-class TextIOBase(_io._TextIOBase, IOBase):  # type: ignore
+# Note: In the actual io.py, TextIOBase subclasses IOBase.
+# (Which we don't do here because we don't want to subclass both TextIO and BinaryIO.)
+class TextIOBase(_io._TextIOBase):
     pass
diff --git a/typeshed/stdlib/2/itertools.pyi b/typeshed/stdlib/2/itertools.pyi
index 9d60526..31e34be 100644
--- a/typeshed/stdlib/2/itertools.pyi
+++ b/typeshed/stdlib/2/itertools.pyi
@@ -40,7 +40,7 @@ def groupby(iterable: Iterable[_T],
 @overload
 def islice(iterable: Iterable[_T], stop: int) -> Iterator[_T]: ...
 @overload
-def islice(iterable: Iterable[_T], start: int, stop: Optional[int],
+def islice(iterable: Iterable[_T], start: Optional[int], stop: Optional[int],
            step: int = ...) -> Iterator[_T]: ...
 
 _T1 = TypeVar('_T1')
@@ -60,7 +60,7 @@ def imap(func: Callable[[_T1, _T2], _S],
 def starmap(func: Any, iterable: Iterable[Any]) -> Iterator[Any]: ...
 def takewhile(predicate: Callable[[_T], Any],
               iterable: Iterable[_T]) -> Iterator[_T]: ...
-def tee(iterable: Iterable[Any], n: int = ...) -> Iterator[Any]: ...
+def tee(iterable: Iterable[_T], n: int = ...) -> Tuple[Iterator[_T], ...]: ...
 
 @overload
 def izip(iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
diff --git a/typeshed/stdlib/2/mutex.pyi b/typeshed/stdlib/2/mutex.pyi
new file mode 100644
index 0000000..df1e71c
--- /dev/null
+++ b/typeshed/stdlib/2/mutex.pyi
@@ -0,0 +1,15 @@
+# Source: https://hg.python.org/cpython/file/2.7/Lib/mutex.py
+
+from collections import deque
+from typing import Any, Callable, TypeVar
+
+_ArgType = TypeVar('_ArgType')
+
+class mutex:
+    locked = ...  # type: bool
+    queue = ...  # type: deque
+    def __init__(self) -> None: ...
+    def test(self) -> bool: ...
+    def testandset(self) -> bool: ...
+    def lock(self, function: Callable[[_ArgType], Any], argument: _ArgType) -> None: ...
+    def unlock(self) -> None: ...
diff --git a/typeshed/stdlib/2/os/__init__.pyi b/typeshed/stdlib/2/os/__init__.pyi
index 8dad31d..3951ba6 100644
--- a/typeshed/stdlib/2/os/__init__.pyi
+++ b/typeshed/stdlib/2/os/__init__.pyi
@@ -3,6 +3,7 @@
 
 from builtins import OSError as error
 from io import TextIOWrapper as _TextIOWrapper
+from posix import stat_result as stat_result  # TODO: use this, see https://github.com/python/mypy/issues/3078
 import sys
 from typing import (
     Mapping, MutableMapping, Dict, List, Any, Tuple, Iterator, overload, Union, AnyStr,
@@ -126,6 +127,7 @@ _StatVFS = NamedTuple('_StatVFS', [('f_bsize', int), ('f_frsize', int), ('f_bloc
                                    ('f_bfree', int), ('f_bavail', int), ('f_files', int),
                                    ('f_ffree', int), ('f_favail', int), ('f_flag', int),
                                    ('f_namemax', int)])
+
 def ctermid() -> str: ...  # Unix only
 def getegid() -> int: ...  # Unix only
 def geteuid() -> int: ...  # Unix only
diff --git a/typeshed/stdlib/2/sre_constants.pyi b/typeshed/stdlib/2/sre_constants.pyi
new file mode 100644
index 0000000..4404873
--- /dev/null
+++ b/typeshed/stdlib/2/sre_constants.pyi
@@ -0,0 +1,94 @@
+# Source: https://hg.python.org/cpython/file/2.7/Lib/sre_constants.py
+
+from typing import Dict, List, TypeVar
+
+MAGIC = ...  # type: int
+MAXREPEAT = ...  # type: int
+
+class error(Exception): ...
+
+FAILURE = ...  # type: str
+SUCCESS = ...  # type: str
+ANY = ...  # type: str
+ANY_ALL = ...  # type: str
+ASSERT = ...  # type: str
+ASSERT_NOT = ...  # type: str
+AT = ...  # type: str
+BIGCHARSET = ...  # type: str
+BRANCH = ...  # type: str
+CALL = ...  # type: str
+CATEGORY = ...  # type: str
+CHARSET = ...  # type: str
+GROUPREF = ...  # type: str
+GROUPREF_IGNORE = ...  # type: str
+GROUPREF_EXISTS = ...  # type: str
+IN = ...  # type: str
+IN_IGNORE = ...  # type: str
+INFO = ...  # type: str
+JUMP = ...  # type: str
+LITERAL = ...  # type: str
+LITERAL_IGNORE = ...  # type: str
+MARK = ...  # type: str
+MAX_REPEAT = ...  # type: str
+MAX_UNTIL = ...  # type: str
+MIN_REPEAT = ...  # type: str
+MIN_UNTIL = ...  # type: str
+NEGATE = ...  # type: str
+NOT_LITERAL = ...  # type: str
+NOT_LITERAL_IGNORE = ...  # type: str
+RANGE = ...  # type: str
+REPEAT = ...  # type: str
+REPEAT_ONE = ...  # type: str
+SUBPATTERN = ...  # type: str
+MIN_REPEAT_ONE = ...  # type: str
+AT_BEGINNING = ...  # type: str
+AT_BEGINNING_LINE = ...  # type: str
+AT_BEGINNING_STRING = ...  # type: str
+AT_BOUNDARY = ...  # type: str
+AT_NON_BOUNDARY = ...  # type: str
+AT_END = ...  # type: str
+AT_END_LINE = ...  # type: str
+AT_END_STRING = ...  # type: str
+AT_LOC_BOUNDARY = ...  # type: str
+AT_LOC_NON_BOUNDARY = ...  # type: str
+AT_UNI_BOUNDARY = ...  # type: str
+AT_UNI_NON_BOUNDARY = ...  # type: str
+CATEGORY_DIGIT = ...  # type: str
+CATEGORY_NOT_DIGIT = ...  # type: str
+CATEGORY_SPACE = ...  # type: str
+CATEGORY_NOT_SPACE = ...  # type: str
+CATEGORY_WORD = ...  # type: str
+CATEGORY_NOT_WORD = ...  # type: str
+CATEGORY_LINEBREAK = ...  # type: str
+CATEGORY_NOT_LINEBREAK = ...  # type: str
+CATEGORY_LOC_WORD = ...  # type: str
+CATEGORY_LOC_NOT_WORD = ...  # type: str
+CATEGORY_UNI_DIGIT = ...  # type: str
+CATEGORY_UNI_NOT_DIGIT = ...  # type: str
+CATEGORY_UNI_SPACE = ...  # type: str
+CATEGORY_UNI_NOT_SPACE = ...  # type: str
+CATEGORY_UNI_WORD = ...  # type: str
+CATEGORY_UNI_NOT_WORD = ...  # type: str
+CATEGORY_UNI_LINEBREAK = ...  # type: str
+CATEGORY_UNI_NOT_LINEBREAK = ...  # type: str
+
+_T = TypeVar('_T')
+def makedict(list: List[_T]) -> Dict[_T, int]: ...
+
+OP_IGNORE = ...  # type: Dict[str, str]
+AT_MULTILINE = ...  # type: Dict[str, str]
+AT_LOCALE = ...  # type: Dict[str, str]
+AT_UNICODE = ...  # type: Dict[str, str]
+CH_LOCALE = ...  # type: Dict[str, str]
+CH_UNICODE = ...  # type: Dict[str, str]
+SRE_FLAG_TEMPLATE = ...  # type: int
+SRE_FLAG_IGNORECASE = ...  # type: int
+SRE_FLAG_LOCALE = ...  # type: int
+SRE_FLAG_MULTILINE = ...  # type: int
+SRE_FLAG_DOTALL = ...  # type: int
+SRE_FLAG_UNICODE = ...  # type: int
+SRE_FLAG_VERBOSE = ...  # type: int
+SRE_FLAG_DEBUG = ...  # type: int
+SRE_INFO_PREFIX = ...  # type: int
+SRE_INFO_LITERAL = ...  # type: int
+SRE_INFO_CHARSET = ...  # type: int
diff --git a/typeshed/stdlib/2/sre_parse.pyi b/typeshed/stdlib/2/sre_parse.pyi
new file mode 100644
index 0000000..544a618
--- /dev/null
+++ b/typeshed/stdlib/2/sre_parse.pyi
@@ -0,0 +1,63 @@
+# Source: https://hg.python.org/cpython/file/2.7/Lib/sre_parse.py
+
+from typing import Any, Dict, Iterable, List, Match, Optional, Pattern as _Pattern, Set, Tuple, Union
+
+SPECIAL_CHARS = ...  # type: str
+REPEAT_CHARS = ...  # type:     str
+DIGITS = ...  # type: Set
+OCTDIGITS = ...  # type: Set
+HEXDIGITS = ...  # type: Set
+WHITESPACE = ...  # type: Set
+ESCAPES = ...  # type: Dict[str, Tuple[str, int]]
+CATEGORIES = ...  # type: Dict[str, Union[Tuple[str, str], Tuple[str, List[Tuple[str, str]]]]]
+FLAGS = ...  # type: Dict[str, int]
+
+class Pattern:
+    flags = ...  # type: int
+    open = ...  # type: List[int]
+    groups = ...  # type: int
+    groupdict = ...  # type: Dict[str, int]
+    lookbehind = ...  # type: int
+    def __init__(self) -> None: ...
+    def opengroup(self, name: str = ...) -> int: ...
+    def closegroup(self, gid: int) -> None: ...
+    def checkgroup(self, gid: int) -> bool: ...
+
+
+_OpSubpatternType = Tuple[Optional[int], int, int, SubPattern]
+_OpGroupRefExistsType = Tuple[int, SubPattern, SubPattern]
+_OpInType = List[Tuple[str, int]]
+_OpBranchType = Tuple[None, List[SubPattern]]
+_AvType = Union[_OpInType, _OpBranchType, Iterable[SubPattern], _OpGroupRefExistsType, _OpSubpatternType]
+_CodeType = Union[str, _AvType]
+
+class SubPattern:
+    pattern = ...  # type: str
+    data = ...  # type: List[_CodeType]
+    width = ...  # type: Optional[int]
+    def __init__(self, pattern, data: List[_CodeType] = ...) -> None: ...
+    def dump(self, level: int = ...) -> None: ...
+    def __len__(self) -> int: ...
+    def __delitem__(self, index: Union[int, slice]) -> None: ...
+    def __getitem__(self, index: Union[int, slice]) -> Union[SubPattern, _CodeType]: ...
+    def __setitem__(self, index: Union[int, slice], code: _CodeType) -> None: ...
+    def insert(self, index, code: _CodeType) -> None: ...
+    def append(self, code: _CodeType) -> None: ...
+    def getwidth(self) -> int: ...
+
+class Tokenizer:
+    string = ...  # type: str
+    index = ...  # type: int
+    def __init__(self, string: str) -> None: ...
+    def match(self, char: str, skip: int = ...) -> int: ...
+    def get(self) -> Optional[str]: ...
+    def tell(self) -> Tuple[int, Optional[str]]: ...
+    def seek(self, index: int) -> None: ...
+
+def isident(char: str) -> bool: ...
+def isdigit(char: str) -> bool: ...
+def isname(name: str) -> bool: ...
+def parse(str: str, flags: int = ..., pattern: Pattern = ...) -> SubPattern: ...
+_Template = Tuple[List[Tuple[int, int]], List[Optional[int]]]
+def parse_template(source: str, pattern: _Pattern) -> _Template: ...
+def expand_template(template: _Template, match: Match) -> str: ...
diff --git a/typeshed/stdlib/2/stringold.pyi b/typeshed/stdlib/2/stringold.pyi
new file mode 100644
index 0000000..7d31ebe
--- /dev/null
+++ b/typeshed/stdlib/2/stringold.pyi
@@ -0,0 +1,46 @@
+# Source: https://hg.python.org/cpython/file/2.7/Lib/stringold.py
+from typing import AnyStr, Iterable, List, Optional, Type
+
+whitespace = ...  # type: str
+lowercase = ...  # type: str
+uppercase = ...  # type: str
+letters = ...  # type: str
+digits = ...  # type: str
+hexdigits = ...  # type: str
+octdigits = ...  # type: str
+_idmap = ...  # type: str
+_idmapL = ...  # type: Optional[List[str]]
+index_error = ValueError
+atoi_error = ValueError
+atof_error = ValueError
+atol_error = ValueError
+
+
+def lower(s: AnyStr) -> AnyStr: ...
+def upper(s: AnyStr) -> AnyStr: ...
+def swapcase(s: AnyStr) -> AnyStr: ...
+def strip(s: AnyStr) -> AnyStr: ...
+def lstrip(s: AnyStr) -> AnyStr: ...
+def rstrip(s: AnyStr) -> AnyStr: ...
+def split(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ...
+def splitfields(s: AnyStr, sep: AnyStr = ..., maxsplit: int = ...) -> List[AnyStr]: ...
+def join(words: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ...
+def joinfields(words: Iterable[AnyStr], sep: AnyStr = ...) -> AnyStr: ...
+def index(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+def rindex(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+def count(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+def find(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+def rfind(s: unicode, sub: unicode, start: int = ..., end: int = ...) -> int: ...
+def atof(s: unicode) -> float: ...
+def atoi(s: unicode, base: int = ...) -> int: ...
+def atol(s: unicode, base: int = ...) -> long: ...
+def ljust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ...
+def rjust(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ...
+def center(s: AnyStr, width: int, fillchar: AnyStr = ...) -> AnyStr: ...
+def zfill(s: AnyStr, width: int) -> AnyStr: ...
+def expandtabs(s: AnyStr, tabsize: int = ...) -> AnyStr: ...
+def translate(s: str, table: str, deletions: str = ...) -> str: ...
+def capitalize(s: AnyStr) -> AnyStr: ...
+def capwords(s: AnyStr, sep: AnyStr = ...) -> AnyStr: ...
+def maketrans(fromstr: str, tostr: str) -> str: ...
+def replace(s: AnyStr, old: AnyStr, new: AnyStr, maxreplace: int = ...) -> AnyStr: ...
diff --git a/typeshed/stdlib/2/symbol.pyi b/typeshed/stdlib/2/symbol.pyi
index dd33444..ef41c81 100644
--- a/typeshed/stdlib/2/symbol.pyi
+++ b/typeshed/stdlib/2/symbol.pyi
@@ -88,4 +88,4 @@ testlist1 = ...  # type: int
 encoding_decl = ...  # type: int
 yield_expr = ...  # type: int
 
-symbol = ...  # type: Dict[int, str]
+sym_name = ...  # type: Dict[int, str]
diff --git a/typeshed/stdlib/2/thread.pyi b/typeshed/stdlib/2/thread.pyi
index a54a946..2f0483f 100644
--- a/typeshed/stdlib/2/thread.pyi
+++ b/typeshed/stdlib/2/thread.pyi
@@ -13,7 +13,7 @@ class LockType:
     def locked(self) -> bool: ...
     def locked_lock(self) -> bool: ...
     def __enter__(self) -> LockType: ...
-    def __exit__(self, value: Any, traceback: Any) -> None: ...
+    def __exit__(self, typ: Any, value: Any, traceback: Any) -> None: ...
 
 class _local(object):
     pass
diff --git a/typeshed/stdlib/2/types.pyi b/typeshed/stdlib/2/types.pyi
index e4df50d..87380b3 100644
--- a/typeshed/stdlib/2/types.pyi
+++ b/typeshed/stdlib/2/types.pyi
@@ -81,10 +81,11 @@ class ClassType: ...
 class UnboundMethodType:
     im_class = ...  # type: type
     im_func = ...  # type: FunctionType
-    im_self = ...  # type: Optional[object]
+    im_self = ...  # type: object
     __name__ = ...  # type: str
     __func__ = im_func
     __self__ = im_self
+    def __init__(self, func: Callable, obj: object) -> None: ...
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
 
 class InstanceType:
diff --git a/typeshed/stdlib/2/typing.pyi b/typeshed/stdlib/2/typing.pyi
index 84cd0d4..d8c49a4 100644
--- a/typeshed/stdlib/2/typing.pyi
+++ b/typeshed/stdlib/2/typing.pyi
@@ -24,6 +24,11 @@ ClassVar: _SpecialForm = ...
 
 class GenericMeta(type): ...
 
+# Return type that indicates a function does not return.
+# This type is equivalent to the None type, but the no-op Union is necessary to
+# distinguish the None type from the None value.
+NoReturn = Union[None]
+
 # Type aliases and type constructors
 
 class TypeAlias:
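
The comment above explains the Union[None] spelling; as a rough sketch of how it is
consumed (the function name is illustrative, not part of the patch), Python 2 code
checked against this stub can annotate a non-returning helper like so:

    from typing import NoReturn

    def fatal(msg):
        # type: (str) -> NoReturn
        raise RuntimeError(msg)
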
diff --git a/typeshed/stdlib/2and3/argparse.pyi b/typeshed/stdlib/2and3/argparse.pyi
index c481d9e..323ea46 100644
--- a/typeshed/stdlib/2and3/argparse.pyi
+++ b/typeshed/stdlib/2and3/argparse.pyi
@@ -117,8 +117,10 @@ class Action:
                  option_string: _Text = ...) -> None: ...
 
 class Namespace:
+    def __init__(self, **kwargs: Any) -> None: ...
     def __getattr__(self, name: _Text) -> Any: ...
     def __setattr__(self, name: _Text, value: Any) -> None: ...
+    def __contains__(self, key: str) -> bool: ...
 
 class FileType:
     if sys.version_info >= (3, 4):
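
With __init__ and __contains__ now declared on Namespace, keyword construction and
membership tests should type-check; a minimal illustrative sketch:

    import argparse

    ns = argparse.Namespace(verbose=True)
    if 'verbose' in ns:          # relies on the newly stubbed __contains__
        print(ns.verbose)
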
diff --git a/typeshed/stdlib/2and3/chunk.pyi b/typeshed/stdlib/2and3/chunk.pyi
new file mode 100644
index 0000000..79255c2
--- /dev/null
+++ b/typeshed/stdlib/2and3/chunk.pyi
@@ -0,0 +1,23 @@
+# Source(py2): https://hg.python.org/cpython/file/2.7/Lib/chunk.py
+# Source(py3): https://github.com/python/cpython/blob/master/Lib/chunk.py
+
+from typing import IO
+
+class Chunk:
+    closed = ...  # type: bool
+    align = ...  # type: bool
+    file = ...  # type: IO[bytes]
+    chunkname = ...  # type: bytes
+    chunksize = ...  # type: int
+    size_read = ...  # type: int
+    offset = ...  # type: int
+    seekable = ...  # type: bool
+    def __init__(self, file: IO[bytes], align: bool = ..., bigendian: bool = ..., inclheader: bool = ...) -> None: ...
+    def getname(self) -> bytes: ...
+    def getsize(self) -> int: ...
+    def close(self) -> None: ...
+    def isatty(self) -> bool: ...
+    def seek(self, pos: int, whence: int = ...) -> None: ...
+    def tell(self) -> int: ...
+    def read(self, size: int = ...) -> bytes: ...
+    def skip(self) -> None: ...
diff --git a/typeshed/stdlib/2and3/codeop.pyi b/typeshed/stdlib/2and3/codeop.pyi
new file mode 100644
index 0000000..826e408
--- /dev/null
+++ b/typeshed/stdlib/2and3/codeop.pyi
@@ -0,0 +1,17 @@
+# Source(py2): https://hg.python.org/cpython/file/2.7/Lib/codeop.py
+# Source(py3): https://github.com/python/cpython/blob/master/Lib/codeop.py
+
+from types import CodeType
+from typing import Optional
+
+def compile_command(source: str, filename: str = ..., symbol: str = ...) -> Optional[CodeType]: ...
+
+class Compile:
+    flags = ...  # type: int
+    def __init__(self) -> None: ...
+    def __call__(self, source: str, filename: str, symbol: str) -> CodeType: ...
+
+class CommandCompiler:
+    compiler = ...  # type: Compile
+    def __init__(self) -> None: ...
+    def __call__(self, source: str, filename: str = ..., symbol: str = ...) -> Optional[CodeType]: ...
diff --git a/typeshed/stdlib/2and3/distutils/command/bdist_msi.pyi b/typeshed/stdlib/2and3/distutils/command/bdist_msi.pyi
index 3813699..a761792 100644
--- a/typeshed/stdlib/2and3/distutils/command/bdist_msi.pyi
+++ b/typeshed/stdlib/2and3/distutils/command/bdist_msi.pyi
@@ -1,5 +1,6 @@
-# Stubs for distutils.command.bdist_msi
-
 from distutils.cmd import Command
 
-class bdist_msi(Command): ...
+class bdist_msi(Command):
+    def initialize_options(self) -> None: ...
+    def finalize_options(self) -> None: ...
+    def run(self) -> None: ...
diff --git a/typeshed/stdlib/2and3/distutils/command/build_py.pyi b/typeshed/stdlib/2and3/distutils/command/build_py.pyi
index 23b3f9e..34753e4 100644
--- a/typeshed/stdlib/2and3/distutils/command/build_py.pyi
+++ b/typeshed/stdlib/2and3/distutils/command/build_py.pyi
@@ -1,8 +1,10 @@
-# Stubs for distutils.command.bdist_msi
-
 from distutils.cmd import Command
 import sys
 
 if sys.version_info >= (3,):
-    class build_py(Command): ...
-    class build_py_2to3(Command): ...
+    class build_py(Command):
+        def initialize_options(self) -> None: ...
+        def finalize_options(self) -> None: ...
+        def run(self) -> None: ...
+
+    class build_py_2to3(build_py): ...
diff --git a/typeshed/stdlib/2and3/formatter.pyi b/typeshed/stdlib/2and3/formatter.pyi
new file mode 100644
index 0000000..9ad2978
--- /dev/null
+++ b/typeshed/stdlib/2and3/formatter.pyi
@@ -0,0 +1,105 @@
+# Source: https://hg.python.org/cpython/file/2.7/Lib/formatter.py
+# and https://github.com/python/cpython/blob/master/Lib/formatter.py
+from typing import Any, IO, List, Optional, Tuple
+
+AS_IS = None
+_FontType = Tuple[str, bool, bool, bool]
+_StylesType = Tuple[Any, ...]
+
+class NullFormatter:
+    writer = ...  # type: Optional[NullWriter]
+    def __init__(self, writer: Optional[NullWriter] = ...) -> None: ...
+    def end_paragraph(self, blankline: int) -> None: ...
+    def add_line_break(self) -> None: ...
+    def add_hor_rule(self, *args, **kw) -> None: ...
+    def add_label_data(self, format, counter: int, blankline: Optional[int] = ...) -> None: ...
+    def add_flowing_data(self, data: str) -> None: ...
+    def add_literal_data(self, data: str) -> None: ...
+    def flush_softspace(self) -> None: ...
+    def push_alignment(self, align: Optional[str]) -> None: ...
+    def pop_alignment(self) -> None: ...
+    def push_font(self, x: _FontType) -> None: ...
+    def pop_font(self) -> None: ...
+    def push_margin(self, margin: int) -> None: ...
+    def pop_margin(self) -> None: ...
+    def set_spacing(self, spacing: Optional[str]) -> None: ...
+    def push_style(self, *styles: _StylesType) -> None: ...
+    def pop_style(self, n: int = ...) -> None: ...
+    def assert_line_data(self, flag: int = ...) -> None: ...
+
+class AbstractFormatter:
+    writer = ...  # type: NullWriter
+    align = ...  # type: Optional[str]
+    align_stack = ...  # type: List[Optional[str]]
+    font_stack = ...  # type: List[_FontType]
+    margin_stack = ...  # type: List[int]
+    spacing = ...  # type: Optional[str]
+    style_stack = ...  # type: Any
+    nospace = ...  # type: int
+    softspace = ...  # type: int
+    para_end = ...  # type: int
+    parskip = ...  # type: int
+    hard_break = ...  # type: int
+    have_label = ...  # type: int
+    def __init__(self, writer: NullWriter) -> None: ...
+    def end_paragraph(self, blankline: int) -> None: ...
+    def add_line_break(self) -> None: ...
+    def add_hor_rule(self, *args, **kw) -> None: ...
+    def add_label_data(self, format, counter: int, blankline: Optional[int] = ...) -> None: ...
+    def format_counter(self, format, counter: int) -> str: ...
+    def format_letter(self, case: str, counter: int) -> str: ...
+    def format_roman(self, case: str, counter: int) -> str: ...
+    def add_flowing_data(self, data: str) -> None: ...
+    def add_literal_data(self, data: str) -> None: ...
+    def flush_softspace(self) -> None: ...
+    def push_alignment(self, align: Optional[str]) -> None: ...
+    def pop_alignment(self) -> None: ...
+    def push_font(self, font: _FontType) -> None: ...
+    def pop_font(self) -> None: ...
+    def push_margin(self, margin: int) -> None: ...
+    def pop_margin(self) -> None: ...
+    def set_spacing(self, spacing: Optional[str]) -> None: ...
+    def push_style(self, *styles: _StylesType) -> None: ...
+    def pop_style(self, n: int = ...) -> None: ...
+    def assert_line_data(self, flag: int = ...) -> None: ...
+
+class NullWriter:
+    def __init__(self) -> None: ...
+    def flush(self) -> None: ...
+    def new_alignment(self, align: Optional[str]) -> None: ...
+    def new_font(self, font: _FontType) -> None: ...
+    def new_margin(self, margin: int, level: int) -> None: ...
+    def new_spacing(self, spacing: Optional[str]) -> None: ...
+    def new_styles(self, styles) -> None: ...
+    def send_paragraph(self, blankline: int) -> None: ...
+    def send_line_break(self) -> None: ...
+    def send_hor_rule(self, *args, **kw) -> None: ...
+    def send_label_data(self, data: str) -> None: ...
+    def send_flowing_data(self, data: str) -> None: ...
+    def send_literal_data(self, data: str) -> None: ...
+
+class AbstractWriter(NullWriter):
+    def new_alignment(self, align: Optional[str]) -> None: ...
+    def new_font(self, font: _FontType) -> None: ...
+    def new_margin(self, margin: int, level: int) -> None: ...
+    def new_spacing(self, spacing: Optional[str]) -> None: ...
+    def new_styles(self, styles) -> None: ...
+    def send_paragraph(self, blankline: int) -> None: ...
+    def send_line_break(self) -> None: ...
+    def send_hor_rule(self, *args, **kw) -> None: ...
+    def send_label_data(self, data: str) -> None: ...
+    def send_flowing_data(self, data: str) -> None: ...
+    def send_literal_data(self, data: str) -> None: ...
+
+class DumbWriter(NullWriter):
+    file = ...  # type: IO
+    maxcol = ...  # type: int
+    def __init__(self, file: Optional[IO] = ..., maxcol: int = ...) -> None: ...
+    def reset(self) -> None: ...
+    def send_paragraph(self, blankline: int) -> None: ...
+    def send_line_break(self) -> None: ...
+    def send_hor_rule(self, *args, **kw) -> None: ...
+    def send_literal_data(self, data: str) -> None: ...
+    def send_flowing_data(self, data: str) -> None: ...
+
+def test(file: Optional[str] = ...) -> None: ...
diff --git a/typeshed/stdlib/2and3/ftplib.pyi b/typeshed/stdlib/2and3/ftplib.pyi
index 308544d..128b91d 100644
--- a/typeshed/stdlib/2and3/ftplib.pyi
+++ b/typeshed/stdlib/2and3/ftplib.pyi
@@ -1,6 +1,6 @@
 # Stubs for ftplib (Python 2.7/3)
 import sys
-from typing import Optional, BinaryIO, Tuple, TextIO, Iterable, Callable, List, Union, Iterator, Dict, Text, TypeVar, Generic
+from typing import Optional, BinaryIO, Tuple, TextIO, Iterable, Callable, List, Union, Iterator, Dict, Text, TypeVar, Generic, Any
 from types import TracebackType
 from socket import socket
 from ssl import SSLContext
@@ -82,11 +82,11 @@ class FTP:
     # In practice, `rest` can actually be anything whose str() is an integer sequence, so to make it simple we allow integers.
     def ntransfercmd(self, cmd: Text, rest: Optional[_IntOrStr] = ...) -> Tuple[socket, int]: ...
     def transfercmd(self, cmd: Text, rest: Optional[_IntOrStr] = ...) -> socket: ...
-    def retrbinary(self, cmd: Text, callback: Callable[[bytes], None], blocksize: int = ..., rest: Optional[_IntOrStr] = ...) -> str: ...
-    def storbinary(self, cmd: Text, fp: BinaryIO, blocksize: int = ..., callback: Optional[Callable[[bytes], None]] = ..., rest: Optional[_IntOrStr] = ...) -> str: ...
+    def retrbinary(self, cmd: Text, callback: Callable[[bytes], Any], blocksize: int = ..., rest: Optional[_IntOrStr] = ...) -> str: ...
+    def storbinary(self, cmd: Text, fp: BinaryIO, blocksize: int = ..., callback: Optional[Callable[[bytes], Any]] = ..., rest: Optional[_IntOrStr] = ...) -> str: ...
 
-    def retrlines(self, cmd: Text, callback: Optional[Callable[[str], None]] = ...) -> str: ...
-    def storlines(self, cmd: Text, fp: BinaryIO, callback: Optional[Callable[[bytes], None]] = ...) -> str: ...
+    def retrlines(self, cmd: Text, callback: Optional[Callable[[str], Any]] = ...) -> str: ...
+    def storlines(self, cmd: Text, fp: BinaryIO, callback: Optional[Callable[[bytes], Any]] = ...) -> str: ...
 
     def acct(self, password: Text) -> str: ...
     def nlst(self, *args: Text) -> List[str]: ...
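
Loosening the callback return type to Any matters because common callbacks such as
file.write return a value; a small sketch (host and file names are illustrative):

    from ftplib import FTP

    ftp = FTP('ftp.example.com')
    with open('remote.bin', 'wb') as f:
        # f.write returns int, which Callable[[bytes], Any] accepts.
        ftp.retrbinary('RETR remote.bin', f.write)
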
diff --git a/typeshed/stdlib/2and3/logging/__init__.pyi b/typeshed/stdlib/2and3/logging/__init__.pyi
index ea3c419..2b60503 100644
--- a/typeshed/stdlib/2and3/logging/__init__.pyi
+++ b/typeshed/stdlib/2and3/logging/__init__.pyi
@@ -366,6 +366,9 @@ if sys.version_info >= (3,):
 
 
 class StreamHandler(Handler):
+    stream = ...  # type: IO[str]
+    if sys.version_info >= (3,):
+        terminator = ...  # type: str
     def __init__(self, stream: Optional[IO[str]] = ...) -> None: ...
 
 
diff --git a/typeshed/stdlib/2and3/logging/handlers.pyi b/typeshed/stdlib/2and3/logging/handlers.pyi
index 2705d56..0dcaee9 100644
--- a/typeshed/stdlib/2and3/logging/handlers.pyi
+++ b/typeshed/stdlib/2and3/logging/handlers.pyi
@@ -29,7 +29,7 @@ class WatchedFileHandler(Handler):
 
 if sys.version_info >= (3,):
     class BaseRotatingHandler(FileHandler):
-        namer = ...  # type: Optional[Callable[[str], None]]
+        namer = ...  # type: Optional[Callable[[str], str]]
         rotator = ...  # type: Optional[Callable[[str, str], None]]
         def __init__(self, filename: str, mode: str,
                      encoding: Optional[str] = ...,
diff --git a/typeshed/stdlib/2and3/mmap.pyi b/typeshed/stdlib/2and3/mmap.pyi
index 87b1f04..933723b 100644
--- a/typeshed/stdlib/2and3/mmap.pyi
+++ b/typeshed/stdlib/2and3/mmap.pyi
@@ -1,21 +1,6 @@
-# Stubs for mmap
-
 import sys
-from types import TracebackType
-from typing import (Optional, Sequence, Union, Generic, TypeVar, overload,
-                    Iterable, Iterator, Sized, Type)
-
-
-_T = TypeVar('_T', str, bytes)
-
-# TODO already in PEP, have to get added to mypy
-_C = TypeVar('_C')
-class _ContextManager(Generic[_C]):
-    def __enter__(self) -> _C: ...
-    def __exit__(self, exc_type: Optional[Type[BaseException]],
-                 exc_val: Optional[Exception],
-                 exc_tb: Optional[TracebackType]) -> bool: ...
-
+from typing import (Optional, Sequence, Union, Generic, overload,
+                    Iterable, Iterator, Sized, ContextManager, AnyStr)
 
 ACCESS_READ = ...  # type: int
 ACCESS_WRITE = ...  # type: int
@@ -31,7 +16,7 @@ if sys.platform != 'win32':
 
     PAGESIZE = ...  # type: int
 
-class _mmap(Generic[_T]):
+class _mmap(Generic[AnyStr]):
     if sys.platform == 'win32':
         def __init__(self, fileno: int, length: int,
                      tagname: Optional[str] = ..., access: int = ...,
@@ -42,23 +27,23 @@ class _mmap(Generic[_T]):
                      prot: int = ..., access: int = ...,
                      offset: int = ...) -> None: ...
     def close(self) -> None: ...
-    def find(self, sub: _T,
+    def find(self, sub: AnyStr,
              start: int = ..., end: int = ...) -> int: ...
     def flush(self, offset: int = ..., size: int = ...) -> int: ...
     def move(self, dest: int, src: int, count: int) -> None: ...
-    def read(self, n: int = ...) -> _T: ...
-    def read_byte(self) -> _T: ...
-    def readline(self) -> _T: ...
+    def read(self, n: int = ...) -> AnyStr: ...
+    def read_byte(self) -> AnyStr: ...
+    def readline(self) -> AnyStr: ...
     def resize(self, newsize: int) -> None: ...
     def seek(self, pos: int, whence: int = ...) -> None: ...
     def size(self) -> int: ...
     def tell(self) -> int: ...
-    def write(self, bytes: _T) -> None: ...
-    def write_byte(self, byte: _T) -> None: ...
+    def write(self, bytes: AnyStr) -> None: ...
+    def write_byte(self, byte: AnyStr) -> None: ...
     def __len__(self) -> int: ...
 
 if sys.version_info >= (3,):
-    class mmap(_mmap, _ContextManager[mmap], Iterable[bytes], Sized):
+    class mmap(_mmap, ContextManager[mmap], Iterable[bytes], Sized):
         closed = ...  # type: bool
         def rfind(self, sub: bytes, start: int = ..., stop: int = ...) -> int: ...
         @overload
diff --git a/typeshed/stdlib/2and3/socket.pyi b/typeshed/stdlib/2and3/socket.pyi
index 37488fd..34ed452 100644
--- a/typeshed/stdlib/2and3/socket.pyi
+++ b/typeshed/stdlib/2and3/socket.pyi
@@ -555,7 +555,7 @@ class socket:
     @overload
     def sendto(self, data: bytes, flags: int, address: Union[tuple, str]) -> int: ...
     def setblocking(self, flag: bool) -> None: ...
-    def settimeout(self, value: Union[float, None]) -> None: ...
+    def settimeout(self, value: Optional[float]) -> None: ...
     def setsockopt(self, level: int, optname: int, value: Union[int, bytes]) -> None: ...
     def shutdown(self, how: int) -> None: ...
 
diff --git a/typeshed/stdlib/2and3/traceback.pyi b/typeshed/stdlib/2and3/traceback.pyi
index 2b83b5d..d53adfe 100644
--- a/typeshed/stdlib/2and3/traceback.pyi
+++ b/typeshed/stdlib/2and3/traceback.pyi
@@ -1,6 +1,6 @@
 # Stubs for traceback
 
-from typing import Generator, IO, Iterator, List, Mapping, Optional, Tuple, Type
+from typing import Any, Dict, Generator, IO, Iterator, List, Mapping, Optional, Tuple, Type
 from types import FrameType, TracebackType
 import sys
 
@@ -19,7 +19,8 @@ if sys.version_info >= (3,):
     def print_last(limit: Optional[int] = ..., file: Optional[IO[str]] = ...,
                    chain: bool = ...) -> None: ...
 else:
-    def print_exception(etype: Type[BaseException], value: BaseException,
+    def print_exception(etype: Optional[Type[BaseException]],
+                        value: Optional[BaseException],
                         tb: Optional[TracebackType], limit: Optional[int] = ...,
                         file: Optional[IO[str]] = ...) -> None: ...
     def print_exc(limit: Optional[int] = ...,
@@ -28,10 +29,17 @@ else:
                    file: Optional[IO[str]] = ...) -> None: ...
 def print_stack(f: Optional[FrameType] = ..., limit: Optional[int] = ...,
                 file: Optional[IO[str]] = ...) -> None: ...
-def extract_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> List[_PT]: ...
-def extract_stack(f: Optional[FrameType] = ...,
-                  limit: Optional[int] = ...) -> List[_PT]: ...
-def format_list(extracted_list: List[_PT]) -> List[str]: ...
+
+if sys.version_info >= (3, 5):
+    def extract_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> StackSummary: ...
+    def extract_stack(f: Optional[FrameType] = ...,
+                      limit: Optional[int] = ...) -> StackSummary: ...
+    def format_list(extracted_list: List[FrameSummary]) -> List[str]: ...
+else:
+    def extract_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> List[_PT]: ...
+    def extract_stack(f: Optional[FrameType] = ...,
+                      limit: Optional[int] = ...) -> List[_PT]: ...
+    def format_list(extracted_list: List[_PT]) -> List[str]: ...
 def format_exception_only(etype: Type[BaseException],
                           value: BaseException) -> List[str]: ...
 if sys.version_info >= (3,):
@@ -40,8 +48,9 @@ if sys.version_info >= (3,):
                          chain: bool = ...) -> List[str]: ...
     def format_exc(limit: Optional[int] = ..., chain: bool = ...) -> str: ...
 else:
-    def format_exception(etype: Type[BaseException], value: BaseException,
-                         tb: TracebackType,
+    def format_exception(etype: Optional[Type[BaseException]],
+                         value: Optional[BaseException],
+                         tb: Optional[TracebackType],
                          limit: Optional[int] = ...) -> List[str]: ...
     def format_exc(limit: Optional[int] = ...) -> str: ...
 def format_tb(tb: Optional[TracebackType], limit: Optional[int] = ...) -> List[str]: ...
@@ -82,7 +91,22 @@ if sys.version_info >= (3, 5):
 
 
 if sys.version_info >= (3, 5):
-    class StackSummary:
+    class FrameSummary:
+        filename: str
+        lineno: int
+        name: str
+        line: str
+        locals: Optional[Dict[str, str]]
+        def __init__(self, filename: str, lineno: int, name: str,
+                     lookup_line: bool = ...,
+                     locals: Optional[Mapping[str, str]] = ...,
+                     line: Optional[str] = ...) -> None: ...
+        # TODO: more precise typing for __getitem__ and __iter__,
+        # for a namedtuple-like view on (filename, lineno, name, str).
+        def __getitem__(self, i: int) -> Any: ...
+        def __iter__(self) -> Iterator[Any]: ...
+
+    class StackSummary(List[FrameSummary]):
         @classmethod
         def extract(cls,
                     frame_gen: Generator[Tuple[FrameType, int], None, None],
@@ -90,11 +114,4 @@ if sys.version_info >= (3, 5):
                     capture_locals: bool = ...) -> StackSummary: ...
         @classmethod
         def from_list(cls, a_list: List[_PT]) -> StackSummary: ...
-
-
-if sys.version_info >= (3, 5):
-    class FrameSummary:
-        def __init__(self, filename: str, lineno: int, name: str,
-                     lookup_line: bool = ...,
-                     locals: Optional[Mapping[str, str]] = ...,
-                     line: Optional[int] = ...) -> None: ...
+        def format(self) -> List[str]: ...
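
With StackSummary modelled as a List[FrameSummary], the usual 3.5+ pattern gets
precise attribute types; a short sketch:

    import sys
    import traceback

    try:
        1 / 0
    except ZeroDivisionError:
        summary = traceback.extract_tb(sys.exc_info()[2])   # StackSummary
        for frame in summary:                                # FrameSummary items
            print(frame.filename, frame.lineno, frame.name)
        print(''.join(summary.format()))
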
diff --git a/typeshed/stdlib/3.4/asyncio/locks.pyi b/typeshed/stdlib/3.4/asyncio/locks.pyi
index 16cb02b..837c50c 100644
--- a/typeshed/stdlib/3.4/asyncio/locks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/locks.pyi
@@ -1,8 +1,8 @@
-from typing import Any, Callable, Generator, Iterable, Iterator, List, Type, TypeVar, Union, Optional
+from typing import Any, Callable, Generator, Iterable, Iterator, List, Type, TypeVar, Union, Optional, Awaitable
 
 from .coroutines import coroutine
 from .events import AbstractEventLoop
-from .futures import Future, Awaitable
+from .futures import Future
 from types import TracebackType
 
 _T = TypeVar('_T')
diff --git a/typeshed/stdlib/3.4/asyncio/streams.pyi b/typeshed/stdlib/3.4/asyncio/streams.pyi
index f8ed236..922552b 100644
--- a/typeshed/stdlib/3.4/asyncio/streams.pyi
+++ b/typeshed/stdlib/3.4/asyncio/streams.pyi
@@ -12,9 +12,12 @@ _ClientConnectedCallback = Callable[[StreamReader, StreamWriter], Optional[Await
 __all__: List[str]
 
 class IncompleteReadError(EOFError):
-    def __init__(self, partial: str, expected: int) -> None: ...
+    expected = ...  # type: Optional[int]
+    partial = ...  # type: bytes
+    def __init__(self, partial: bytes, expected: Optional[int]) -> None: ...
 
 class LimitOverrunError(Exception):
+    consumed = ...  # type: int
     def __init__(self, message: str, consumed: int) -> None: ...
 
 @coroutines.coroutine
@@ -22,7 +25,7 @@ def open_connection(
     host: str = ...,
     port: int = ...,
     *,
-    loop: events.AbstractEventLoop = ...,
+    loop: Optional[events.AbstractEventLoop] = ...,
     limit: int = ...,
     **kwds: Any
 ) -> Generator[Any, None, Tuple[StreamReader, StreamWriter]]: ...
@@ -33,7 +36,7 @@ def start_server(
     host: str = ...,
     port: int = ...,
     *,
-    loop: events.AbstractEventLoop = ...,
+    loop: Optional[events.AbstractEventLoop] = ...,
     limit: int = ...,
     **kwds: Any
 ) -> Generator[Any, None, events.AbstractServer]: ...
@@ -43,7 +46,7 @@ if sys.platform != 'win32':
     def open_unix_connection(
         path: str = ...,
         *,
-        loop: events.AbstractEventLoop = ...,
+        loop: Optional[events.AbstractEventLoop] = ...,
         limit: int = ...,
         **kwds: Any
     ) -> Generator[Any, None, Tuple[StreamReader, StreamWriter]]: ...
@@ -63,7 +66,7 @@ class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
     def __init__(self,
             stream_reader: StreamReader,
             client_connected_cb: _ClientConnectedCallback = ...,
-            loop: events.AbstractEventLoop = ...) -> None: ...
+            loop: Optional[events.AbstractEventLoop] = ...) -> None: ...
     def connection_made(self, transport: transports.BaseTransport) -> None: ...
     def connection_lost(self, exc: Exception) -> None: ...
     def data_received(self, data: bytes) -> None: ...
@@ -89,7 +92,7 @@ class StreamWriter:
 class StreamReader:
     def __init__(self,
             limit: int = ...,
-            loop: events.AbstractEventLoop = ...) -> None: ...
+            loop: Optional[events.AbstractEventLoop] = ...) -> None: ...
     def exception(self) -> Exception: ...
     def set_exception(self, exc: Exception) -> None: ...
     def set_transport(self, transport: transports.BaseTransport) -> None: ...
diff --git a/typeshed/stdlib/3.4/asyncio/tasks.pyi b/typeshed/stdlib/3.4/asyncio/tasks.pyi
index d50af2f..b17c321 100644
--- a/typeshed/stdlib/3.4/asyncio/tasks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/tasks.pyi
@@ -8,6 +8,11 @@ from .futures import Future
 __all__: List[str]
 
 _T = TypeVar('_T')
+_T1 = TypeVar('_T1')
+_T2 = TypeVar('_T2')
+_T3 = TypeVar('_T3')
+_T4 = TypeVar('_T4')
+_T5 = TypeVar('_T5')
 _FutureT = Union[Future[_T], Generator[Any, None, _T], Awaitable[_T]]
 
 FIRST_EXCEPTION = 'FIRST_EXCEPTION'
@@ -19,10 +24,28 @@ def as_completed(fs: Sequence[_FutureT[_T]], *, loop: AbstractEventLoop = ...,
 def ensure_future(coro_or_future: _FutureT[_T],
                   *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
 async = ensure_future
-# TODO: gather() should use variadic type vars instead of _TAny.
-_TAny = Any
-def gather(*coros_or_futures: _FutureT[_TAny],
-           loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[List[_TAny]]: ...
+@overload
+def gather(coro_or_future1: _FutureT[_T1],
+           *, loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[_T1]]: ...
+@overload
+def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2],
+           *, loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[_T1, _T2]]: ...
+@overload
+def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3],
+           *, loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[_T1, _T2, _T3]]: ...
+@overload
+def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3],
+           coro_or_future4: _FutureT[_T4],
+           *, loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[_T1, _T2, _T3, _T4]]: ...
+@overload
+def gather(coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], coro_or_future3: _FutureT[_T3],
+           coro_or_future4: _FutureT[_T4], coro_or_future5: _FutureT[_T5],
+           *, loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[_T1, _T2, _T3, _T4, _T5]]: ...
+@overload
+def gather(coro_or_future1: _FutureT[Any], coro_or_future2: _FutureT[Any], coro_or_future3: _FutureT[Any],
+           coro_or_future4: _FutureT[Any], coro_or_future5: _FutureT[Any], coro_or_future6: _FutureT[Any],
+           *coros_or_futures: _FutureT[Any],
+           loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[Tuple[Any, ...]]: ...
 def run_coroutine_threadsafe(coro: _FutureT[_T],
                              loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ...
 def shield(arg: _FutureT[_T], *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
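
The per-arity overloads let gather() infer a precisely typed tuple instead of
List[Any]; a rough sketch with two hypothetical coroutines (3.5+ syntax used for
brevity):

    import asyncio

    async def get_int() -> int:
        return 1

    async def get_str() -> str:
        return 'one'

    async def main() -> None:
        number, text = await asyncio.gather(get_int(), get_str())
        # Inferred as Tuple[int, str] via the two-argument overload.
        print(number + 1, text.upper())
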
diff --git a/typeshed/stdlib/3.4/enum.pyi b/typeshed/stdlib/3.4/enum.pyi
index dbb9df3..1501be2 100644
--- a/typeshed/stdlib/3.4/enum.pyi
+++ b/typeshed/stdlib/3.4/enum.pyi
@@ -1,10 +1,15 @@
 import sys
 from typing import List, Any, TypeVar, Union, Iterable, Iterator, TypeVar, Generic, Type, Sized, Reversible, Container, Mapping
+from abc import ABCMeta
 
 _T = TypeVar('_T', bound=Enum)
 _S = TypeVar('_S', bound=Type[Enum])
 
-class EnumMeta(type, Iterable[Enum], Sized, Reversible[Enum], Container[Enum]):
+# Note: EnumMeta actually subclasses type directly, not ABCMeta.
+# This is a temporary workaround to allow multiple creation of enums with builtins
+# such as str as mixins, which due to the handling of ABCs of builtin types, cause
+# spurious inconsistent metaclass structure. See #1595.
+class EnumMeta(ABCMeta, Iterable[Enum], Sized, Reversible[Enum], Container[Enum]):
     def __iter__(self: Type[_T]) -> Iterator[_T]: ...
     def __reversed__(self: Type[_T]) -> Iterator[_T]: ...
     def __contains__(self, member: Any) -> bool: ...
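
The mixin pattern the workaround is meant to support, in a short sketch:

    from enum import Enum

    class Color(str, Enum):      # str mixin; previously flagged as an
        RED = 'red'              # inconsistent metaclass structure
        BLUE = 'blue'

    print(Color.RED.upper())     # str methods remain available on members
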
diff --git a/typeshed/stdlib/3/_threading_local.pyi b/typeshed/stdlib/3/_threading_local.pyi
new file mode 100644
index 0000000..a286d2d
--- /dev/null
+++ b/typeshed/stdlib/3/_threading_local.pyi
@@ -0,0 +1,18 @@
+# Source: https://github.com/python/cpython/blob/master/Lib/_threading_local.py
+from typing import Any, Dict, List, Tuple
+from weakref import ReferenceType
+
+__all__: List[str]
+localdict = Dict[Any, Any]
+
+class _localimpl:
+    key: str
+    dicts: Dict[int, Tuple[ReferenceType, localdict]]
+    def __init__(self) -> None: ...
+    def get_dict(self) -> localdict: ...
+    def create_dict(self) -> localdict: ...
+
+class local:
+    def __getattribute__(self, name: str) -> Any: ...
+    def __setattr__(self, name: str, value: Any) -> None: ...
+    def __delattr__(self, name: str) -> None: ...
diff --git a/typeshed/stdlib/3/ast.pyi b/typeshed/stdlib/3/ast.pyi
index 5345676..241f874 100644
--- a/typeshed/stdlib/3/ast.pyi
+++ b/typeshed/stdlib/3/ast.pyi
@@ -3,20 +3,7 @@
 import typing
 from typing import Any, Union, Iterator
 
-from _ast import (
-    Add, alias, And, arg, arguments, Assert, Assign, AST, AsyncFor,
-    AsyncFunctionDef, AsyncWith, Attribute, AugAssign, AugLoad, AugStore,
-    Await, BinOp, BitAnd, BitOr, BitXor, BoolOp, boolop, Break, Bytes, Call,
-    ClassDef, cmpop, Compare, comprehension, Continue, Del, Delete, Dict,
-    DictComp, Div, Ellipsis, Eq, ExceptHandler, Expr, expr, Expression,
-    expr_context, ExtSlice, FloorDiv, For, FunctionDef, GeneratorExp, Global,
-    Gt, GtE, If, IfExp, Import, ImportFrom, In, Index, Interactive, Invert, Is,
-    IsNot, keyword, Lambda, List, ListComp, Load, LShift, Lt, LtE, MatMult,
-    Mod, mod, Module, Mult, Name, NameConstant, Nonlocal, Not, NotEq, NotIn,
-    Num, operator, Or, Param, Pass, Pow, Raise, Return, RShift, Set, SetComp,
-    Slice, slice, Starred, stmt, Store, Str, Sub, Subscript, Suite, Try, Tuple,
-    UAdd, UnaryOp, unaryop, USub, While, With, withitem, Yield, YieldFrom
-)
+from _ast import *
 
 class NodeVisitor():
     def visit(self, node: AST) -> Any: ...
diff --git a/typeshed/stdlib/3/builtins.pyi b/typeshed/stdlib/3/builtins.pyi
index 50d9098..8d0dfb8 100644
--- a/typeshed/stdlib/3/builtins.pyi
+++ b/typeshed/stdlib/3/builtins.pyi
@@ -8,7 +8,7 @@ from typing import (
     ByteString, Optional, AnyStr, Type,
 )
 from abc import abstractmethod, ABCMeta
-from types import TracebackType
+from types import TracebackType, CodeType
 import sys
 from mypy_extensions import NoReturn
 
@@ -50,6 +50,7 @@ class object:
     def __sizeof__(self) -> int: ...
     def __reduce__(self) -> tuple: ...
     def __reduce_ex__(self, protocol: int) -> tuple: ...
+    def __dir__(self) -> Iterable[str]: ...
 
     if sys.version_info >= (3, 6):
         def __init_subclass__(cls) -> None: ...
@@ -104,9 +105,9 @@ class super:
 
 class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     @overload
-    def __init__(self, x: SupportsInt = ...) -> None: ...
+    def __init__(self, x: Union[str, bytes, SupportsInt] = ...) -> None: ...
     @overload
-    def __init__(self, x: Union[str, bytes], base: int = ...) -> None: ...
+    def __init__(self, x: Union[str, bytes], base: int) -> None: ...
 
     def bit_length(self) -> int: ...
     def to_bytes(self, length: int, byteorder: str, *, signed: bool = ...) -> bytes: ...
@@ -304,8 +305,6 @@ class str(Sequence[str]):
     def __iter__(self) -> Iterator[str]: ...
     def __str__(self) -> str: ...
     def __repr__(self) -> str: ...
-    def __int__(self) -> int: ...
-    def __float__(self) -> float: ...
     def __hash__(self) -> int: ...
 
 class bytes(ByteString):
@@ -576,7 +575,7 @@ class function:
     __name__ = ...  # type: str
     __qualname__ = ...  # type: str
     __module__ = ...  # type: str
-    __code__ = ...  # type: Any
+    __code__ = ...  # type: CodeType
     __annotations__ = ...  # type: Dict[str, Any]
 
 class list(MutableSequence[_T], Generic[_T]):
@@ -664,10 +663,10 @@ class set(MutableSet[_T], Generic[_T]):
     def add(self, element: _T) -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> Set[_T]: ...
-    def difference(self, *s: Iterable[Any]) -> Set[_T]: ...
-    def difference_update(self, *s: Iterable[Any]) -> None: ...
+    def difference(self, *s: Iterable[object]) -> Set[_T]: ...
+    def difference_update(self, *s: Iterable[object]) -> None: ...
     def discard(self, element: _T) -> None: ...
-    def intersection(self, *s: Iterable[Any]) -> Set[_T]: ...
+    def intersection(self, *s: Iterable[object]) -> Set[_T]: ...
     def intersection_update(self, *s: Iterable[Any]) -> None: ...
     def isdisjoint(self, s: Iterable[Any]) -> bool: ...
     def issubset(self, s: Iterable[Any]) -> bool: ...
@@ -682,28 +681,28 @@ class set(MutableSet[_T], Generic[_T]):
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_T]: ...
     def __str__(self) -> str: ...
-    def __and__(self, s: AbstractSet[Any]) -> Set[_T]: ...
-    def __iand__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+    def __and__(self, s: AbstractSet[object]) -> Set[_T]: ...
+    def __iand__(self, s: AbstractSet[object]) -> Set[_T]: ...
     def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
     def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
-    def __sub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
-    def __isub__(self, s: AbstractSet[Any]) -> Set[_T]: ...
+    def __sub__(self, s: AbstractSet[object]) -> Set[_T]: ...
+    def __isub__(self, s: AbstractSet[object]) -> Set[_T]: ...
     def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
     def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
-    def __le__(self, s: AbstractSet[Any]) -> bool: ...
-    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
-    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
-    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __le__(self, s: AbstractSet[object]) -> bool: ...
+    def __lt__(self, s: AbstractSet[object]) -> bool: ...
+    def __ge__(self, s: AbstractSet[object]) -> bool: ...
+    def __gt__(self, s: AbstractSet[object]) -> bool: ...
     # TODO more set operations
 
 class frozenset(AbstractSet[_T], Generic[_T]):
     def __init__(self, iterable: Iterable[_T] = ...) -> None: ...
     def copy(self) -> FrozenSet[_T]: ...
-    def difference(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
-    def intersection(self, *s: Iterable[Any]) -> FrozenSet[_T]: ...
+    def difference(self, *s: Iterable[object]) -> FrozenSet[_T]: ...
+    def intersection(self, *s: Iterable[object]) -> FrozenSet[_T]: ...
     def isdisjoint(self, s: Iterable[_T]) -> bool: ...
-    def issubset(self, s: Iterable[Any]) -> bool: ...
-    def issuperset(self, s: Iterable[Any]) -> bool: ...
+    def issubset(self, s: Iterable[object]) -> bool: ...
+    def issuperset(self, s: Iterable[object]) -> bool: ...
     def symmetric_difference(self, s: Iterable[_T]) -> FrozenSet[_T]: ...
     def union(self, *s: Iterable[_T]) -> FrozenSet[_T]: ...
     def __len__(self) -> int: ...
@@ -714,10 +713,10 @@ class frozenset(AbstractSet[_T], Generic[_T]):
     def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
     def __sub__(self, s: AbstractSet[_T]) -> FrozenSet[_T]: ...
     def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T, _S]]: ...
-    def __le__(self, s: AbstractSet[Any]) -> bool: ...
-    def __lt__(self, s: AbstractSet[Any]) -> bool: ...
-    def __ge__(self, s: AbstractSet[Any]) -> bool: ...
-    def __gt__(self, s: AbstractSet[Any]) -> bool: ...
+    def __le__(self, s: AbstractSet[object]) -> bool: ...
+    def __lt__(self, s: AbstractSet[object]) -> bool: ...
+    def __ge__(self, s: AbstractSet[object]) -> bool: ...
+    def __gt__(self, s: AbstractSet[object]) -> bool: ...
 
 class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
     def __init__(self, iterable: Iterable[_T], start: int = 0) -> None: ...
@@ -725,6 +724,9 @@ class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
     def __next__(self) -> Tuple[int, _T]: ...
 
 class range(Sequence[int]):
+    start = ...  # type: int
+    stop = ...  # type: int
+    step = ...  # type: int
     @overload
     def __init__(self, stop: int) -> None: ...
     @overload
@@ -759,24 +761,21 @@ class property:
 NotImplemented = ...  # type: Any
 
 def abs(n: SupportsAbs[_T]) -> _T: ...
-def all(i: Iterable) -> bool: ...
-def any(i: Iterable) -> bool: ...
+def all(i: Iterable[object]) -> bool: ...
+def any(i: Iterable[object]) -> bool: ...
 def ascii(o: object) -> str: ...
 def bin(number: int) -> str: ...
 def callable(o: object) -> bool: ...
 def chr(code: int) -> str: ...
-def compile(source: Any, filename: Union[str, bytes], mode: str, flags: int = 0,
-            dont_inherit: int = 0) -> Any: ...
+def compile(source: Any, filename: Union[str, bytes], mode: str, flags: int = 0, dont_inherit: int = 0) -> CodeType: ...
 def copyright() -> None: ...
 def credits() -> None: ...
 def delattr(o: Any, name: str) -> None: ...
 def dir(o: object = ...) -> List[str]: ...
 _N = TypeVar('_N', int, float)
 def divmod(a: _N, b: _N) -> Tuple[_N, _N]: ...
-def eval(source: str, globals: Optional[Dict[str, Any]] = None,
-         locals: Optional[Mapping[str, Any]] = None) -> Any: ...  # TODO code object as source
-def exec(object: str, globals: Optional[Dict[str, Any]] = None,
-         locals: Optional[Mapping[str, Any]] = None) -> Any: ...  # TODO code object as source
+def eval(source: Union[str, bytes, CodeType], globals: Optional[Dict[str, Any]] = None, locals: Optional[Mapping[str, Any]] = None) -> Any: ...
+def exec(object: Union[str, bytes, CodeType], globals: Optional[Dict[str, Any]] = None, locals: Optional[Mapping[str, Any]] = None) -> Any: ...
 def exit(code: Any = ...) -> NoReturn: ...
 @overload
 def filter(function: Optional[Callable[[_T], Any]],
@@ -900,7 +899,7 @@ class BaseException:
     __cause__ = ...  # type: BaseException
     __context__ = ...  # type: BaseException
     __traceback__ = ...  # type: TracebackType
-    def __init__(self, *args: Any) -> None: ...
+    def __init__(self, *args: object) -> None: ...
     def with_traceback(self, tb: Any) -> BaseException: ...
 
 class GeneratorExit(BaseException): ...
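
With compile() typed as returning CodeType and eval()/exec() accepting one,
round-tripping a code object now checks cleanly; a minimal sketch:

    ns = {}
    code = compile('x = 1 + 1', '<string>', 'exec')    # CodeType
    exec(code, ns)
    print(ns['x'])                                      # 2
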
diff --git a/typeshed/stdlib/3/collections/__init__.pyi b/typeshed/stdlib/3/collections/__init__.pyi
index e10d861..5da5c47 100644
--- a/typeshed/stdlib/3/collections/__init__.pyi
+++ b/typeshed/stdlib/3/collections/__init__.pyi
@@ -59,9 +59,128 @@ else:
     def namedtuple(typename: str, field_names: Union[str, Iterable[str]],
                    verbose: bool = ..., rename: bool = ...) -> Type[tuple]: ...
 
-class UserDict(MutableMapping): ...
-class UserList(MutableSequence): ...
-class UserString(Sequence): ...
+_UserDictT = TypeVar('_UserDictT', bound=UserDict)
+
+class UserDict(MutableMapping[_KT, _VT]):
+    def __len__(self) -> int: ...
+    def __getitem__(self, key: _KT) -> _VT: ...
+    def __setitem__(self, key: _KT, item: _VT) -> None: ...
+    def __delitem__(self, key: _KT) -> None: ...
+    def __iter__(self) -> Iterator[_KT]: ...
+    def __contains__(self, key: object) -> bool: ...
+    def copy(self: _UserDictT) -> _UserDictT: ...
+    @classmethod
+    def fromkeys(cls: Type[_UserDictT], iterable: Iterable[_KT], value: Optional[_VT] = ...) -> _UserDictT: ...
+
+_UserListT = TypeVar('_UserListT', bound=UserList)
+
+class UserList(MutableSequence[_T]):
+    def __init__(self, initlist: Optional[Iterable[_T]] = ...) -> None: ...
+    def __lt__(self, other: object) -> bool: ...
+    def __le__(self, other: object) -> bool: ...
+    def __gt__(self, other: object) -> bool: ...
+    def __ge__(self, other: object) -> bool: ...
+    def __contains__(self, item: object) -> bool: ...
+    def __len__(self) -> int: ...
+    @overload
+    def __getitem__(self, i: int) -> _T: ...
+    @overload
+    def __getitem__(self, i: slice) -> Sequence[_T]: ...
+    @overload
+    def __setitem__(self, i: int, o: _T) -> None: ...
+    @overload
+    def __setitem__(self, i: slice, o: Iterable[_T]) -> None: ...
+    def __delitem__(self, i: Union[int, slice]) -> None: ...
+    def __add__(self: _UserListT, other: Iterable[_T]) -> _UserListT: ...
+    def __iadd__(self: _UserListT, other: Iterable[_T]) -> _UserListT: ...
+    def __mul__(self: _UserListT, n: int) -> _UserListT: ...
+    def __imul__(self: _UserListT, n: int) -> _UserListT: ...
+    def append(self, item: _T) -> None: ...
+    def insert(self, i: int, item: _T) -> None: ...
+    def pop(self, i: int = ...) -> _T: ...
+    def remove(self, item: _T) -> None: ...
+    def clear(self) -> None: ...
+    def copy(self: _UserListT) -> _UserListT: ...
+    def count(self, item: _T) -> int: ...
+    def index(self, item: _T, *args: Any) -> int: ...
+    def reverse(self) -> None: ...
+    def sort(self, *args: Any, **kwds: Any) -> None: ...
+    def extend(self, other: Iterable[_T]) -> None: ...
+
+_UserStringT = TypeVar('_UserStringT', bound=UserString)
+
+class UserString(Sequence[str]):
+    def __init__(self, seq: object) -> None: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __complex__(self) -> complex: ...
+    if sys.version_info >= (3, 5):
+        def __getnewargs__(self) -> Tuple[str]: ...
+    def __lt__(self, string: Union[str, UserString]) -> bool: ...
+    def __le__(self, string: Union[str, UserString]) -> bool: ...
+    def __gt__(self, string: Union[str, UserString]) -> bool: ...
+    def __ge__(self, string: Union[str, UserString]) -> bool: ...
+    def __contains__(self, char: object) -> bool: ...
+    def __len__(self) -> int: ...
+    # It should return a str to implement Sequence correctly, but it doesn't.
+    def __getitem__(self: _UserStringT, i: Union[int, slice]) -> _UserStringT: ...  # type: ignore
+    def __add__(self: _UserStringT, other: object) -> _UserStringT: ...
+    def __mul__(self: _UserStringT, n: int) -> _UserStringT: ...
+    def __mod__(self: _UserStringT, args: Any) -> _UserStringT: ...
+    def capitalize(self: _UserStringT) -> _UserStringT: ...
+    if sys.version_info >= (3, 5):
+        def casefold(self: _UserStringT) -> _UserStringT: ...
+    def center(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ...
+    def count(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ...
+    def encode(self: _UserStringT, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> _UserStringT: ...
+    def endswith(self, suffix: Union[str, Tuple[str, ...]], start: int = ..., end: int = ...) -> bool: ...
+    def expandtabs(self: _UserStringT, tabsize: int = ...) -> _UserStringT: ...
+    def find(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ...
+    def format(self, *args: Any, **kwds: Any) -> str: ...
+    if sys.version_info >= (3, 5):
+        def format_map(self, mapping: Mapping[str, Any]) -> str: ...
+    def index(self, sub: str, start: int = ..., end: int = ...) -> int: ...
+    def isalpha(self) -> bool: ...
+    def isalnum(self) -> bool: ...
+    def isdecimal(self) -> bool: ...
+    def isdigit(self) -> bool: ...
+    def isidentifier(self) -> bool: ...
+    def islower(self) -> bool: ...
+    def isnumeric(self) -> bool: ...
+    if sys.version_info >= (3, 5):
+        def isprintable(self) -> bool: ...
+    def isspace(self) -> bool: ...
+    def istitle(self) -> bool: ...
+    def isupper(self) -> bool: ...
+    def join(self, seq: Iterable[str]) -> str: ...
+    def ljust(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ...
+    def lower(self: _UserStringT) -> _UserStringT: ...
+    def lstrip(self: _UserStringT, chars: Optional[str] = ...) -> _UserStringT: ...
+    if sys.version_info >= (3, 5):
+        @staticmethod
+        @overload
+        def maketrans(x: Union[Dict[int, Any], Dict[str, Any]]) -> Dict[int, Any]: ...
+        @staticmethod
+        @overload
+        def maketrans(x: str, y: str, z: str = ...) -> Dict[int, Any]: ...
+    def partition(self, sep: str) -> Tuple[str, str, str]: ...
+    def replace(self: _UserStringT, old: Union[str, UserString], new: Union[str, UserString], maxsplit: int = ...) -> _UserStringT: ...
+    def rfind(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ...
+    def rindex(self, sub: Union[str, UserString], start: int = ..., end: int = ...) -> int: ...
+    def rjust(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ...
+    def rpartition(self, sep: str) -> Tuple[str, str, str]: ...
+    def rstrip(self: _UserStringT, chars: Optional[str] = ...) -> _UserStringT: ...
+    def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
+    def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
+    def splitlines(self, keepends: bool = ...) -> List[str]: ...
+    def startswith(self, prefix: Union[str, Tuple[str, ...]], start: int = ..., end: int = ...) -> bool: ...
+    def strip(self: _UserStringT, chars: Optional[str] = ...) -> _UserStringT: ...
+    def swapcase(self: _UserStringT) -> _UserStringT: ...
+    def title(self: _UserStringT) -> _UserStringT: ...
+    def translate(self: _UserStringT, *args: Any) -> _UserStringT: ...
+    def upper(self: _UserStringT) -> _UserStringT: ...
+    def zfill(self: _UserStringT, width: int) -> _UserStringT: ...
+
 
 # Technically, deque only derives from MutableSequence in 3.5.
 # But in practice it's not worth losing sleep over.
@@ -125,7 +244,7 @@ class Counter(Dict[_T, int], Generic[_T]):
 
     def elements(self) -> Iterator[_T]: ...
 
-    def most_common(self, n: int = ...) -> List[_T]: ...
+    def most_common(self, n: Optional[int] = ...) -> List[Tuple[_T, int]]: ...
 
     @overload
     def subtract(self, __mapping: Mapping[_T, int]) -> None: ...
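
The corrected most_common() signature shows up directly in inference; a short
sketch:

    from collections import Counter

    counts = Counter('abracadabra')
    for letter, n in counts.most_common(2):   # List[Tuple[str, int]]
        print(letter, n)
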
diff --git a/typeshed/stdlib/3/configparser.pyi b/typeshed/stdlib/3/configparser.pyi
index ff90651..33d6fce 100644
--- a/typeshed/stdlib/3/configparser.pyi
+++ b/typeshed/stdlib/3/configparser.pyi
@@ -1,16 +1,27 @@
 # Based on http://docs.python.org/3.5/library/configparser.html and on
 # reading configparser.py.
 
-from typing import (MutableMapping, Mapping, Dict, Sequence, List, Union,
-                    Iterable, Iterator, Callable, Any, IO, overload, Optional, Pattern)
+import sys
+from typing import (AbstractSet, MutableMapping, Mapping, Dict, Sequence, List,
+                    Union, Iterable, Iterator, Callable, Any, IO, overload,
+                    Optional, Pattern, Type, TypeVar)
 # Types used only in type comments
 from typing import Optional, Tuple  # noqa
 
+if sys.version_info >= (3, 6):
+    from os import PathLike
+
 # Internal type aliases
 _section = Mapping[str, str]
 _parser = MutableMapping[str, _section]
 _converter = Callable[[str], Any]
 _converters = Dict[str, _converter]
+_T = TypeVar('_T')
+
+if sys.version_info >= (3, 6):
+    _Path = Union[str, PathLike[str]]
+else:
+    _Path = str
 
 DEFAULTSECT: str
 MAX_INTERPOLATION_DEPTH: int
@@ -46,7 +57,7 @@ class LegacyInterpolation(Interpolation): ...
 class RawConfigParser(_parser):
     def __init__(self,
                  defaults: Optional[_section] = ...,
-                 dict_type: Mapping[str, str] = ...,
+                 dict_type: Type[Mapping[str, str]] = ...,
                  allow_no_value: bool = ...,
                  *,
                  delimiters: Sequence[str] = ...,
@@ -59,7 +70,7 @@ class RawConfigParser(_parser):
 
     def __len__(self) -> int: ...
 
-    def __getitem__(self, section: str) -> _section: ...
+    def __getitem__(self, section: str) -> SectionProxy: ...
 
     def __setitem__(self, section: str, options: _section) -> None: ...
 
@@ -79,7 +90,7 @@ class RawConfigParser(_parser):
 
     def has_option(self, section: str, option: str) -> bool: ...
 
-    def read(self, filenames: Union[str, Sequence[str]],
+    def read(self, filenames: Union[_Path, Iterable[_Path]],
              encoding: Optional[str] = None) -> List[str]: ...
 
     def read_file(self, f: Iterable[str], source: Optional[str] = None) -> None: ...
@@ -89,18 +100,25 @@ class RawConfigParser(_parser):
     def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]],
                   source: str = ...) -> None: ...
 
+    # These get* methods are partially applied (with the same names) in
+    # SectionProxy; the stubs should be kept updated together
     def getint(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: int = ...) -> int: ...
 
     def getfloat(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: float = ...) -> float: ...
 
     def getboolean(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: bool = ...) -> bool: ...
 
+    def _get_conv(self, section: str, option: str, conv: Callable[[str], _T], *, raw: bool = ..., vars: _section = ..., fallback: _T = ...) -> _T: ...
+
     # This is incompatible with MutableMapping so we ignore the type
     def get(self, section: str, option: str, *, raw: bool = ..., vars: _section = ..., fallback: str = ...) -> str:  # type: ignore
         ...
 
-    # This is incompatible with Mapping so we ignore the type.
-    def items(self, section: str = ..., raw: bool = ..., vars: _section = ...) -> Iterable[Tuple[str, _section]]: ...  # type: ignore
+    @overload
+    def items(self, *, raw: bool = ..., vars: _section = ...) -> AbstractSet[Tuple[str, SectionProxy]]: ...
+
+    @overload
+    def items(self, section: str, raw: bool = ..., vars: _section = ...) -> List[Tuple[str, str]]: ...
 
     def set(self, section: str, option: str, value: str) -> None: ...
 
@@ -144,6 +162,13 @@ class SectionProxy(MutableMapping[str, str]):
     @property
     def name(self) -> str: ...
     def get(self, option: str, fallback: Optional[str] = ..., *, raw: bool = ..., vars: Optional[_section] = ..., **kwargs: Any) -> str: ...  # type: ignore
+
+    # These are partially-applied version of the methods with the same names in
+    # RawConfigParser; the stubs should be kept updated together
+    def getint(self, option: str, *, raw: bool = ..., vars: _section = ..., fallback: int = ...) -> int: ...
+    def getfloat(self, option: str, *, raw: bool = ..., vars: _section = ..., fallback: float = ...) -> float: ...
+    def getboolean(self, option: str, *, raw: bool = ..., vars: _section = ..., fallback: bool = ...) -> bool: ...
+
     # SectionProxy can have arbitrary attributes when custom converters are used
     def __getattr__(self, key: str) -> Callable[..., Any]: ...
 
diff --git a/typeshed/stdlib/3/email/mime/__init__.py b/typeshed/stdlib/3/email/mime/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/pinfer/__init__.py b/typeshed/stdlib/3/email/mime/__init__.pyi
similarity index 100%
rename from pinfer/__init__.py
rename to typeshed/stdlib/3/email/mime/__init__.pyi
diff --git a/typeshed/stdlib/3/email/policy.pyi b/typeshed/stdlib/3/email/policy.pyi
index 97f0c51..6152002 100644
--- a/typeshed/stdlib/3/email/policy.pyi
+++ b/typeshed/stdlib/3/email/policy.pyi
@@ -42,8 +42,7 @@ if sys.version_info >= (3, 3):
         def header_source_parse(self, sourcelines: List[str]) -> str: ...
         def header_store_parse(self, name: str,
                                value: str) -> Tuple[str, str]: ...
-        def header_fetch_parse(self, name: str,  # type: ignore
-                               value: str) -> Union[str, Header]: ...
+        def header_fetch_parse(self, name: str, value: str) -> Union[str, Header]: ...  # type: ignore
         def fold(self, name: str, value: str) -> str: ...
         def fold_binary(self, name: str, value: str) -> bytes: ...
 
diff --git a/typeshed/stdlib/3/email/utils.pyi b/typeshed/stdlib/3/email/utils.pyi
index efc93c1..6c0a183 100644
--- a/typeshed/stdlib/3/email/utils.pyi
+++ b/typeshed/stdlib/3/email/utils.pyi
@@ -9,8 +9,8 @@ _PDTZ = Tuple[int, int, int, int, int, int, int, int, int, Optional[int]]
 
 def quote(str: str) -> str: ...
 def unquote(str: str) -> str: ...
-def parseaddr(address: str) -> Tuple[str, str]: ...
-def formataddr(pair: Tuple[str, str],
+def parseaddr(address: Optional[str]) -> Tuple[str, str]: ...
+def formataddr(pair: Tuple[Optional[str], str],
                charset: Union[str, Charset] = ...) -> str: ...
 def getaddresses(fieldvalues: List[str]) -> List[Tuple[str, str]]: ...
 def parsedate(date: str) -> Optional[Tuple[int, int, int, int, int, int, int, int, int]]: ...
diff --git a/typeshed/stdlib/3/encodings/utf_8.pyi b/typeshed/stdlib/3/encodings/utf_8.pyi
index 0111184..d38bd58 100644
--- a/typeshed/stdlib/3/encodings/utf_8.pyi
+++ b/typeshed/stdlib/3/encodings/utf_8.pyi
@@ -1,14 +1,15 @@
 import codecs
+from typing import Text, Tuple
 
 class IncrementalEncoder(codecs.IncrementalEncoder):
-    pass
+    def encode(self, input: Text, final: bool = ...) -> bytes: ...
+
 class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
-    pass
-class StreamWriter(codecs.StreamWriter):
-    pass
-class StreamReader(codecs.StreamReader):
-    pass
+    def _buffer_decode(self, input: bytes, errors: str, final: bool) -> Tuple[Text, int]: ...
+
+class StreamWriter(codecs.StreamWriter): ...
+class StreamReader(codecs.StreamReader): ...
 
 def getregentry() -> codecs.CodecInfo: ...
-def encode(input: str, errors: str = ...) -> bytes: ...
-def decode(input: bytes, errors: str = ...) -> str: ...
+def encode(input: Text, errors: Text = ...) -> bytes: ...
+def decode(input: bytes, errors: Text = ...) -> Text: ...
diff --git a/typeshed/stdlib/3/fcntl.pyi b/typeshed/stdlib/3/fcntl.pyi
index a50fa0d..1ff20d6 100644
--- a/typeshed/stdlib/3/fcntl.pyi
+++ b/typeshed/stdlib/3/fcntl.pyi
@@ -1,6 +1,6 @@
 # Stubs for fcntl
+from io import IOBase
 from typing import Any, IO, Union
-import typing
 
 FASYNC = ...  # type: int
 FD_CLOEXEC = ...  # type: int
@@ -75,7 +75,7 @@ LOCK_SH = ...  # type: int
 LOCK_UN = ...  # type: int
 LOCK_WRITE = ...  # type: int
 
-_AnyFile = Union[int, IO[Any]]
+_AnyFile = Union[int, IO[Any], IOBase]
 
 # TODO All these return either int or bytes depending on the value of
 # cmd (not on the type of arg).
diff --git a/typeshed/stdlib/3/functools.pyi b/typeshed/stdlib/3/functools.pyi
index be88c28..7f048dc 100644
--- a/typeshed/stdlib/3/functools.pyi
+++ b/typeshed/stdlib/3/functools.pyi
@@ -34,7 +34,7 @@ WRAPPER_ASSIGNMENTS = ...  # type: Sequence[str]
 WRAPPER_UPDATES = ...  # type: Sequence[str]
 
 def update_wrapper(wrapper: _AnyCallable, wrapped: _AnyCallable, assigned: Sequence[str] = ...,
-                   updated: Sequence[str] = ...) -> None: ...
+                   updated: Sequence[str] = ...) -> _AnyCallable: ...
 def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> Callable[[_AnyCallable], _AnyCallable]: ...
 def total_ordering(cls: type) -> type: ...
 def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], Any]: ...
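A small sketch (illustrative, not part of the patch) of why update_wrapper's return type changed: at runtime it returns the wrapper it was given, so the result can be used directly.

    import functools

    def add_one(x: int) -> int:
        """Add one."""
        return x + 1

    def proxy(*args, **kwargs):
        return add_one(*args, **kwargs)

    proxy = functools.update_wrapper(proxy, add_one)  # returns `proxy` itself
    print(proxy.__name__, proxy(41))                  # -> add_one 42
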
diff --git a/typeshed/stdlib/3/gzip.pyi b/typeshed/stdlib/3/gzip.pyi
index c4420fb..8a6f43e 100644
--- a/typeshed/stdlib/3/gzip.pyi
+++ b/typeshed/stdlib/3/gzip.pyi
@@ -1,13 +1,13 @@
-from typing import Any
+from typing import Any, Optional
 import _compression
 
-def open(filename, mode='', compresslevel=9, encoding=None, errors=None, newline=None): ...
+def open(filename, mode: str = ..., compresslevel: int = ..., encoding=None, errors=None, newline=None): ...
 
 class _PaddedFile:
     file = ...  # type: Any
-    def __init__(self, f, prepend=b''): ...
+    def __init__(self, f, prepend: bytes = ...) -> None: ...
     def read(self, size): ...
-    def prepend(self, prepend=b''): ...
+    def prepend(self, prepend: bytes = ...): ...
     def seek(self, off): ...
     def seekable(self): ...
 
@@ -17,15 +17,15 @@ class GzipFile(_compression.BaseStream):
     name = ...  # type: Any
     compress = ...  # type: Any
     fileobj = ...  # type: Any
-    def __init__(self, filename=None, mode=None, compresslevel=9, fileobj=None, mtime=None): ...
+    def __init__(self, filename=None, mode=None, compresslevel: int = ..., fileobj=None, mtime=None) -> None: ...
     @property
     def filename(self): ...
     @property
     def mtime(self): ...
     crc = ...  # type: Any
     def write(self, data): ...
-    def read(self, size=-1): ...
-    def read1(self, size=-1): ...
+    def read(self, size: Optional[int] = ...): ...
+    def read1(self, size: int = ...): ...
     def peek(self, n): ...
     @property
     def closed(self): ...
@@ -37,11 +37,11 @@ class GzipFile(_compression.BaseStream):
     def writable(self): ...
     def seekable(self): ...
     def seek(self, offset, whence=...): ...
-    def readline(self, size=-1): ...
+    def readline(self, size: int = ...): ...
 
 class _GzipReader(_compression.DecompressReader):
-    def __init__(self, fp): ...
-    def read(self, size=-1): ...
+    def __init__(self, fp) -> None: ...
+    def read(self, size: int = ...): ...
 
-def compress(data, compresslevel=9): ...
+def compress(data, compresslevel: int = ...): ...
 def decompress(data): ...
diff --git a/typeshed/stdlib/3/heapq.pyi b/typeshed/stdlib/3/heapq.pyi
index f56ae72..5c49dfa 100644
--- a/typeshed/stdlib/3/heapq.pyi
+++ b/typeshed/stdlib/3/heapq.pyi
@@ -3,7 +3,7 @@
 # Based on http://docs.python.org/3.2/library/heapq.html
 
 import sys
-from typing import TypeVar, List, Iterable, Any, Callable
+from typing import TypeVar, List, Iterable, Any, Callable, Optional
 
 _T = TypeVar('_T')
 
@@ -18,6 +18,6 @@ if sys.version_info >= (3, 5):
 else:
     def merge(*iterables: Iterable[_T]) -> Iterable[_T]: ...
 def nlargest(n: int, iterable: Iterable[_T],
-             key: Callable[[_T], Any] = ...) -> List[_T]: ...
+             key: Optional[Callable[[_T], Any]] = ...) -> List[_T]: ...
 def nsmallest(n: int, iterable: Iterable[_T],
               key: Callable[[_T], Any] = ...) -> List[_T]: ...
diff --git a/typeshed/stdlib/3/http/cookiejar.pyi b/typeshed/stdlib/3/http/cookiejar.pyi
index 801a5a6..3cacc27 100644
--- a/typeshed/stdlib/3/http/cookiejar.pyi
+++ b/typeshed/stdlib/3/http/cookiejar.pyi
@@ -1,5 +1,3 @@
-# Stubs for http.cookiejar (Python 3.4)
-
 from typing import Iterable, Iterator, Optional, Sequence, Tuple, TypeVar, Union, overload
 from http.client import HTTPResponse
 import sys
@@ -28,6 +26,7 @@ class CookieJar(Iterable['Cookie']):
               name: str = ...) -> None: ...
     def clear_session_cookies(self) -> None: ...
     def __iter__(self) -> Iterator['Cookie']: ...
+    def __len__(self) -> int: ...
 
 class FileCookieJar(CookieJar):
     filename = ...  # type: str
diff --git a/typeshed/stdlib/3/io.pyi b/typeshed/stdlib/3/io.pyi
index 4b1deb7..20c63ef 100644
--- a/typeshed/stdlib/3/io.pyi
+++ b/typeshed/stdlib/3/io.pyi
@@ -1,5 +1,3 @@
-# Stubs for io
-
 from typing import (
     List, BinaryIO, TextIO, Iterator, Union, Optional, Callable, Tuple, Any, IO, Iterable
 )
@@ -7,6 +5,7 @@ import builtins
 import codecs
 import sys
 from types import TracebackType
+from typing import TypeVar
 
 DEFAULT_BUFFER_SIZE = ...  # type: int
 
@@ -14,26 +13,22 @@ SEEK_SET = ...  # type: int
 SEEK_CUR = ...  # type: int
 SEEK_END = ...  # type: int
 
-open = builtins.open
+_T = TypeVar('_T', bound='IOBase')
 
-# FIXME when mypy handle condtional, we can uncomment the next block and remove
-# the temporary fix
-# if sys.version_info >= (3, 3):
-#     BlockingIOError = BlockingIOError
-#     class UnsupportedOperation(OSError, ValueError): ...
-# else:
-#     class BlockingIOError(IOError):
-#         characters_written = ...  # type: int
-#     class UnsupportedOperation(IOError, ValueError): ...
-class BlockingIOError(OSError):
-    characters_written = ...  # type: int
-class UnsupportedOperation(OSError, ValueError): ...
+open = builtins.open
 
+if sys.version_info >= (3, 3):
+    BlockingIOError = builtins.BlockingIOError
+    class UnsupportedOperation(OSError, ValueError): ...
+else:
+    class BlockingIOError(IOError):
+        characters_written: int
+    class UnsupportedOperation(IOError, ValueError): ...
 
 class IOBase:
     def __iter__(self) -> Iterator[bytes]: ...
     def __next__(self) -> bytes: ...
-    def __enter__(self) -> 'IOBase': ...
+    def __enter__(self: _T) -> _T: ...
     def __exit__(self, exc_type: Optional[type], exc_val: Optional[Exception],
                  exc_tb: Optional[TracebackType]) -> bool: ...
     def close(self) -> None: ...
@@ -172,7 +167,6 @@ class TextIOBase(IOBase):
     newlines = ...  # type: Union[str, Tuple[str, ...], None]
     def __iter__(self) -> Iterator[str]: ...  # type: ignore
     def __next__(self) -> str: ...  # type: ignore
-    def __enter__(self) -> 'TextIOBase': ...
     def detach(self) -> IOBase: ...
     def write(self, s: str) -> int: ...
     if sys.version_info >= (3, 4):
@@ -257,4 +251,5 @@ class StringIO(TextIOWrapper):
     def getvalue(self) -> str: ...
     def __enter__(self) -> 'StringIO': ...
 
-class IncrementalNewlineDecoder(codecs.IncrementalDecoder): ...
+class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
+    def decode(self, input: codecs._encoded, final: bool = ...) -> codecs._decoded: ...
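A sketch (illustrative only, assuming the self-typed __enter__ shown above) of what the change buys: subclasses of IOBase keep their own type inside a with-block instead of being widened to IOBase.

    import io

    def first_line(raw: bytes) -> bytes:
        buf = io.BytesIO(raw)
        with buf as f:          # f is inferred as BytesIO, not IOBase
            return f.readline()

    print(first_line(b"hello\nworld\n"))   # -> b'hello\n'
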
diff --git a/typeshed/stdlib/3/json/__init__.pyi b/typeshed/stdlib/3/json/__init__.pyi
index ccd121d..c68705d 100644
--- a/typeshed/stdlib/3/json/__init__.pyi
+++ b/typeshed/stdlib/3/json/__init__.pyi
@@ -1,10 +1,10 @@
 import sys
 from typing import Any, IO, Optional, Tuple, Callable, Dict, List, Union
 
-from .decoder import JSONDecoder
-from .encoder import JSONEncoder
+from .decoder import JSONDecoder as JSONDecoder
+from .encoder import JSONEncoder as JSONEncoder
 if sys.version_info >= (3, 5):
-    from .decoder import JSONDecodeError
+    from .decoder import JSONDecodeError as JSONDecodeError
 
 def dumps(obj: Any,
     skipkeys: bool = ...,
diff --git a/typeshed/stdlib/3/json/decoder.pyi b/typeshed/stdlib/3/json/decoder.pyi
index 1c3e9a0..164fcad 100644
--- a/typeshed/stdlib/3/json/decoder.pyi
+++ b/typeshed/stdlib/3/json/decoder.pyi
@@ -11,12 +11,12 @@ if sys.version_info >= (3, 5):
         def __init__(self, msg: str, doc: str, pos: int) -> None: ...
 
 class JSONDecoder:
-    object_hook = None  # type: Callable[[Dict[str, Any]], Any]
-    parse_float = ...  # Callable[[str], Any]
-    parse_int = ...  # Callable[[str], Any]
+    object_hook = ...  # type: Callable[[Dict[str, Any]], Any]
+    parse_float = ...  # type: Callable[[str], Any]
+    parse_int = ...  # type: Callable[[str], Any]
     parse_constant = ...  # Callable[[str], Any]
     strict = ...  # type: bool
-    object_pairs_hook = None  # type: Callable[[List[Tuple[str, Any]]], Any]
+    object_pairs_hook = ...  # type: Callable[[List[Tuple[str, Any]]], Any]
 
     def __init__(self, object_hook: Optional[Callable[[Dict[str, Any]], Any]] = None,
             parse_float: Optional[Callable[[str], Any]] = None,
diff --git a/typeshed/stdlib/3/json/encoder.pyi b/typeshed/stdlib/3/json/encoder.pyi
index c423c18..ced7168 100644
--- a/typeshed/stdlib/3/json/encoder.pyi
+++ b/typeshed/stdlib/3/json/encoder.pyi
@@ -9,7 +9,7 @@ class JSONEncoder:
     check_circular = ...  # type: bool
     allow_nan = ...  # type: bool
     sort_keys = ...  # type: bool
-    indent = None  # type: int
+    indent = ...  # type: int
 
     def __init__(self, skipkeys: bool = ..., ensure_ascii: bool = ...,
             check_circular: bool = ..., allow_nan: bool = ..., sort_keys: bool = ...,
diff --git a/typeshed/stdlib/3/multiprocessing/__init__.pyi b/typeshed/stdlib/3/multiprocessing/__init__.pyi
index 167353a..59b82da 100644
--- a/typeshed/stdlib/3/multiprocessing/__init__.pyi
+++ b/typeshed/stdlib/3/multiprocessing/__init__.pyi
@@ -1,19 +1,24 @@
 # Stubs for multiprocessing
 
-from typing import Any, Callable, Iterable, Mapping, Optional, Dict, List, Union
+from typing import (
+    Any, Callable, ContextManager, Iterable, Mapping, Optional, Dict, List,
+    Union, TypeVar,
+)
 
 from logging import Logger
+from multiprocessing import pool
 from multiprocessing.context import BaseContext
 from multiprocessing.managers import SyncManager
 from multiprocessing.pool import AsyncResult
 from multiprocessing.process import current_process as current_process
 import sys
+import queue
 
-class Lock():
+_T = TypeVar('_T')
+
+class Lock(ContextManager[Lock]):
     def acquire(self, block: bool = ..., timeout: int = ...) -> None: ...
     def release(self) -> None: ...
-    def __enter__(self) -> 'Lock': ...
-    def __exit__(self, exc_type, exc_value, tb) -> None: ...
 
 class Event(object):
     def __init__(self, *, ctx: BaseContext) -> None: ...
@@ -22,84 +27,51 @@ class Event(object):
     def clear(self) -> None: ...
     def wait(self, timeout: Optional[int] = ...) -> bool: ...
 
-class Pool():
-    def __init__(self, processes: Optional[int] = ...,
-                 initializer: Optional[Callable[..., None]] = ...,
-                 initargs: Iterable[Any] = ...,
-                 maxtasksperchild: Optional[int] = ...,
-                 context: Optional[Any] = None) -> None: ...
-    def apply(self,
-              func: Callable[..., Any],
-              args: Iterable[Any] = ...,
-              kwds: Dict[str, Any]=...) -> Any: ...
-    def apply_async(self,
-                func: Callable[..., Any],
-                args: Iterable[Any] = ...,
-                kwds: Dict[str, Any] = ...,
-                callback: Optional[Callable[..., None]] = None,
-                error_callback: Optional[Callable[[BaseException], None]] = None) -> AsyncResult: ...
-    def map(self,
-            func: Callable[..., Any],
-            iterable: Iterable[Any] = ...,
-            chunksize: Optional[int] = ...) -> List[Any]: ...
-    def map_async(self, func: Callable[..., Any],
-                  iterable: Iterable[Any] = ...,
-                  chunksize: Optional[int] = ...,
-                  callback: Optional[Callable[..., None]] = None,
-                  error_callback: Optional[Callable[[BaseException], None]] = None) -> AsyncResult: ...
-    def imap(self,
-             func: Callable[..., Any],
-             iterable: Iterable[Any] = ...,
-             chunksize: Optional[int] = None) -> Iterable[Any]: ...
-    def imap_unordered(self,
-                       func: Callable[..., Any],
-                       iterable: Iterable[Any] = ...,
-                       chunksize: Optional[int] = None) -> Iterable[Any]: ...
-    def starmap(self,
-                func: Callable[..., Any],
-                iterable: Iterable[Iterable[Any]] = ...,
-                chunksize: Optional[int] = None) -> List[Any]: ...
-    def starmap_async(self,
-                      func: Callable[..., Any],
-                      iterable: Iterable[Iterable[Any]] = ...,
-                      chunksize: Optional[int] = ...,
-                      callback: Optional[Callable[..., None]] = None,
-                      error_callback: Optional[Callable[[BaseException], None]] = None) -> AsyncResult: ...
-    def close(self) -> None: ...
-    def terminate(self) -> None: ...
-    def join(self) -> None: ...
-    def __enter__(self) -> 'Pool': ...
-    def __exit__(self, exc_type, exc_val, exc_tb) -> None: ...
+# N.B. This is generated at runtime by partially applying
+# multiprocessing.context.BaseContext.Pool, so the two signatures should be
+# identical (modulo self).
+def Pool(processes: Optional[int] = ...,
+         initializer: Optional[Callable[..., Any]] = ...,
+         initargs: Iterable[Any] = ...,
+         maxtasksperchild: Optional[int] = ...) -> pool.Pool: ...
 
 class Process():
+    name: str
+    daemon: bool
+    pid: Optional[int]
+    exitcode: Optional[int]
+    authkey: bytes
+    sentinel: int
     # TODO: set type of group to None
     def __init__(self,
                  group: Any = ...,
-                 target: Callable = ...,
-                 name: str = ...,
+                 target: Optional[Callable] = ...,
+                 name: Optional[str] = ...,
                  args: Iterable[Any] = ...,
                  kwargs: Mapping[Any, Any] = ...,
-                 daemon: bool = ...) -> None: ...
+                 *,
+                 daemon: Optional[bool] = ...) -> None: ...
     def start(self) -> None: ...
     def run(self) -> None: ...
     def terminate(self) -> None: ...
     def is_alive(self) -> bool: ...
-    def join(self, timeout: float = ...) -> None: ...
+    def join(self, timeout: Optional[float] = ...) -> None: ...
 
-class Queue():
+class Queue(queue.Queue[_T]):
     def __init__(self, maxsize: int = ...) -> None: ...
-    def get(self, block: bool = ..., timeout: float = ...) -> Any: ...
-    def put(self, item: Any, block: bool = ..., timeout: float = ...) -> None: ...
+    def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ...
+    def put(self, item: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ...
     def qsize(self) -> int: ...
     def empty(self) -> bool: ...
     def full(self) -> bool: ...
-    def put_nowait(self, item: Any) -> None: ...
-    def get_nowait(self) -> Any: ...
+    def put_nowait(self, item: _T) -> None: ...
+    def get_nowait(self) -> _T: ...
     def close(self) -> None: ...
     def join_thread(self) -> None: ...
     def cancel_join_thread(self) -> None: ...
 
 class Value():
+    value: Any = ...
     def __init__(self, typecode_or_type: str, *args: Any, lock: bool = ...) -> None: ...
 
 # ----- multiprocessing function stubs -----
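An illustrative sketch (not part of the patch): multiprocessing.Pool is now typed as a factory returning multiprocessing.pool.Pool, mirroring BaseContext.Pool, and Queue is generic over its element type.

    import multiprocessing

    def square(x: int) -> int:
        return x * x

    if __name__ == "__main__":
        with multiprocessing.Pool(processes=2) as p:   # p: multiprocessing.pool.Pool
            print(p.map(square, [1, 2, 3]))            # -> [1, 4, 9]

        q = multiprocessing.Queue()  # type: multiprocessing.Queue[int]
        q.put(10)
        print(q.get())               # typed as int under the new stubs
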
diff --git a/typeshed/stdlib/3/multiprocessing/context.pyi b/typeshed/stdlib/3/multiprocessing/context.pyi
index bd1eed7..ab095c9 100644
--- a/typeshed/stdlib/3/multiprocessing/context.pyi
+++ b/typeshed/stdlib/3/multiprocessing/context.pyi
@@ -3,7 +3,9 @@
 from logging import Logger
 import multiprocessing
 import sys
-from typing import Any, Callable, Optional, List, Sequence, Tuple, Type, Union
+from typing import (
+    Any, Callable, Iterable, Optional, List, Mapping, Sequence, Tuple, Type, Union,
+)
 
 class ProcessError(Exception): ...
 
@@ -49,13 +51,26 @@ class BaseContext(object):
     def JoinableQueue(self, maxsize: int = ...) -> Any: ...
     # TODO: change return to SimpleQueue once a stub exists in multiprocessing.queues
     def SimpleQueue(self) -> Any: ...
+    # N.B. This method is partially applied at runtime to generate
+    # multiprocessing.Pool, so the two signatures should be identical (modulo
+    # self).
     def Pool(
         self,
         processes: Optional[int] = ...,
         initializer: Optional[Callable[..., Any]] = ...,
-        initargs: Tuple = ...,
+        initargs: Iterable[Any] = ...,
         maxtasksperchild: Optional[int] = ...
-    ) -> multiprocessing.Pool: ...
+    ) -> multiprocessing.pool.Pool: ...
+    def Process(
+        self,
+        group: Any = ...,
+        target: Optional[Callable] = ...,
+        name: Optional[str] = ...,
+        args: Iterable[Any] = ...,
+        kwargs: Mapping[Any, Any] = ...,
+        *,
+        daemon: Optional[bool] = ...
+    ) -> multiprocessing.Process: ...
     # TODO: typecode_or_type param is a ctype with a base class of _SimpleCData or array.typecode. Need to figure out
     # how to handle the ctype
     # TODO: change return to RawValue once a stub exists in multiprocessing.sharedctypes
diff --git a/typeshed/stdlib/3/multiprocessing/managers.pyi b/typeshed/stdlib/3/multiprocessing/managers.pyi
index c2bad5b..74634e3 100644
--- a/typeshed/stdlib/3/multiprocessing/managers.pyi
+++ b/typeshed/stdlib/3/multiprocessing/managers.pyi
@@ -5,7 +5,8 @@
 import queue
 import threading
 from typing import (
-    Any, Callable, Dict, Iterable, List, Mapping, Optional, Sequence, TypeVar
+    Any, Callable, ContextManager, Dict, Iterable, List, Mapping, Optional,
+    Sequence, TypeVar,
 )
 
 _T = TypeVar('_T')
@@ -16,13 +17,11 @@ class Namespace: ...
 
 _Namespace = Namespace
 
-class BaseManager:
+class BaseManager(ContextManager[BaseManager]):
     def register(self, typeid: str, callable: Any = ...) -> None: ...
     def shutdown(self) -> None: ...
     def start(self, initializer: Optional[Callable[..., Any]] = ...,
               initargs: Iterable[Any] = ...) -> None: ...
-    def __enter__(self) -> 'BaseManager': ...
-    def __exit__(self, exc_type, exc_value, tb) -> None: ...
 
 class SyncManager(BaseManager):
     def BoundedSemaphore(self, value: Any = ...) -> threading.BoundedSemaphore: ...
diff --git a/typeshed/stdlib/3/multiprocessing/pool.pyi b/typeshed/stdlib/3/multiprocessing/pool.pyi
index c3b7893..bdfd476 100644
--- a/typeshed/stdlib/3/multiprocessing/pool.pyi
+++ b/typeshed/stdlib/3/multiprocessing/pool.pyi
@@ -2,7 +2,12 @@
 
 # NOTE: These are incomplete!
 
-from typing import Any, Callable, Iterable, Mapping, Optional, Dict, List
+from typing import (
+    Any, Callable, ContextManager, Iterable, Mapping, Optional, Dict, List,
+    TypeVar,
+)
+
+_T = TypeVar('_T', bound='Pool')
 
 class AsyncResult():
     def get(self, timeout: float = ...) -> Any: ...
@@ -10,10 +15,12 @@ class AsyncResult():
     def ready(self) -> bool: ...
     def successful(self) -> bool: ...
 
-class ThreadPool():
-    def __init__(self, processes: Optional[int] = None,
-                 initializer: Optional[Callable[..., Any]] = None,
-                 initargs: Iterable[Any] = ...) -> None: ...
+class Pool(ContextManager[Pool]):
+    def __init__(self, processes: Optional[int] = ...,
+                 initializer: Optional[Callable[..., None]] = ...,
+                 initargs: Iterable[Any] = ...,
+                 maxtasksperchild: Optional[int] = ...,
+                 context: Optional[Any] = None) -> None: ...
     def apply(self,
               func: Callable[..., Any],
               args: Iterable[Any] = ...,
@@ -30,7 +37,7 @@ class ThreadPool():
             chunksize: Optional[int] = None) -> List[Any]: ...
     def map_async(self, func: Callable[..., Any],
                   iterable: Iterable[Any] = ...,
-                  chunksize: Optional[Optional[int]] = None,
+                  chunksize: Optional[int] = None,
                   callback: Optional[Callable[..., None]] = None,
                   error_callback: Optional[Callable[[BaseException], None]] = None) -> AsyncResult: ...
     def imap(self,
@@ -54,5 +61,11 @@ class ThreadPool():
     def close(self) -> None: ...
     def terminate(self) -> None: ...
     def join(self) -> None: ...
-    def __enter__(self) -> 'ThreadPool': ...
-    def __exit__(self, exc_type, exc_val, exc_tb) -> None: ...
+    def __enter__(self: _T) -> _T: ...
+
+
+class ThreadPool(Pool, ContextManager[ThreadPool]):
+
+    def __init__(self, processes: Optional[int] = None,
+                 initializer: Optional[Callable[..., Any]] = None,
+                 initargs: Iterable[Any] = ...) -> None: ...
diff --git a/typeshed/stdlib/3/os/__init__.pyi b/typeshed/stdlib/3/os/__init__.pyi
index 3f4a35c..c9e8e7a 100644
--- a/typeshed/stdlib/3/os/__init__.pyi
+++ b/typeshed/stdlib/3/os/__init__.pyi
@@ -321,25 +321,37 @@ def major(device: int) -> int: ...
 def minor(device: int) -> int: ...
 def makedev(major: int, minor: int) -> int: ...
 def mkdir(path: _PathType, mode: int = ...) -> None: ...
-def makedirs(path: _PathType, mode: int = ...,
-             exist_ok: bool = ...) -> None: ...
+if sys.version_info >= (3, 4):
+    def makedirs(name: _PathType, mode: int = ...,
+                 exist_ok: bool = ...) -> None: ...
+else:
+    def makedirs(path: _PathType, mode: int = ...,
+                 exist_ok: bool = ...) -> None: ...
 def pathconf(path: _PathType, name: Union[str, int]) -> int: ...  # Unix only
 if sys.version_info >= (3, 6):
     def readlink(path: Union[AnyStr, PathLike[AnyStr]]) -> AnyStr: ...
 else:
     def readlink(path: AnyStr) -> AnyStr: ...
 def remove(path: _PathType) -> None: ...
-def removedirs(path: _PathType) -> None: ...
+if sys.version_info >= (3, 4):
+    def removedirs(name: _PathType) -> None: ...
+else:
+    def removedirs(path: _PathType) -> None: ...
 def rename(src: _PathType, dst: _PathType) -> None: ...
 def renames(old: _PathType, new: _PathType) -> None: ...
 if sys.version_info >= (3, 3):
     def replace(src: _PathType, dst: _PathType) -> None: ...
 def rmdir(path: _PathType) -> None: ...
-if sys.version_info >= (3, 5):
+if sys.version_info >= (3, 6):
+    @overload
+    def scandir() -> Iterator[DirEntry[str]]: ...
+    @overload
+    def scandir(path: Union[AnyStr, PathLike[AnyStr]]) -> Iterator[DirEntry[AnyStr]]: ...
+elif sys.version_info >= (3, 5):
     @overload
-    def scandir(path: str = ...) -> Iterator[DirEntry[str]]: ...
+    def scandir() -> Iterator[DirEntry[str]]: ...
     @overload
-    def scandir(path: bytes) -> Iterator[DirEntry[bytes]]: ...
+    def scandir(path: AnyStr) -> Iterator[DirEntry[AnyStr]]: ...
 def stat(path: _PathType) -> stat_result: ...
 def stat_float_times(newvalue: Union[bool, None] = ...) -> bool: ...
 def statvfs(path: _PathType) -> statvfs_result: ...  # Unix only
@@ -347,9 +359,10 @@ def symlink(source: _PathType, link_name: _PathType,
             target_is_directory: bool = ...) -> None:
     ...  # final argument in Windows only
 def unlink(path: _PathType) -> None: ...
-# TODO: add ns, dir_fd, follow_symlinks argument
 if sys.version_info >= (3, 0):
-    def utime(path: _PathType, times: Optional[Tuple[float, float]] = ...) -> None: ...
+    def utime(path: _PathType, times: Optional[Union[Tuple[int, int], Tuple[float, float]]] = ...,
+              ns: Optional[Tuple[int, int]] = ..., dir_fd: Optional[int] = ...,
+              follow_symlinks: bool = ...) -> None: ...
 else:
     def utime(path: _PathType, times: Optional[Tuple[float, float]]) -> None: ...
 
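A sketch (illustrative only) of the version-gated scandir overloads above: with no argument the entries are DirEntry[str]; passing bytes (3.5+) or a path-like object (3.6+) selects the matching DirEntry type.

    import os

    for entry in os.scandir():            # DirEntry[str]
        if entry.is_file():
            print(entry.name)

    for entry in os.scandir(b"."):        # DirEntry[bytes] on Python 3.5+ (POSIX)
        print(entry.name)
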
diff --git a/typeshed/stdlib/3/os/path.pyi b/typeshed/stdlib/3/os/path.pyi
index 961c7a8..993d3aa 100644
--- a/typeshed/stdlib/3/os/path.pyi
+++ b/typeshed/stdlib/3/os/path.pyi
@@ -92,7 +92,10 @@ def sameopenfile(fp1: int, fp2: int) -> bool: ...
 # def samestat(stat1: stat_result,
 #             stat2: stat_result) -> bool: ...  # Unix only
 
-def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
+if sys.version_info >= (3, 6):
+    def split(path: Union[AnyStr, _PathLike[AnyStr]]) -> Tuple[AnyStr, AnyStr]: ...
+else:
+    def split(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
 def splitdrive(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
 def splitext(path: AnyStr) -> Tuple[AnyStr, AnyStr]: ...
 
diff --git a/typeshed/stdlib/3/queue.pyi b/typeshed/stdlib/3/queue.pyi
index 1d218c7..1eb7360 100644
--- a/typeshed/stdlib/3/queue.pyi
+++ b/typeshed/stdlib/3/queue.pyi
@@ -12,15 +12,19 @@ class Full(Exception): ...
 class Queue(Generic[_T]):
     maxsize = ...  # type: int
     def __init__(self, maxsize: int = ...) -> None: ...
+    def _init(self, maxsize: int) -> None: ...
     def empty(self) -> bool: ...
     def full(self) -> bool: ...
     def get(self, block: bool = ..., timeout: Optional[float] = ...) -> _T: ...
     def get_nowait(self) -> _T: ...
+    def _get(self) -> _T: ...
     def put(self, item: _T, block: bool = ..., timeout: Optional[float] = ...) -> None: ...
     def put_nowait(self, item: _T) -> None: ...
+    def _put(self, item: _T) -> None: ...
     def join(self) -> None: ...
     def qsize(self) -> int: ...
+    def _qsize(self) -> int: ...
     def task_done(self) -> None: ...
 
-class PriorityQueue(Queue): ...
-class LifoQueue(Queue): ...
+class PriorityQueue(Queue[_T]): ...
+class LifoQueue(Queue[_T]): ...
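A short sketch (not part of the patch): with Queue generic and PriorityQueue/LifoQueue parameterised, element types now propagate through put/get.

    from queue import LifoQueue, PriorityQueue

    stack = LifoQueue()      # type: LifoQueue[str]
    stack.put("a")
    stack.put("b")
    print(stack.get())       # "b"; typed as str

    heap = PriorityQueue()   # type: PriorityQueue[int]
    heap.put(3)
    heap.put(1)
    print(heap.get())        # 1; typed as int
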
diff --git a/typeshed/stdlib/3/re.pyi b/typeshed/stdlib/3/re.pyi
index 68a06f7..451774f 100644
--- a/typeshed/stdlib/3/re.pyi
+++ b/typeshed/stdlib/3/re.pyi
@@ -5,64 +5,106 @@
 # based on: http://docs.python.org/3.2/library/re.html
 # and http://hg.python.org/cpython/file/618ea5612e83/Lib/re.py
 
+import sys
 from typing import (
     List, Iterator, overload, Callable, Tuple, Sequence, Dict,
     Generic, AnyStr, Match, Pattern, Any, Optional, Union
 )
 
 # ----- re variables and constants -----
-A = 0
-ASCII = 0
-DEBUG = 0
-I = 0
-IGNORECASE = 0
-L = 0
-LOCALE = 0
-M = 0
-MULTILINE = 0
-S = 0
-DOTALL = 0
-X = 0
-VERBOSE = 0
-U = 0
-UNICODE = 0
-T = 0
-TEMPLATE = 0
+if sys.version_info >= (3, 6):
+    import enum
+    class RegexFlag(enum.IntFlag):
+        A = 0
+        ASCII = 0
+        DEBUG = 0
+        I = 0
+        IGNORECASE = 0
+        L = 0
+        LOCALE = 0
+        M = 0
+        MULTILINE = 0
+        S = 0
+        DOTALL = 0
+        X = 0
+        VERBOSE = 0
+        U = 0
+        UNICODE = 0
+        T = 0
+        TEMPLATE = 0
+
+    A = RegexFlag.A
+    ASCII = RegexFlag.ASCII
+    DEBUG = RegexFlag.DEBUG
+    I = RegexFlag.I
+    IGNORECASE = RegexFlag.IGNORECASE
+    L = RegexFlag.L
+    LOCALE = RegexFlag.LOCALE
+    M = RegexFlag.M
+    MULTILINE = RegexFlag.MULTILINE
+    S = RegexFlag.S
+    DOTALL = RegexFlag.DOTALL
+    X = RegexFlag.X
+    VERBOSE = RegexFlag.VERBOSE
+    U = RegexFlag.U
+    UNICODE = RegexFlag.UNICODE
+    T = RegexFlag.T
+    TEMPLATE = RegexFlag.TEMPLATE
+    _FlagsType = Union[int, RegexFlag]
+else:
+    A = 0
+    ASCII = 0
+    DEBUG = 0
+    I = 0
+    IGNORECASE = 0
+    L = 0
+    LOCALE = 0
+    M = 0
+    MULTILINE = 0
+    S = 0
+    DOTALL = 0
+    X = 0
+    VERBOSE = 0
+    U = 0
+    UNICODE = 0
+    T = 0
+    TEMPLATE = 0
+    _FlagsType = int
 
 class error(Exception): ...
 
 @overload
-def compile(pattern: AnyStr, flags: int = ...) -> Pattern[AnyStr]: ...
+def compile(pattern: AnyStr, flags: _FlagsType = ...) -> Pattern[AnyStr]: ...
 @overload
-def compile(pattern: Pattern[AnyStr], flags: int = ...) -> Pattern[AnyStr]: ...
+def compile(pattern: Pattern[AnyStr], flags: _FlagsType = ...) -> Pattern[AnyStr]: ...
 
 @overload
-def search(pattern: AnyStr, string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
+def search(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Match[AnyStr]: ...
 @overload
-def search(pattern: Pattern[AnyStr], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
+def search(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> Match[AnyStr]: ...
 
 @overload
-def match(pattern: AnyStr, string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
+def match(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Match[AnyStr]: ...
 @overload
-def match(pattern: Pattern[AnyStr], string: AnyStr, flags: int = ...) -> Match[AnyStr]: ...
+def match(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> Match[AnyStr]: ...
 
 # New in Python 3.4
 @overload
-def fullmatch(pattern: AnyStr, string: AnyStr, flags: int = ...) -> Optional[Match[AnyStr]]: ...
+def fullmatch(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Optional[Match[AnyStr]]: ...
 @overload
-def fullmatch(pattern: Pattern[AnyStr], string: AnyStr, flags: int = ...) -> Optional[Match[AnyStr]]: ...
+def fullmatch(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> Optional[Match[AnyStr]]: ...
 
 @overload
 def split(pattern: AnyStr, string: AnyStr,
-          maxsplit: int = ..., flags: int = ...) -> List[AnyStr]: ...
+          maxsplit: int = ..., flags: _FlagsType = ...) -> List[AnyStr]: ...
 @overload
 def split(pattern: Pattern[AnyStr], string: AnyStr,
-          maxsplit: int = ..., flags: int = ...) -> List[AnyStr]: ...
+          maxsplit: int = ..., flags: _FlagsType = ...) -> List[AnyStr]: ...
 
 @overload
-def findall(pattern: AnyStr, string: AnyStr, flags: int = ...) -> List[Any]: ...
+def findall(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> List[Any]: ...
 @overload
-def findall(pattern: Pattern[AnyStr], string: AnyStr, flags: int = ...) -> List[Any]: ...
+def findall(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> List[Any]: ...
 
 # Return an iterator yielding match objects over all non-overlapping matches
 # for the RE pattern in string. The string is scanned left-to-right, and
@@ -70,40 +112,40 @@ def findall(pattern: Pattern[AnyStr], string: AnyStr, flags: int = ...) -> List[
 # result unless they touch the beginning of another match.
 @overload
 def finditer(pattern: AnyStr, string: AnyStr,
-             flags: int = ...) -> Iterator[Match[AnyStr]]: ...
+             flags: _FlagsType = ...) -> Iterator[Match[AnyStr]]: ...
 @overload
 def finditer(pattern: Pattern[AnyStr], string: AnyStr,
-             flags: int = ...) -> Iterator[Match[AnyStr]]: ...
+             flags: _FlagsType = ...) -> Iterator[Match[AnyStr]]: ...
 
 @overload
 def sub(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ...,
-        flags: int = ...) -> AnyStr: ...
+        flags: _FlagsType = ...) -> AnyStr: ...
 @overload
 def sub(pattern: AnyStr, repl: Callable[[Match[AnyStr]], AnyStr],
-        string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ...
+        string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> AnyStr: ...
 @overload
 def sub(pattern: Pattern[AnyStr], repl: AnyStr, string: AnyStr, count: int = ...,
-        flags: int = ...) -> AnyStr: ...
+        flags: _FlagsType = ...) -> AnyStr: ...
 @overload
 def sub(pattern: Pattern[AnyStr], repl: Callable[[Match[AnyStr]], AnyStr],
-        string: AnyStr, count: int = ..., flags: int = ...) -> AnyStr: ...
+        string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> AnyStr: ...
 
 @overload
 def subn(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ...,
-         flags: int = ...) -> Tuple[AnyStr, int]: ...
+         flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ...
 @overload
 def subn(pattern: AnyStr, repl: Callable[[Match[AnyStr]], AnyStr],
          string: AnyStr, count: int = ...,
-         flags: int = ...) -> Tuple[AnyStr, int]: ...
+         flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ...
 @overload
 def subn(pattern: Pattern[AnyStr], repl: AnyStr, string: AnyStr, count: int = ...,
-         flags: int = ...) -> Tuple[AnyStr, int]: ...
+         flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ...
 @overload
 def subn(pattern: Pattern[AnyStr], repl: Callable[[Match[AnyStr]], AnyStr],
          string: AnyStr, count: int = ...,
-         flags: int = ...) -> Tuple[AnyStr, int]: ...
+         flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ...
 
 def escape(string: AnyStr) -> AnyStr: ...
 
 def purge() -> None: ...
-def template(pattern: Union[AnyStr, Pattern[AnyStr]], flags: int = ...) -> Pattern[AnyStr]: ...
+def template(pattern: Union[AnyStr, Pattern[AnyStr]], flags: _FlagsType = ...) -> Pattern[AnyStr]: ...
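A sketch (illustrative only) of the flag handling above: on 3.6+ the flags parameters accept either plain ints or re.RegexFlag members, and flag members can be combined as before.

    import re

    pat = re.compile(r"^item", re.IGNORECASE | re.MULTILINE)   # RegexFlag values
    print(pat.findall("Item 1\nitem 2\n"))                     # -> ['Item', 'item']

    legacy = re.compile(r"x", 0)                               # a bare int is still accepted
    print(bool(legacy.search("xyz")))
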
diff --git a/typeshed/stdlib/3/resource.pyi b/typeshed/stdlib/3/resource.pyi
index 2dcb913..b4c74d6 100644
--- a/typeshed/stdlib/3/resource.pyi
+++ b/typeshed/stdlib/3/resource.pyi
@@ -2,7 +2,7 @@
 
 # NOTE: These are incomplete!
 
-from typing import Tuple
+from typing import Tuple, Optional, NamedTuple
 
 RLIMIT_AS = ...  # type: int
 RLIMIT_CORE = ...  # type: int
@@ -25,7 +25,17 @@ RUSAGE_CHILDREN = ...  # type: int
 RUSAGE_SELF = ...  # type: int
 RUSAGE_THREAD = ...  # type: int
 
+_RUsage = NamedTuple('_RUsage', [('ru_utime', float), ('ru_stime', float), ('ru_maxrss', int),
+                                 ('ru_ixrss', int), ('ru_idrss', int), ('ru_isrss', int),
+                                 ('ru_minflt', int), ('ru_majflt', int), ('ru_nswap', int),
+                                 ('ru_inblock', int), ('ru_oublock', int), ('ru_msgsnd', int),
+                                 ('ru_msgrcv', int), ('ru_nsignals', int), ('ru_nvcsw', int),
+                                 ('ru_nivcsw', int)])
+
+def getpagesize() -> int: ...
 def getrlimit(resource: int) -> Tuple[int, int]: ...
+def getrusage(who: int) -> _RUsage: ...
+def prlimit(pid: int, resource: int, limits: Optional[Tuple[int, int]]) -> Tuple[int, int]: ...
 def setrlimit(resource: int, limits: Tuple[int, int]) -> None: ...
 
 # NOTE: This is an alias of OSError in Python 3.3.
diff --git a/typeshed/stdlib/3/shlex.pyi b/typeshed/stdlib/3/shlex.pyi
index ed23d5a..6936d61 100644
--- a/typeshed/stdlib/3/shlex.pyi
+++ b/typeshed/stdlib/3/shlex.pyi
@@ -2,7 +2,8 @@
 
 # Based on http://docs.python.org/3.2/library/shlex.html
 
-from typing import List, Tuple, Any, TextIO, Union, Optional
+from typing import List, Tuple, Any, TextIO, Union, Optional, Iterator
+import sys
 
 def split(s: str, comments: bool = ...,
           posix: bool = ...) -> List[str]: ...
@@ -10,7 +11,7 @@ def split(s: str, comments: bool = ...,
 # Added in 3.3, use (undocumented) pipes.quote in previous versions.
 def quote(s: str) -> str: ...
 
-class shlex:
+class shlex(Iterator[str]):
     commenters = ...  # type: str
     wordchars = ...  # type: str
     whitespace = ...  # type: str
@@ -25,9 +26,15 @@ class shlex:
     lineno = 0
     token = ...  # type: str
     eof = ...  # type: str
+    if sys.version_info >= (3, 6):
+        punctuation_chars = ...  # type: str
 
-    def __init__(self, instream: Union[str, TextIO] = ..., infile: Optional[str] = ...,
-                 posix: bool = ...) -> None: ...
+    if sys.version_info >= (3, 6):
+        def __init__(self, instream: Union[str, TextIO] = ..., infile: Optional[str] = ...,
+                     posix: bool = ..., punctuation_chars: Union[bool, str] = ...) -> None: ...
+    else:
+        def __init__(self, instream: Union[str, TextIO] = ..., infile: Optional[str] = ...,
+                     posix: bool = ...) -> None: ...
     def get_token(self) -> str: ...
     def push_token(self, tok: str) -> None: ...
     def read_token(self) -> str: ...
diff --git a/typeshed/stdlib/3/smtplib.pyi b/typeshed/stdlib/3/smtplib.pyi
index eea342e..9822382 100644
--- a/typeshed/stdlib/3/smtplib.pyi
+++ b/typeshed/stdlib/3/smtplib.pyi
@@ -1,25 +1,30 @@
-from typing import Any
+from email.message import Message as _Message
+from typing import (
+    Any, AnyStr, Dict, Generic, List, Optional, Sequence, Tuple, Union)
+
+_Reply = Tuple[int, bytes]
+_SendErrs = Dict[str, _Reply]
 
 class SMTPException(OSError): ...
 class SMTPServerDisconnected(SMTPException): ...
 
 class SMTPResponseException(SMTPException):
-    smtp_code = ...  # type: Any
-    smtp_error = ...  # type: Any
-    args = ...  # type: Any
-    def __init__(self, code, msg) -> None: ...
+    smtp_code = ...  # type: int
+    smtp_error = ...  # type: Union[bytes, str]
+    args = ...  # type: Union[Tuple[int, Union[bytes, str]], Tuple[int, bytes, str]]
+    def __init__(self, code: int, msg: Union[bytes, str]) -> None: ...
 
 class SMTPSenderRefused(SMTPResponseException):
-    smtp_code = ...  # type: Any
-    smtp_error = ...  # type: Any
-    sender = ...  # type: Any
-    args = ...  # type: Any
-    def __init__(self, code, msg, sender) -> None: ...
+    smtp_code = ...  # type: int
+    smtp_error = ...  # type: bytes
+    sender = ...  # type: str
+    args = ...  # type: Tuple[int, bytes, str]
+    def __init__(self, code: int, msg: bytes, sender: str) -> None: ...
 
 class SMTPRecipientsRefused(SMTPException):
-    recipients = ...  # type: Any
-    args = ...  # type: Any
-    def __init__(self, recipients) -> None: ...
+    recipients = ...  # type: _SendErrs
+    args = ...  # type: Tuple[_SendErrs]
+    def __init__(self, recipients: _SendErrs) -> None: ...
 
 class SMTPDataError(SMTPResponseException): ...
 class SMTPConnectError(SMTPResponseException): ...
@@ -30,36 +35,37 @@ def quoteaddr(addrstring): ...
 def quotedata(data): ...
 
 class SMTP:
-    debuglevel = ...  # type: Any
+    debuglevel = ...  # type: int
     file = ...  # type: Any
     helo_resp = ...  # type: Any
     ehlo_msg = ...  # type: Any
     ehlo_resp = ...  # type: Any
     does_esmtp = ...  # type: Any
     default_port = ...  # type: Any
-    timeout = ...  # type: Any
+    timeout = ...  # type: float
     esmtp_features = ...  # type: Any
     source_address = ...  # type: Any
     local_hostname = ...  # type: Any
-    def __init__(self, host=..., port=..., local_hostname=..., timeout=...,
-                 source_address=...): ...
+    def __init__(self, host: str = ..., port: int = ...,
+                 local_hostname: Optional[str] = ..., timeout: float = ...,
+                 source_address: Tuple[str, int] = ...) -> None: ...
     def __enter__(self): ...
     def __exit__(self, *args): ...
-    def set_debuglevel(self, debuglevel): ...
+    def set_debuglevel(self, debuglevel: int) -> None: ...
     sock = ...  # type: Any
     def connect(self, host=..., port=..., source_address=...): ...
     def send(self, s): ...
     def putcmd(self, cmd, args=...): ...
-    def getreply(self): ...
+    def getreply(self) -> _Reply: ...
     def docmd(self, cmd, args=...): ...
     def helo(self, name=...): ...
     def ehlo(self, name=...): ...
     def has_extn(self, opt): ...
     def help(self, args=...): ...
-    def rset(self): ...
-    def noop(self): ...
-    def mail(self, sender, options=...): ...
-    def rcpt(self, recip, options=...): ...
+    def rset(self) -> _Reply: ...
+    def noop(self) -> _Reply: ...
+    def mail(self, sender: str, options: Sequence[str] = ...) -> _Reply: ...
+    def rcpt(self, recip: str, options: Sequence[str] = ...) -> _Reply: ...
     def data(self, msg): ...
     def verify(self, address): ...
     vrfy = ...  # type: Any
@@ -67,12 +73,15 @@ class SMTP:
     def ehlo_or_helo_if_needed(self): ...
     def login(self, user, password): ...
     def starttls(self, keyfile=..., certfile=..., context=...): ...
-    def sendmail(self, from_addr, to_addrs, msg, mail_options=...,
-                 rcpt_options=...): ...
-    def send_message(self, msg, from_addr=..., to_addrs=..., mail_options=...,
-                     rcpt_options=...): ...
+    def sendmail(self, from_addr: str, to_addrs: Union[str, Sequence[str]],
+                 msg: Union[bytes, str], mail_options: Sequence[str] = ...,
+                 rcpt_options: List[str] = ...) -> _SendErrs: ...
+    def send_message(self, msg: _Message, from_addr: Optional[str] = ...,
+                     to_addrs: Optional[Union[str, Sequence[str]]] = ...,
+                     mail_options: List[str] = ...,
+                     rcpt_options: Sequence[str] = ...) -> _SendErrs: ...
     def close(self): ...
-    def quit(self): ...
+    def quit(self) -> _Reply: ...
 
 class SMTP_SSL(SMTP):
     default_port = ...  # type: Any
@@ -84,7 +93,9 @@ class SMTP_SSL(SMTP):
 
 class LMTP(SMTP):
     ehlo_msg = ...  # type: Any
-    def __init__(self, host=..., port=..., local_hostname=..., source_address=...) -> None: ...
+    def __init__(self, host: str = ..., port: int = ...,
+                 local_hostname: Optional[str] = ...,
+                 source_address: Optional[Tuple[str, int]] = ...) -> None: ...
     sock = ...  # type: Any
     file = ...  # type: Any
     def connect(self, host=..., port=..., source_address=...): ...
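A sketch (illustrative only; the addresses are placeholders) of the shapes the new aliases describe: noop()/getreply() return an (int, bytes) pair, and sendmail() returns a dict mapping refused recipients to such pairs.

    import smtplib

    def notify(server: smtplib.SMTP) -> None:
        code, banner = server.noop()          # _Reply = Tuple[int, bytes]
        print(code, banner)
        refused = server.sendmail(
            "from@example.org", ["to@example.org"], "Subject: hi\r\n\r\nhello")
        for rcpt, (err_code, err_msg) in refused.items():   # _SendErrs
            print(rcpt, err_code, err_msg)
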
diff --git a/typeshed/stdlib/3/sre_constants.pyi b/typeshed/stdlib/3/sre_constants.pyi
new file mode 100644
index 0000000..c1dba3a
--- /dev/null
+++ b/typeshed/stdlib/3/sre_constants.pyi
@@ -0,0 +1,40 @@
+# Source: https://github.com/python/cpython/blob/master/Lib/sre_constants.py
+
+from typing import Any, Dict, List, Optional, Union
+
+MAGIC = ...  # type: int
+
+class error(Exception):
+    msg = ...  # type: str
+    pattern = ...  # type: Optional[Union[str, bytes]]
+    pos = ...  # type: Optional[int]
+    lineno = ...  # type: int
+    colno = ...  # type: int
+    def __init__(self, msg: str, pattern: Union[str, bytes] = ..., pos: int = ...) -> None: ...
+
+class _NamedIntConstant(int):
+    name = ...  # type: Any
+    def __new__(cls, value: int, name: str): ...
+
+MAXREPEAT = ...  # type: _NamedIntConstant
+OPCODES = ...  # type: List[_NamedIntConstant]
+ATCODES = ...  # type: List[_NamedIntConstant]
+CHCODES = ...  # type: List[_NamedIntConstant]
+OP_IGNORE = ...  # type: Dict[_NamedIntConstant, _NamedIntConstant]
+AT_MULTILINE = ...  # type: Dict[_NamedIntConstant, _NamedIntConstant]
+AT_LOCALE = ...  # type: Dict[_NamedIntConstant, _NamedIntConstant]
+AT_UNICODE = ...  # type: Dict[_NamedIntConstant, _NamedIntConstant]
+CH_LOCALE = ...  # type: Dict[_NamedIntConstant, _NamedIntConstant]
+CH_UNICODE = ...  # type: Dict[_NamedIntConstant, _NamedIntConstant]
+SRE_FLAG_TEMPLATE = ...  # type: int
+SRE_FLAG_IGNORECASE = ...  # type: int
+SRE_FLAG_LOCALE = ...  # type: int
+SRE_FLAG_MULTILINE = ...  # type: int
+SRE_FLAG_DOTALL = ...  # type: int
+SRE_FLAG_UNICODE = ...  # type: int
+SRE_FLAG_VERBOSE = ...  # type: int
+SRE_FLAG_DEBUG = ...  # type: int
+SRE_FLAG_ASCII = ...  # type: int
+SRE_INFO_PREFIX = ...  # type: int
+SRE_INFO_LITERAL = ...  # type: int
+SRE_INFO_CHARSET = ...  # type: int
diff --git a/typeshed/stdlib/3/sre_parse.pyi b/typeshed/stdlib/3/sre_parse.pyi
new file mode 100644
index 0000000..0c9859d
--- /dev/null
+++ b/typeshed/stdlib/3/sre_parse.pyi
@@ -0,0 +1,81 @@
+# Source: https://github.com/python/cpython/blob/master/Lib/sre_parse.py
+
+from typing import (
+    Any, Dict, FrozenSet, Iterable, List, Match,
+    Optional, Pattern as _Pattern, Tuple, Union
+)
+from sre_constants import _NamedIntConstant as NIC, error as _Error
+
+SPECIAL_CHARS = ...  # type: str
+REPEAT_CHARS = ...  # type: str
+DIGITS = ...  # type: FrozenSet[str]
+OCTDIGITS = ...  # type: FrozenSet[str]
+HEXDIGITS = ...  # type: FrozenSet[str]
+ASCIILETTERS = ...  # type: FrozenSet[str]
+WHITESPACE = ...  # type: FrozenSet[str]
+ESCAPES = ...  # type: Dict[str, Tuple[NIC, int]]
+CATEGORIES = ...  # type: Dict[str, Union[Tuple[NIC, NIC], Tuple[NIC, List[Tuple[NIC, NIC]]]]]
+FLAGS = ...  # type: Dict[str, int]
+GLOBAL_FLAGS = ...  # type: int
+
+class Verbose(Exception): ...
+
+class Pattern:
+    flags = ...  # type: int
+    groupdict = ...  # type: Dict[str, int]
+    groupwidths = ...  # type: List[Optional[int]]
+    lookbehindgroups = ...  # type: Optional[int]
+    def __init__(self) -> None: ...
+    @property
+    def groups(self) -> int: ...
+    def opengroup(self, name: str = ...) -> int: ...
+    def closegroup(self, gid: int, p: SubPattern) -> None: ...
+    def checkgroup(self, gid: int) -> bool: ...
+    def checklookbehindgroup(self, gid: int, source: Tokenizer) -> None: ...
+
+
+_OpSubpatternType = Tuple[Optional[int], int, int, SubPattern]
+_OpGroupRefExistsType = Tuple[int, SubPattern, SubPattern]
+_OpInType = List[Tuple[NIC, int]]
+_OpBranchType = Tuple[None, List[SubPattern]]
+_AvType = Union[_OpInType, _OpBranchType, Iterable[SubPattern], _OpGroupRefExistsType, _OpSubpatternType]
+_CodeType = Tuple[NIC, _AvType]
+
+
+class SubPattern:
+    pattern = ...  # type: Pattern
+    data = ...  # type: List[_CodeType]
+    width = ...  # type: Optional[int]
+    def __init__(self, pattern: Pattern, data: List[_CodeType] = ...) -> None: ...
+    def dump(self, level: int = ...) -> None: ...
+    def __len__(self) -> int: ...
+    def __delitem__(self, index: Union[int, slice]) -> None: ...
+    def __getitem__(self, index: Union[int, slice]) -> Union[SubPattern, _CodeType]: ...
+    def __setitem__(self, index: Union[int, slice], code: _CodeType) -> None: ...
+    def insert(self, index: int, code: _CodeType) -> None: ...
+    def append(self, code: _CodeType) -> None: ...
+    def getwidth(self) -> int: ...
+
+
+class Tokenizer:
+    istext = ...  # type: bool
+    string = ...  # type: Any
+    decoded_string = ...  # type: str
+    index = ...  # type: int
+    next = ...  # type: Optional[str]
+    def __init__(self, string: Any) -> None: ...
+    def match(self, char: str) -> bool: ...
+    def get(self) -> Optional[str]: ...
+    def getwhile(self, n: int, charset: Iterable[str]) -> str: ...
+    def getuntil(self, terminator: str) -> str: ...
+    @property
+    def pos(self) -> int: ...
+    def tell(self) -> int: ...
+    def seek(self, index: int) -> None: ...
+    def error(self, msg: str, offset: int = ...) -> _Error: ...
+
+def fix_flags(src: Union[str, bytes], flag: int) -> int: ...
+def parse(str: str, flags: int = ..., pattern: Pattern = ...) -> SubPattern: ...
+_TemplateType = Tuple[List[Tuple[int, int]], List[str]]
+def parse_template(source: str, pattern: _Pattern) -> _TemplateType: ...
+def expand_template(template: _TemplateType, match: Match) -> str: ...
diff --git a/typeshed/stdlib/3/ssl.pyi b/typeshed/stdlib/3/ssl.pyi
index 9880504..060bccd 100644
--- a/typeshed/stdlib/3/ssl.pyi
+++ b/typeshed/stdlib/3/ssl.pyi
@@ -102,6 +102,11 @@ PROTOCOL_TLSv1 = ...  # type: int
 if sys.version_info >= (3, 4):
     PROTOCOL_TLSv1_1 = ...  # type: int
     PROTOCOL_TLSv1_2 = ...  # type: int
+if sys.version_info >= (3, 5):
+    PROTOCOL_TLS = ...  # type: int
+if sys.version_info >= (3, 6):
+    PROTOCOL_TLS_CLIENT = ...  # type: int
+    PROTOCOL_TLS_SERVER = ...  # type: int
 
 OP_ALL = ...  # type: int
 OP_NO_SSLv2 = ...  # type: int
@@ -114,6 +119,8 @@ OP_CIPHER_SERVER_PREFERENCE = ...  # type: int
 OP_SINGLE_DH_USE = ...  # type: int
 OP_SINGLE_ECDH_USE = ...  # type: int
 OP_NO_COMPRESSION = ...  # type: int
+if sys.version_info >= (3, 6):
+    OP_NO_TICKET = ...  # type: int
 
 if sys.version_info >= (3, 5):
     HAS_ALPN = ...  # type: int
@@ -168,6 +175,10 @@ class SSLSocket(socket.socket):
     context = ...  # type: SSLContext
     server_side = ...  # type: bool
     server_hostname = ...  # type: Optional[str]
+    if sys.version_info >= (3, 6):
+        session = ...  # type: Optional[SSLSession]
+        session_reused = ...  # type: Optional[bool]
+
     def read(self, len: int = ...,
              buffer: Optional[bytearray] = ...) -> bytes: ...
     def write(self, buf: bytes) -> int: ...
@@ -237,6 +248,9 @@ if sys.version_info >= (3, 5):
         context = ...  # type: SSLContext
         server_side = ...  # type: bool
         server_hostname = ...  # type: Optional[str]
+        if sys.version_info >= (3, 6):
+            session = ...  # type: Optional[SSLSession]
+            session_reused = ...  # type: bool
         def read(self, len: int = ...,
                  buffer: Optional[bytearray] = ...) -> bytes: ...
         def write(self, buf: bytes) -> int: ...
@@ -257,6 +271,14 @@ if sys.version_info >= (3, 5):
         def write(self, buf: bytes) -> int: ...
         def write_eof(self) -> None: ...
 
+if sys.version_info >= (3, 6):
+    class SSLSession:
+        id = ...  # type: bytes
+        time = ...  # type: int
+        timeout = ...  # type: int
+        ticket_lifetime_hint = ...  # type: int
+        has_ticket = ...  # type: bool
+
 
 # TODO below documented in cpython but not in docs.python.org
 # taken from python 3.4
diff --git a/typeshed/stdlib/3/subprocess.pyi b/typeshed/stdlib/3/subprocess.pyi
index 75d19fe..c0a8613 100644
--- a/typeshed/stdlib/3/subprocess.pyi
+++ b/typeshed/stdlib/3/subprocess.pyi
@@ -238,11 +238,10 @@ else:
                      ) -> Any: ...  # morally: -> _TXT
 
 
-# TODO types
-PIPE = ...  # type: Any
-STDOUT = ...  # type: Any
+PIPE = ...  # type: int
+STDOUT = ...  # type: int
 if sys.version_info >= (3, 3):
-    DEVNULL = ...  # type: Any
+    DEVNULL = ...  # type: int
     class SubprocessError(Exception): ...
     class TimeoutExpired(SubprocessError): ...
 
diff --git a/typeshed/stdlib/3/sys.pyi b/typeshed/stdlib/3/sys.pyi
index b3042e2..76e4cd6 100644
--- a/typeshed/stdlib/3/sys.pyi
+++ b/typeshed/stdlib/3/sys.pyi
@@ -8,7 +8,7 @@ from typing import (
     TypeVar, Callable, Type,
 )
 import sys
-from types import TracebackType
+from types import FrameType, TracebackType
 from mypy_extensions import NoReturn
 
 _T = TypeVar('_T')
@@ -20,18 +20,18 @@ byteorder = ...  # type: str
 builtin_module_names = ...  # type: Sequence[str] # actually a tuple of strings
 copyright = ...  # type: str
 # dllhandle = 0  # Windows only
-dont_write_bytecode = False
+dont_write_bytecode: bool
 __displayhook__ = ...  # type: Any # contains the original value of displayhook
 __excepthook__ = ...  # type: Any  # contains the original value of excepthook
 exec_prefix = ...  # type: str
 executable = ...  # type: str
 float_repr_style = ...  # type: str
-hexversion = 0  # this is a 32-bit int
+hexversion: int
 last_type = ...  # type: Any
 last_value = ...  # type: Any
 last_traceback = ...  # type: Any
-maxsize = 0
-maxunicode = 0
+maxsize: int
+maxunicode: int
 meta_path = ...  # type: List[Any]
 modules = ...  # type: Dict[str, Any]
 path = ...  # type: List[str]
@@ -90,16 +90,16 @@ class _float_info:
 
 hash_info = ...  # type: _hash_info
 class _hash_info:
-    width = 0    # width in bits used for hash values
-    modulus = 0  # prime modulus P used for numeric hash scheme
-    inf = 0      # hash value returned for a positive infinity
-    nan = 0      # hash value returned for a nan
-    imag = 0     # multiplier used for the imaginary part of a complex number
+    width = 0
+    modulus = 0
+    inf = 0
+    nan = 0
+    imag = 0
 
 int_info = ...  # type: _int_info
 class _int_info:
-    bits_per_digit = 0  # number of bits held in each digit. Python integers are stored internally in base 2**int_info.bits_per_digit
-    sizeof_digit = 0    # size in bytes of C type used to represent a digit
+    bits_per_digit = 0
+    sizeof_digit = 0
 
 class _version_info(Tuple[int, int, int, str, int]):
     major = 0
@@ -109,8 +109,6 @@ class _version_info(Tuple[int, int, int, str, int]):
     serial = 0
 version_info = ...  # type: _version_info
 
-
-# ----- sys function stubs -----
 def call_tracing(fn: Callable[..., _T], args: Any) -> _T: ...
 def _clear_type_cache() -> None: ...
 def _current_frames() -> Dict[int, Any]: ...
@@ -122,12 +120,12 @@ def exc_info() -> Tuple[Optional[Type[BaseException]],
                         Optional[BaseException],
                         Optional[TracebackType]]: ...
 # sys.exit() accepts an optional argument of anything printable
-def exit(arg: Any = ...) -> NoReturn:
+def exit(arg: object = ...) -> NoReturn:
     raise SystemExit()
 def getcheckinterval() -> int: ...  # deprecated
 def getdefaultencoding() -> str: ...
 def getdlopenflags() -> int: ...  # Unix only
-def getfilesystemencoding() -> str: ...  # cannot return None
+def getfilesystemencoding() -> str: ...
 def getrefcount(arg: Any) -> int: ...
 def getrecursionlimit() -> int: ...
 
@@ -139,12 +137,18 @@ def getsizeof(obj: object, default: int) -> int: ...
 def getswitchinterval() -> float: ...
 
 @overload
-def _getframe() -> Any: ...
+def _getframe() -> FrameType: ...
 @overload
-def _getframe(depth: int) -> Any: ...
+def _getframe(depth: int) -> FrameType: ...
+
+_ProfileFunc = Callable[[FrameType, str, Any], Any]
+def getprofile() -> Optional[_ProfileFunc]: ...
+def setprofile(profilefunc: _ProfileFunc) -> None: ...
+
+_TraceFunc = Callable[[FrameType, str, Any], Optional[Callable[[FrameType, str, Any], Any]]]
+def gettrace() -> Optional[_TraceFunc]: ...
+def settrace(tracefunc: _TraceFunc) -> None: ...
 
-def getprofile() -> Any: ...  # TODO return type
-def gettrace() -> Any: ...  # TODO return
 def getwindowsversion() -> Any: ...  # Windows only, TODO return type
 def intern(string: str) -> str: ...
 
@@ -153,14 +157,8 @@ if sys.version_info >= (3, 5):
 
 def setcheckinterval(interval: int) -> None: ...  # deprecated
 def setdlopenflags(n: int) -> None: ...  # Linux only
-def setprofile(profilefunc: Any) -> None: ...  # TODO type
 def setrecursionlimit(limit: int) -> None: ...
 def setswitchinterval(interval: float) -> None: ...
-def settrace(tracefunc: Any) -> None: ...  # TODO type
-# Trace functions should have three arguments: frame, event, and arg. frame
-# is the current stack frame. event is a string: 'call', 'line', 'return',
-# 'exception', 'c_call', 'c_return', or 'c_exception'. arg depends on the
-# event type.
 def settscdump(on_flag: bool) -> None: ...
 
 def gettotalrefcount() -> int: ...  # Debug builds only
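A sketch (illustrative only) of a trace function matching the new _TraceFunc alias; the removed prose comment about frame/event/arg is now expressed in the types.

    import sys
    from types import FrameType
    from typing import Any, Callable, Optional

    def tracer(frame: FrameType, event: str, arg: Any) -> Optional[Callable[..., Any]]:
        print(event, frame.f_code.co_name)
        return tracer            # keep tracing nested frames

    def demo() -> int:
        return 1 + 1

    sys.settrace(tracer)
    demo()                       # prints 'call' / 'line' / 'return' events
    sys.settrace(None)           # stop tracing (accepted at runtime)
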
diff --git a/typeshed/stdlib/3/time.pyi b/typeshed/stdlib/3/time.pyi
index 597fb8e..bde37d2 100644
--- a/typeshed/stdlib/3/time.pyi
+++ b/typeshed/stdlib/3/time.pyi
@@ -91,4 +91,4 @@ if sys.version_info >= (3, 3):
     if sys.platform != 'win32':
         def clock_getres(clk_id: int) -> float: ...  # Unix only
         def clock_gettime(clk_id: int) -> float: ...  # Unix only
-        def clock_settime(clk_id: int, time: struct_time) -> float: ...  # Unix only
+        def clock_settime(clk_id: int, time: float) -> None: ...  # Unix only
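
The corrected stub matches the runtime API: clock_settime takes a float timestamp and returns None (Unix only, Python 3.3+, and usually requires elevated privileges). For example:

    import time

    now = time.clock_gettime(time.CLOCK_REALTIME)   # float seconds since the epoch
    time.clock_settime(time.CLOCK_REALTIME, now)    # set the clock from a float; returns None
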
diff --git a/typeshed/stdlib/3/tkinter/__init__.pyi b/typeshed/stdlib/3/tkinter/__init__.pyi
index 4ef7079..f7f7f8c 100644
--- a/typeshed/stdlib/3/tkinter/__init__.pyi
+++ b/typeshed/stdlib/3/tkinter/__init__.pyi
@@ -57,12 +57,12 @@ class Misc:
     def tk_bisque(self): ...
     def tk_setPalette(self, *args, **kw): ...
     def tk_menuBar(self, *args): ...
-    def wait_variable(self, name=''): ...
+    def wait_variable(self, name: str = ...): ...
     waitvar = ...  # type: Any
     def wait_window(self, window=None): ...
     def wait_visibility(self, window=None): ...
-    def setvar(self, name='', value=''): ...
-    def getvar(self, name=''): ...
+    def setvar(self, name: str = ..., value: str = ...): ...
+    def getvar(self, name: str = ...): ...
     def getint(self, s): ...
     def getdouble(self, s): ...
     def getboolean(self, s): ...
@@ -278,14 +278,14 @@ class Tk(Misc, Wm):
     master = ...  # type: Any
     children = ...  # type: Any
     tk = ...  # type: Any
-    def __init__(self, screenName=None, baseName=None, className='', useTk=1, sync=0, use=None): ...
+    def __init__(self, screenName=None, baseName=None, className: str = ..., useTk=1, sync=0, use=None) -> None: ...
     def loadtk(self): ...
     def destroy(self): ...
     def readprofile(self, baseName, className): ...
     def report_callback_exception(self, exc, val, tb): ...
     def __getattr__(self, attr): ...
 
-def Tcl(screenName=None, baseName=None, className='', useTk=0): ...
+def Tcl(screenName=None, baseName=None, className: str = ..., useTk=0): ...
 
 class Pack:
     def pack_configure(self, cnf=..., **kw): ...
@@ -461,7 +461,7 @@ class Listbox(Widget, XView, YView):
 
 class Menu(Widget):
     def __init__(self, master=None, cnf=..., **kw): ...
-    def tk_popup(self, x, y, entry=''): ...
+    def tk_popup(self, x, y, entry: str = ...): ...
     def tk_bindForTraversal(self): ...
     def activate(self, index): ...
     def add(self, itemType, cnf=..., **kw): ...
@@ -603,8 +603,8 @@ class PhotoImage(Image):
     def cget(self, option): ...
     def __getitem__(self, key): ...
     def copy(self): ...
-    def zoom(self, x, y=''): ...
-    def subsample(self, x, y=''): ...
+    def zoom(self, x, y: str = ...): ...
+    def subsample(self, x, y: str = ...): ...
     def get(self, x, y): ...
     def put(self, data, to=None): ...
     def write(self, filename, format=None, from_coords=None): ...
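
These tkinter hunks apply the usual stub convention: literal defaults such as '' become ... plus an explicit parameter type, since a stub documents the interface rather than the runtime default value. Call sites are unaffected; a small sketch (requires a display):

    import tkinter

    root = tkinter.Tk(className='demo')   # className: str = ... in the stub
    root.setvar('answer', '42')           # setvar(name: str = ..., value: str = ...)
    print(root.getvar('answer'))
    root.destroy()
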
diff --git a/typeshed/stdlib/3/types.pyi b/typeshed/stdlib/3/types.pyi
index 9894742..f9f46c5 100644
--- a/typeshed/stdlib/3/types.pyi
+++ b/typeshed/stdlib/3/types.pyi
@@ -140,6 +140,7 @@ class MethodType:
     __func__ = ...  # type: _StaticFunctionType
     __self__ = ...  # type: object
     __name__ = ...  # type: str
+    def __init__(self, func: Callable, obj: object) -> None: ...
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
 class BuiltinFunctionType:
     __self__ = ...  # type: Union[object, ModuleType]
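
The added __init__ records that MethodType can be called directly to bind a plain function to an instance, e.g.:

    import types


    class Greeter:
        pass


    def hello(self: Greeter) -> str:
        return 'hello from ' + type(self).__name__


    bound = types.MethodType(hello, Greeter())   # MethodType(func, obj)
    print(bound())                               # -> 'hello from Greeter'
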
diff --git a/typeshed/stdlib/3/typing.pyi b/typeshed/stdlib/3/typing.pyi
index b8d7327..71f97ce 100644
--- a/typeshed/stdlib/3/typing.pyi
+++ b/typeshed/stdlib/3/typing.pyi
@@ -25,6 +25,11 @@ ClassVar: _SpecialForm = ...
 
 class GenericMeta(type): ...
 
+# Return type that indicates a function does not return.
+# This type is equivalent to the None type, but the no-op Union is necessary to
+# distinguish the None type from the None value.
+NoReturn = Union[None]
+
 # Type aliases and type constructors
 
 class TypeAlias:
@@ -116,9 +121,8 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]):
     def send(self, value: _T_contra) -> _T_co: ...
 
     @abstractmethod
-    def throw(self, typ: Type[BaseException], val: Optional[BaseException] = None,
-              # TODO: tb should be TracebackType but that's defined in types
-              tb: Any = None) -> None: ...
+    def throw(self, typ: Type[BaseException], val: Optional[BaseException] = ...,
+              tb: Optional[TracebackType] = ...) -> None: ...
 
     @abstractmethod
     def close(self) -> None: ...
@@ -144,9 +148,8 @@ class Coroutine(Awaitable[_V_co], Generic[_T_co, _T_contra, _V_co]):
     def send(self, value: _T_contra) -> _T_co: ...
 
     @abstractmethod
-    def throw(self, typ: Type[BaseException], val: Optional[BaseException] = None,
-              # TODO: tb should be TracebackType but that's defined in types
-              tb: Any = None) -> None: ...
+    def throw(self, typ: Type[BaseException], val: Optional[BaseException] = ...,
+              tb: Optional[TracebackType] = ...) -> None: ...
 
     @abstractmethod
     def close(self) -> None: ...
@@ -399,8 +402,7 @@ class IO(Iterator[AnyStr], Generic[AnyStr]):
     def __enter__(self) -> 'IO[AnyStr]': ...
     @abstractmethod
     def __exit__(self, t: Optional[Type[BaseException]], value: Optional[BaseException],
-                 # TODO: traceback should be TracebackType but that's defined in types
-                 traceback: Optional[Any]) -> bool: ...
+                 traceback: Optional[TracebackType]) -> bool: ...
 
 class BinaryIO(IO[bytes]):
     # TODO readinto
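
With TracebackType now used in these signatures (and NoReturn defined above as a no-op Union so the checker can tell the None type apart from the None value), user code can annotate __exit__ and never-returning helpers precisely. A minimal sketch, assuming a typing module that exports NoReturn (it is also available from typing_extensions):

    from types import TracebackType
    from typing import NoReturn, Optional, Type


    def fail(msg: str) -> NoReturn:
        raise RuntimeError(msg)


    class Resource:
        def __enter__(self) -> 'Resource':
            return self

        def __exit__(self, exc_type: Optional[Type[BaseException]],
                     exc: Optional[BaseException],
                     tb: Optional[TracebackType]) -> bool:
            return False   # do not suppress exceptions
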
diff --git a/typeshed/stdlib/3/unittest/__init__.pyi b/typeshed/stdlib/3/unittest/__init__.pyi
index 68d1f56..6c0e409 100644
--- a/typeshed/stdlib/3/unittest/__init__.pyi
+++ b/typeshed/stdlib/3/unittest/__init__.pyi
@@ -11,15 +11,14 @@ from types import ModuleType, TracebackType
 
 
 _T = TypeVar('_T')
-_FT = TypeVar('_FT', bound=Callable[[Any], Any])
+_FT = TypeVar('_FT', bound=Callable[..., Any])
 _E = TypeVar('_E', bound=Exception)
 
 
 def expectedFailure(func: _FT) -> _FT: ...
-# TODO: Once python/mypy#1551 is fixed, the following need _FT instead of Any
-def skip(reason: str) -> Callable[[Any], Any]: ...
-def skipIf(condition: object, reason: str) -> Callable[[Any], Any]: ...
-def skipUnless(condition: object, reason: str) -> Callable[[Any], Any]: ...
+def skip(reason: str) -> Callable[[_FT], _FT]: ...
+def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: ...
+def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: ...
 
 class SkipTest(Exception):
     def __init__(self, reason: str) -> None: ...
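
Typing the skip decorators as Callable[[_FT], _FT] means a decorated test keeps its own signature instead of degrading to Any, so mypy can still check its body. For example:

    import sys
    import unittest


    class MathTest(unittest.TestCase):
        @unittest.skipIf(sys.platform == 'win32', 'POSIX-only behaviour')
        def test_floor_division(self) -> None:
            self.assertEqual(7 // 2, 3)

        @unittest.skip('not implemented yet')
        def test_later(self) -> None:
            self.fail('should have been skipped')


    if __name__ == '__main__':
        unittest.main()
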
diff --git a/typeshed/stdlib/3/unittest/mock.pyi b/typeshed/stdlib/3/unittest/mock.pyi
index 38b4669..2002f89 100644
--- a/typeshed/stdlib/3/unittest/mock.pyi
+++ b/typeshed/stdlib/3/unittest/mock.pyi
@@ -31,9 +31,14 @@ if sys.version_info >= (3, 3):
     class Base:
         def __init__(self, *args: Any, **kwargs: Any) -> None: ...
 
-    class NonCallableMock(Any):
+    # TODO: Get rid of the # type: ignore below.
+    # It is currently required to shut up mypy when run with `--strict`
+    # or `--disallow-subclassing-any`. The `Any` base class is currently
+    # the only way to allow passing an instance of `Mock` to functions
+    # expecting other classes (as is Mock's purpose)
+    class NonCallableMock(Any):  # type: ignore
         def __new__(cls, *args: Any, **kw: Any) -> Any: ...
-        def __init__(self, spec: Optional[Any] = None, wraps: Optional[Any] = None, name: Optional[Any] = None, spec_set: Optional[Any] = None, parent: Optional[Any] = None, _spec_state: Optional[Any] = None, _new_name: Any ='', _new_parent: Optional[Any] = None, _spec_as_instance: Any = False, _eat_self: Optional[Any] = None, unsafe: Any = False, **kwargs: Any) -> None: ...
+        def __init__(self, spec: Optional[Any] = None, wraps: Optional[Any] = None, name: Optional[Any] = None, spec_set: Optional[Any] = None, parent: Optional[Any] = None, _spec_state: Optional[Any] = None, _new_name: Any = ..., _new_parent: Optional[Any] = None, _spec_as_instance: Any = False, _eat_self: Optional[Any] = None, unsafe: Any = False, **kwargs: Any) -> None: ...
         def attach_mock(self, mock: Any, attribute: Any) -> Any: ...
         def mock_add_spec(self, spec: Any, spec_set: Any = False) -> Any: ...
         return_value = ...  # type: Any
@@ -59,7 +64,7 @@ if sys.version_info >= (3, 3):
 
     class CallableMixin(Base):
         side_effect = ...  # type: Any
-        def __init__(self, spec: Optional[Any] = None, side_effect: Optional[Any] = None, return_value: Any = ..., wraps: Optional[Any] = None, name: Optional[Any] = None, spec_set: Optional[Any] = None, parent: Optional[Any] = None, _spec_state: Optional[Any] = None, _new_name: Any = '', _new_parent: Optional[Any] = None, **kwargs: Any) -> None: ...
+        def __init__(self, spec: Optional[Any] = None, side_effect: Optional[Any] = None, return_value: Any = ..., wraps: Optional[Any] = None, name: Optional[Any] = None, spec_set: Optional[Any] = None, parent: Optional[Any] = None, _spec_state: Optional[Any] = None, _new_name: Any = ..., _new_parent: Optional[Any] = None, **kwargs: Any) -> None: ...
         def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ...
 
     class Mock(CallableMixin, NonCallableMock):
@@ -162,7 +167,7 @@ if sys.version_info >= (3, 3):
         name = ...  # type: Any
         def __init__(self, spec: Any, spec_set: Any = False, parent: Optional[Any] = None, name: Optional[Any] = None, ids: Optional[Any] = None, instance: Any = False) -> None: ...
 
-    def mock_open(mock: Optional[Any] = None, read_data: Any = '') -> Any: ...
+    def mock_open(mock: Optional[Any] = None, read_data: Any = ...) -> Any: ...
 
     class PropertyMock(Mock):
         def __get__(self, obj: Any, obj_type: Any) -> Any: ...
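
Because NonCallableMock subclasses Any, a Mock or MagicMock instance is accepted wherever a concrete class is expected, which is the behaviour the comment above preserves; the # type: ignore only silences --disallow-subclassing-any inside the stub itself. A small illustration:

    from unittest import mock


    class Database:
        def query(self, sql: str) -> list:
            raise NotImplementedError


    def count_users(db: Database) -> int:
        return len(db.query('SELECT * FROM users'))


    fake = mock.MagicMock(spec=Database)
    fake.query.return_value = [('alice',), ('bob',)]
    assert count_users(fake) == 2   # accepted by mypy thanks to the Any base class
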
diff --git a/typeshed/stdlib/3/urllib/parse.pyi b/typeshed/stdlib/3/urllib/parse.pyi
index 637fa5f..c8f4c11 100644
--- a/typeshed/stdlib/3/urllib/parse.pyi
+++ b/typeshed/stdlib/3/urllib/parse.pyi
@@ -4,23 +4,7 @@ import sys
 
 _Str = Union[bytes, str]
 
-__all__ = (
-    'urlparse',
-    'urlunparse',
-    'urljoin',
-    'urldefrag',
-    'urlsplit',
-    'urlunsplit',
-    'urlencode',
-    'parse_qs',
-    'parse_qsl',
-    'quote',
-    'quote_plus',
-    'quote_from_bytes',
-    'unquote',
-    'unquote_plus',
-    'unquote_to_bytes'
-)
+__all__ = ...  # type: Tuple[str]
 
 uses_relative = ...  # type: List[str]
 uses_netloc = ...  # type: List[str]
diff --git a/typeshed/tests/mypy_selftest.py b/typeshed/tests/mypy_selftest.py
index 4ee98f7..edc1587 100755
--- a/typeshed/tests/mypy_selftest.py
+++ b/typeshed/tests/mypy_selftest.py
@@ -19,7 +19,7 @@ if __name__ == '__main__':
         shutil.copytree('stdlib', str(dirpath / 'mypy/typeshed/stdlib'))
         shutil.copytree('third_party', str(dirpath / 'mypy/typeshed/third_party'))
         try:
-            subprocess.run(['./runtests.py'], cwd=str(dirpath / 'mypy'), check=True)
+            subprocess.run(['./runtests.py', '-j12'], cwd=str(dirpath / 'mypy'), check=True)
         except subprocess.CalledProcessError as e:
             print('mypy tests failed', file=sys.stderr)
             sys.exit(e.returncode)
diff --git a/typeshed/third_party/2/simplejson/__init__.pyi b/typeshed/third_party/2/simplejson/__init__.pyi
index 5f94eb9..4565543 100644
--- a/typeshed/third_party/2/simplejson/__init__.pyi
+++ b/typeshed/third_party/2/simplejson/__init__.pyi
@@ -1,8 +1,8 @@
 from typing import Any, IO
 
-from simplejson.scanner import JSONDecodeError
-from simplejson.decoder import JSONDecoder
-from simplejson.encoder import JSONEncoder, JSONEncoderForHTML
+from simplejson.scanner import JSONDecodeError as JSONDecodeError
+from simplejson.decoder import JSONDecoder as JSONDecoder
+from simplejson.encoder import JSONEncoder as JSONEncoder, JSONEncoderForHTML as JSONEncoderForHTML
 
 def dumps(obj: Any, *args: Any, **kwds: Any) -> str: ...
 def dump(obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ...
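
The `X as X` form is the typeshed convention for marking a name as an intentional re-export; in a stub, a plain `from m import X` is treated as private to that stub. The import surface for user code is unchanged, assuming simplejson is installed:

    import simplejson

    try:
        simplejson.loads('{"broken": ')
    except simplejson.JSONDecodeError as exc:   # re-exported name, visible to the type checker
        print('bad JSON:', exc)
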
diff --git a/typeshed/third_party/2/six/__init__.pyi b/typeshed/third_party/2/six/__init__.pyi
index c4c4723..bd8d97a 100644
--- a/typeshed/third_party/2/six/__init__.pyi
+++ b/typeshed/third_party/2/six/__init__.pyi
@@ -82,7 +82,7 @@ def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[Any
 
 def reraise(tp: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[types.TracebackType] = ...) -> NoReturn: ...
 def exec_(_code_: Union[unicode, types.CodeType], _globs_: Dict[str, Any] = ..., _locs_: Dict[str, Any] = ...): ...
-def raise_from(value: BaseException, from_value: BaseException) -> None: ...
+def raise_from(value: BaseException, from_value: Optional[BaseException]) -> NoReturn: ...
 
 print_ = print
 
diff --git a/typeshed/third_party/2and3/click/__init__.pyi b/typeshed/third_party/2and3/click/__init__.pyi
index c4228fc..2ac61be 100644
--- a/typeshed/third_party/2and3/click/__init__.pyi
+++ b/typeshed/third_party/2and3/click/__init__.pyi
@@ -15,40 +15,98 @@
 """
 
 # Core classes
-from .core import Context, BaseCommand, Command, MultiCommand, Group, \
-    CommandCollection, Parameter, Option, Argument
+from .core import (
+    Context as Context,
+    BaseCommand as BaseCommand,
+    Command as Command,
+    MultiCommand as MultiCommand,
+    Group as Group,
+    CommandCollection as CommandCollection,
+    Parameter as Parameter,
+    Option as Option,
+    Argument as Argument,
+)
 
 # Globals
-from .globals import get_current_context
+from .globals import get_current_context as get_current_context
 
 # Decorators
-from .decorators import pass_context, pass_obj, make_pass_decorator, \
-    command, group, argument, option, confirmation_option, \
-    password_option, version_option, help_option
+from .decorators import (
+    pass_context as pass_context,
+    pass_obj as pass_obj,
+    make_pass_decorator as make_pass_decorator,
+    command as command,
+    group as group,
+    argument as argument,
+    option as option,
+    confirmation_option as confirmation_option,
+    password_option as password_option,
+    version_option as version_option,
+    help_option as help_option,
+)
 
 # Types
-from .types import ParamType, File, Path, Choice, IntRange, Tuple, \
-    STRING, INT, FLOAT, BOOL, UUID, UNPROCESSED
+from .types import (
+    ParamType as ParamType,
+    File as File,
+    Path as Path,
+    Choice as Choice,
+    IntRange as IntRange,
+    Tuple as Tuple,
+    STRING as STRING,
+    INT as INT,
+    FLOAT as FLOAT,
+    BOOL as BOOL,
+    UUID as UUID,
+    UNPROCESSED as UNPROCESSED,
+)
 
 # Utilities
-from .utils import echo, get_binary_stream, get_text_stream, open_file, \
-    format_filename, get_app_dir, get_os_args
+from .utils import (
+    echo as echo,
+    get_binary_stream as get_binary_stream,
+    get_text_stream as get_text_stream,
+    open_file as open_file,
+    format_filename as format_filename,
+    get_app_dir as get_app_dir,
+    get_os_args as get_os_args,
+)
 
 # Terminal functions
-from .termui import prompt, confirm, get_terminal_size, echo_via_pager, \
-    progressbar, clear, style, unstyle, secho, edit, launch, getchar, \
-    pause
+from .termui import (
+    prompt as prompt,
+    confirm as confirm,
+    get_terminal_size as get_terminal_size,
+    echo_via_pager as echo_via_pager,
+    progressbar as progressbar,
+    clear as clear,
+    style as style,
+    unstyle as unstyle,
+    secho as secho,
+    edit as edit,
+    launch as launch,
+    getchar as getchar,
+    pause as pause,
+)
 
 # Exceptions
-from .exceptions import ClickException, UsageError, BadParameter, \
-    FileError, Abort, NoSuchOption, BadOptionUsage, BadArgumentUsage, \
-    MissingParameter
+from .exceptions import (
+    ClickException as ClickException,
+    UsageError as UsageError,
+    BadParameter as BadParameter,
+    FileError as FileError,
+    Abort as Abort,
+    NoSuchOption as NoSuchOption,
+    BadOptionUsage as BadOptionUsage,
+    BadArgumentUsage as BadArgumentUsage,
+    MissingParameter as MissingParameter,
+)
 
 # Formatting
-from .formatting import HelpFormatter, wrap_text
+from .formatting import HelpFormatter as HelpFormatter, wrap_text as wrap_text
 
 # Parsing
-from .parser import OptionParser
+from .parser import OptionParser as OptionParser
 
 
 __all__ = [
diff --git a/typeshed/third_party/2and3/click/globals.pyi b/typeshed/third_party/2and3/click/globals.pyi
index b5a113e..816d393 100644
--- a/typeshed/third_party/2and3/click/globals.pyi
+++ b/typeshed/third_party/2and3/click/globals.pyi
@@ -1,4 +1,5 @@
-from click.core import Optional, Context
+from click.core import Context
+from typing import Optional
 
 
 def get_current_context(silent: bool = False) -> Context:
diff --git a/typeshed/third_party/2and3/emoji.pyi b/typeshed/third_party/2and3/emoji.pyi
new file mode 100644
index 0000000..53fb579
--- /dev/null
+++ b/typeshed/third_party/2and3/emoji.pyi
@@ -0,0 +1,18 @@
+from typing import Tuple, Pattern, List, Dict, Union
+
+_DEFAULT_DELIMITER = ...  # type: str
+
+def emojize(
+    string: str,
+    use_aliases: bool=...,
+    delimiters: Tuple[str, str]=...
+) -> str: ...
+
+def demojize(
+    string: str,
+    delimiters: Tuple[str, str]=...
+) -> str: ...
+
+def get_emoji_regexp() -> Pattern: ...
+
+def emoji_lis(string: str) -> List[Dict[str, Union[int, str]]]: ...
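
Going by this new stub, typical use of the emoji package looks like the sketch below (the exact short codes and result layout depend on the installed emoji version):

    import emoji

    text = emoji.emojize('Python is :snake:')     # ':snake:' -> the snake emoji
    print(emoji.demojize(text))                   # back to 'Python is :snake:'
    print(emoji.emoji_lis(text))                  # e.g. [{'location': 10, 'emoji': '🐍'}]
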
diff --git a/typeshed/third_party/2and3/jinja2/__init__.pyi b/typeshed/third_party/2and3/jinja2/__init__.pyi
index 96a5249..063f73d 100644
--- a/typeshed/third_party/2and3/jinja2/__init__.pyi
+++ b/typeshed/third_party/2and3/jinja2/__init__.pyi
@@ -4,4 +4,4 @@ from jinja2.bccache import BytecodeCache as BytecodeCache, FileSystemBytecodeCac
 from jinja2.runtime import Undefined as Undefined, DebugUndefined as DebugUndefined, StrictUndefined as StrictUndefined, make_logging_undefined as make_logging_undefined
 from jinja2.exceptions import TemplateError as TemplateError, UndefinedError as UndefinedError, TemplateNotFound as TemplateNotFound, TemplatesNotFound as TemplatesNotFound, TemplateSyntaxError as TemplateSyntaxError, TemplateAssertionError as TemplateAssertionError
 from jinja2.filters import environmentfilter as environmentfilter, contextfilter as contextfilter, evalcontextfilter as evalcontextfilter
-from jinja2.utils import Markup as Markup, escape as escape, clear_caches as clear_caches, environmentfunction as environmentfunction, evalcontextfunction as evalcontextfunction, contextfunction as contextfunction, is_undefined as is_undefined
+from jinja2.utils import Markup as Markup, escape as escape, clear_caches as clear_caches, environmentfunction as environmentfunction, evalcontextfunction as evalcontextfunction, contextfunction as contextfunction, is_undefined as is_undefined, select_autoescape as select_autoescape
diff --git a/typeshed/third_party/2and3/jinja2/environment.pyi b/typeshed/third_party/2and3/jinja2/environment.pyi
index 5c58651..909211c 100644
--- a/typeshed/third_party/2and3/jinja2/environment.pyi
+++ b/typeshed/third_party/2and3/jinja2/environment.pyi
@@ -42,7 +42,7 @@ class Environment:
     bytecode_cache = ...  # type: BytecodeCache
     auto_reload = ...  # type: bool
     extensions = ...  # type: List
-    def __init__(self, block_start_string: Text = ..., block_end_string: Text = ..., variable_start_string: Text = ..., variable_end_string: Text = ..., comment_start_string: Any = ..., comment_end_string: Text = ..., line_statement_prefix: Text = ..., line_comment_prefix: Text = ..., trim_blocks: bool = ..., lstrip_blocks: bool = ..., newline_sequence: Text = ..., keep_trailing_newline: bool = ..., extensions: List = ..., optimized: bool = ..., undefined: Type[Undefined] = ..., finalize [...]
+    def __init__(self, block_start_string: Text = ..., block_end_string: Text = ..., variable_start_string: Text = ..., variable_end_string: Text = ..., comment_start_string: Any = ..., comment_end_string: Text = ..., line_statement_prefix: Text = ..., line_comment_prefix: Text = ..., trim_blocks: bool = ..., lstrip_blocks: bool = ..., newline_sequence: Text = ..., keep_trailing_newline: bool = ..., extensions: List = ..., optimized: bool = ..., undefined: Type[Undefined] = ..., finalize [...]
     def add_extension(self, extension): ...
     def extend(self, **attributes): ...
     def overlay(self, block_start_string: Text = ..., block_end_string: Text = ..., variable_start_string: Text = ..., variable_end_string: Text = ..., comment_start_string: Any = ..., comment_end_string: Text = ..., line_statement_prefix: Text = ..., line_comment_prefix: Text = ..., trim_blocks: bool = ..., lstrip_blocks: bool = ..., extensions: List = ..., optimized: bool = ..., undefined: Type[Undefined] = ..., finalize: Callable = ..., autoescape: bool = ..., loader: Optional[BaseLoa [...]
diff --git a/typeshed/third_party/2and3/jinja2/utils.pyi b/typeshed/third_party/2and3/jinja2/utils.pyi
index 7a0f1a9..196bab3 100644
--- a/typeshed/third_party/2and3/jinja2/utils.pyi
+++ b/typeshed/third_party/2and3/jinja2/utils.pyi
@@ -1,6 +1,6 @@
-from typing import Any, Optional
+from typing import Any, Callable, Iterable, Optional
 
-from markupsafe import Markup, escape, soft_unicode
+from markupsafe import Markup as Markup, escape as escape, soft_unicode as soft_unicode
 
 missing = ...  # type: Any
 internal_code = ...  # type: Any
@@ -11,6 +11,7 @@ def evalcontextfunction(f): ...
 def environmentfunction(f): ...
 def internalcode(f): ...
 def is_undefined(obj): ...
+def select_autoescape(enabled_extensions: Iterable[str] = ..., disabled_extensions: Iterable[str] = ..., default_for_string: bool = ..., default: bool = ...) -> Callable[[str], bool]: ...
 def consume(iterable): ...
 def clear_caches(): ...
 def import_string(import_name, silent: bool = ...): ...
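
The new select_autoescape entry matches its role as an autoescape policy factory for Environment; for instance:

    from jinja2 import DictLoader, Environment, select_autoescape

    env = Environment(
        loader=DictLoader({'page.html': '<p>{{ name }}</p>'}),
        autoescape=select_autoescape(enabled_extensions=('html', 'xml'),
                                     default_for_string=False),
    )
    # .html templates are escaped: prints '<p>&lt;b&gt;hi&lt;/b&gt;</p>'
    print(env.get_template('page.html').render(name='<b>hi</b>'))
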
diff --git a/typeshed/third_party/2and3/pynamodb/attributes.pyi b/typeshed/third_party/2and3/pynamodb/attributes.pyi
index 1b2c2d8..d450d0e 100644
--- a/typeshed/third_party/2and3/pynamodb/attributes.pyi
+++ b/typeshed/third_party/2and3/pynamodb/attributes.pyi
@@ -39,8 +39,8 @@ class UnicodeSetAttribute(SetMixin, Attribute[Set[Text]]):
 class UnicodeAttribute(Attribute[Text]):
     def __get__(self, instance: Any, owner: Any) -> Text: ...
 
-class JSONAttribute(Attribute[Dict[Text, Any]]):
-    def __get__(self, instance: Any, owner: Any) -> Dict[Text, Any]: ...
+class JSONAttribute(Attribute[Any]):
+    def __get__(self, instance: Any, owner: Any) -> Any: ...
 
 class LegacyBooleanAttribute(Attribute[bool]):
     def __get__(self, instance: Any, owner: Any) -> bool: ...
diff --git a/typeshed/third_party/2and3/pytz/lazy.pyi b/typeshed/third_party/2and3/pytz/lazy.pyi
index 3618333..795ed04 100644
--- a/typeshed/third_party/2and3/pytz/lazy.pyi
+++ b/typeshed/third_party/2and3/pytz/lazy.pyi
@@ -1,13 +1,14 @@
-# Stubs for pytz.lazy (Python 3.5)
-
-from typing import Any, Iterable, List, Set, Dict  # NOQA
+from typing import Iterator, List, Set, TypeVar
 from collections import Mapping
 
-class LazyDict(Mapping):
-    pass
+_T = TypeVar('_T')
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
 
-class LazyList(List):
-    pass
+class LazyDict(Mapping[_KT, _VT]):
+    def __getitem__(self, key: _KT) -> _VT: ...
+    def __iter__(self) -> Iterator[_KT]: ...
+    def __len__(self) -> int: ...
 
-class LazySet(Set):
-    pass
+class LazyList(List[_T]): ...
+class LazySet(Set[_T]): ...
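
Parameterising LazyDict over its key and value types is the standard way to keep Mapping subclasses precise; only the three abstract methods are required, as in this illustrative sketch (FrozenMap is a made-up name):

    from typing import Dict, Iterator, Mapping, TypeVar

    _KT = TypeVar('_KT')
    _VT = TypeVar('_VT')


    class FrozenMap(Mapping[_KT, _VT]):
        def __init__(self, data: Dict[_KT, _VT]) -> None:
            self._data = dict(data)

        def __getitem__(self, key: _KT) -> _VT:
            return self._data[key]

        def __iter__(self) -> Iterator[_KT]:
            return iter(self._data)

        def __len__(self) -> int:
            return len(self._data)


    offsets = FrozenMap({'UTC': 0, 'CET': 1})
    print(offsets['CET'], len(offsets), 'UTC' in offsets)   # 1 2 True
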
diff --git a/typeshed/third_party/2and3/requests/adapters.pyi b/typeshed/third_party/2and3/requests/adapters.pyi
index 81d0212..7e6167c 100644
--- a/typeshed/third_party/2and3/requests/adapters.pyi
+++ b/typeshed/third_party/2and3/requests/adapters.pyi
@@ -56,7 +56,7 @@ class HTTPAdapter(BaseAdapter):
     config = ...  # type: Any
     proxy_manager = ...  # type: Any
     def __init__(self, pool_connections=..., pool_maxsize=..., max_retries=...,
-                 pool_block=...): ...
+                 pool_block=...) -> None: ...
     poolmanager = ...  # type: Any
     def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs): ...
     def proxy_manager_for(self, proxy, **proxy_kwargs): ...
diff --git a/typeshed/third_party/2and3/requests/auth.pyi b/typeshed/third_party/2and3/requests/auth.pyi
index 8eea2b0..b73aa70 100644
--- a/typeshed/third_party/2and3/requests/auth.pyi
+++ b/typeshed/third_party/2and3/requests/auth.pyi
@@ -1,6 +1,6 @@
 # Stubs for requests.auth (Python 3)
 
-from typing import Any
+from typing import Any, Text, Union
 from . import compat
 from . import cookies
 from . import utils
@@ -14,6 +14,8 @@ codes = status_codes.codes
 CONTENT_TYPE_FORM_URLENCODED = ...  # type: Any
 CONTENT_TYPE_MULTI_PART = ...  # type: Any
 
+def _basic_auth_str(username: Union[bytes, Text], password: Union[bytes, Text]) -> str: ...
+
 class AuthBase:
     def __call__(self, r): ...
 
diff --git a/typeshed/third_party/2and3/requests/models.pyi b/typeshed/third_party/2and3/requests/models.pyi
index 265670f..18b1689 100644
--- a/typeshed/third_party/2and3/requests/models.pyi
+++ b/typeshed/third_party/2and3/requests/models.pyi
@@ -76,7 +76,7 @@ class Request(RequestHooksMixin):
     auth = ...  # type: Any
     cookies = ...  # type: Any
     def __init__(self, method=..., url=..., headers=..., files=..., data=..., params=...,
-                 auth=..., cookies=..., hooks=..., json=...): ...
+                 auth=..., cookies=..., hooks=..., json=...) -> None: ...
     def prepare(self): ...
 
 class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
diff --git a/typeshed/third_party/2and3/requests/structures.pyi b/typeshed/third_party/2and3/requests/structures.pyi
index 53af72d..ad9044f 100644
--- a/typeshed/third_party/2and3/requests/structures.pyi
+++ b/typeshed/third_party/2and3/requests/structures.pyi
@@ -1,9 +1,12 @@
-# Stubs for requests.structures (Python 3)
-
 from typing import Any, Iterator, MutableMapping, Text, Tuple, Union
 
 class CaseInsensitiveDict(MutableMapping[str, Union[Text, bytes]]):
     def lower_items(self) -> Iterator[Tuple[str, Union[Text, bytes]]]: ...
+    def __setitem__(self, key: str, value: Union[Text, bytes]) -> None: ...
+    def __getitem__(self, key: str) -> Union[Text, bytes]: ...
+    def __delitem__(self, key: str) -> None: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def __len__(self) -> int: ...
 
 class LookupDict(dict):
     name = ...  # type: Any
diff --git a/typeshed/third_party/2and3/typing_extensions.pyi b/typeshed/third_party/2and3/typing_extensions.pyi
new file mode 100644
index 0000000..23cb4af
--- /dev/null
+++ b/typeshed/third_party/2and3/typing_extensions.pyi
@@ -0,0 +1,33 @@
+import sys
+import typing
+from typing import ClassVar as ClassVar
+from typing import ContextManager as ContextManager
+from typing import Counter as Counter
+from typing import DefaultDict as DefaultDict
+from typing import Deque as Deque
+from typing import NewType as NewType
+from typing import NoReturn as NoReturn
+from typing import overload as overload
+from typing import Text as Text
+from typing import Type as Type
+from typing import TYPE_CHECKING as TYPE_CHECKING
+from typing import TypeVar, Any
+
+_TC = TypeVar('_TC', bound=Type[object])
+class _SpecialForm:
+    def __getitem__(self, typeargs: Any) -> Any: ...
+def runtime(cls: _TC) -> _TC: ...
+Protocol: _SpecialForm = ...
+
+if sys.version_info >= (3, 3):
+    from typing import ChainMap as ChainMap
+
+if sys.version_info >= (3, 5):
+    from typing import AsyncIterable as AsyncIterable
+    from typing import AsyncIterator as AsyncIterator
+    from typing import AsyncContextManager as AsyncContextManager
+    from typing import Awaitable as Awaitable
+    from typing import Coroutine as Coroutine
+
+if sys.version_info >= (3, 6):
+    from typing import AsyncGenerator as AsyncGenerator
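
This new stub covers the experimental structural typing support in typing_extensions; Protocol plus the runtime decorator (renamed runtime_checkable in later releases) are used like this:

    from typing_extensions import Protocol, runtime


    @runtime
    class SupportsClose(Protocol):
        def close(self) -> None: ...


    class Resource:
        def close(self) -> None:
            print('closed')


    def shutdown(obj: SupportsClose) -> None:
        obj.close()


    shutdown(Resource())                          # structural match, no inheritance needed
    print(isinstance(Resource(), SupportsClose))  # True, enabled by @runtime
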
diff --git a/typeshed/third_party/3/enum.pyi b/typeshed/third_party/3/enum.pyi
index dbb9df3..1501be2 100644
--- a/typeshed/third_party/3/enum.pyi
+++ b/typeshed/third_party/3/enum.pyi
@@ -1,10 +1,15 @@
 import sys
 from typing import List, Any, TypeVar, Union, Iterable, Iterator, TypeVar, Generic, Type, Sized, Reversible, Container, Mapping
+from abc import ABCMeta
 
 _T = TypeVar('_T', bound=Enum)
 _S = TypeVar('_S', bound=Type[Enum])
 
-class EnumMeta(type, Iterable[Enum], Sized, Reversible[Enum], Container[Enum]):
+# Note: EnumMeta actually subclasses type directly, not ABCMeta.
+# This is a temporary workaround to allow multiple creation of enums with builtins
+# such as str as mixins, which due to the handling of ABCs of builtin types, cause
+# spurious inconsistent metaclass structure. See #1595.
+class EnumMeta(ABCMeta, Iterable[Enum], Sized, Reversible[Enum], Container[Enum]):
     def __iter__(self: Type[_T]) -> Iterator[_T]: ...
     def __reversed__(self: Type[_T]) -> Iterator[_T]: ...
     def __contains__(self, member: Any) -> bool: ...
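
The ABCMeta workaround exists so that enums mixing in builtin types such as str keep type-checking cleanly; the pattern it unblocks is the ordinary mixin enum:

    from enum import Enum


    class Color(str, Enum):
        RED = 'red'
        GREEN = 'green'


    print(Color.RED.upper())              # 'RED': str methods see the member's value
    print(Color('green') is Color.GREEN)  # True: lookup by value
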
diff --git a/typeshed/third_party/3/lxml/etree.pyi b/typeshed/third_party/3/lxml/etree.pyi
index e1bab54..958c82a 100644
--- a/typeshed/third_party/3/lxml/etree.pyi
+++ b/typeshed/third_party/3/lxml/etree.pyi
@@ -3,7 +3,7 @@
 # Any use of `Any` below means I couldn't figure out the type.
 
 import typing
-from typing import Any, Dict, List, MutableMapping, Tuple, Union
+from typing import Any, Dict, List, MutableMapping, Tuple, Union, Optional
 from typing import Iterable, Iterator, SupportsBytes
 
 
@@ -23,7 +23,12 @@ class ElementChildIterator(Iterator['_Element']):
 
 class _Element(Iterable['_Element']):
     def addprevious(self, element: '_Element') -> None: ...
-
+    def addnext(self, element: '_Element') -> None: ...
+    def clear(self) -> None: ...
+    def get(self, key: _AnyStr, default: Optional[_AnyStr] = ...) -> _AnyStr: ...
+    def xpath(self, _path: _AnyStr, namespaces: Optional[_DictAnyStr] = ..., extensions: Any = ..., smart_strings: bool = ..., **_variables: Any) -> Any: ...
+    # indeed returns a Union[bool, float, _AnyStr, List[Union[ElementBase, _AnyStr, Tuple[]]]]: ...
+    # http://lxml.de/xpathxslt.html#xpath-return-values
     attrib = ...  # type: MutableMapping[str, str]
     text = ...  # type: _AnyStr
     tag = ...  # type: str
@@ -45,6 +50,7 @@ class _ElementTree:
               exclusive: bool = ...,
               with_comments: bool = ...,
               inclusive_ns_prefixes: _ListAnyStr = ...) -> None: ...
+    def xpath(self, _path: _AnyStr, namespaces: Optional[_DictAnyStr] = ..., extensions: Any = ..., smart_strings: bool = ..., **_variables: Any) -> Any: ...
 
 class _XSLTResultTree(SupportsBytes): ...
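
The newly declared _Element methods cover the usual attribute and XPath access patterns, roughly as follows:

    from lxml import etree

    root = etree.fromstring(
        b'<doc><a href="https://example.org">home</a><a href="/about">about</a></doc>')

    first = root[0]
    print(first.get('href'))                            # 'https://example.org'
    print(first.get('missing', 'n/a'))                  # default for absent attributes
    print(root.xpath('//a/@href'))                      # list of attribute strings
    print(etree.ElementTree(root).xpath('count(//a)'))  # xpath on the tree -> 2.0
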
 
diff --git a/typeshed/third_party/3/six/__init__.pyi b/typeshed/third_party/3/six/__init__.pyi
index 80f6766..0631a69 100644
--- a/typeshed/third_party/3/six/__init__.pyi
+++ b/typeshed/third_party/3/six/__init__.pyi
@@ -94,7 +94,7 @@ def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: Union[Any
 exec_ = exec
 
 def reraise(tp: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[types.TracebackType] = None) -> NoReturn: ...
-def raise_from(value: BaseException, from_value: BaseException) -> None: ...
+def raise_from(value: BaseException, from_value: Optional[BaseException]) -> NoReturn: ...
 
 print_ = print
 
diff --git a/xml/mypy-html.xslt b/xml/mypy-html.xslt
index 2e7ed51..ddd78c2 100644
--- a/xml/mypy-html.xslt
+++ b/xml/mypy-html.xslt
@@ -68,7 +68,7 @@
               <td class="table-code">
                 <pre>
                   <xsl:for-each select="line">
-                    <span class="line-{@precision}"><xsl:value-of select="@content"/></span><xsl:text>
-</xsl:text>
+                    <span class="line-{@precision}" title="{@any_info}"><xsl:value-of select="@content"/></span><xsl:text>
+</xsl:text>
                   </xsl:for-each>
                 </pre>
               </td>
diff --git a/xml/mypy.xsd b/xml/mypy.xsd
index 83d0832..77d0737 100644
--- a/xml/mypy.xsd
+++ b/xml/mypy.xsd
@@ -38,6 +38,7 @@
           <xs:complexType>
             <xs:attribute name="number" type="xs:integer" use="required"/>
             <xs:attribute name="precision" type="precision" use="required"/>
+            <xs:attribute name="any_info" type="xs:string" use="optional"/>
             <xs:attribute name="content" type="xs:string" use="required"/>
           </xs:complexType>
         </xs:element>

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/mypy.git