[med-svn] [mypy] 01/01: New upstream version 0.521

Michael Crusoe misterc-guest at moszumanska.debian.org
Sat Oct 7 17:16:03 UTC 2017


This is an automated email from the git hooks/post-receive script.

misterc-guest pushed a commit to annotated tag upstream/0.521
in repository mypy.

commit d434eaa4a5773d6e2376fbfcff556ee3e75bae78
Author: Michael R. Crusoe <michael.crusoe at gmail.com>
Date:   Tue Jul 25 11:19:03 2017 -0700

    New upstream version 0.521
---
 .gitignore                                         |   30 +
 .gitmodules                                        |    3 +
 .travis.yml                                        |   23 +
 CONTRIBUTING.md                                    |  163 +
 CREDITS                                            |  101 +
 LICENSE                                            |  227 ++
 PKG-INFO                                           |    2 +-
 README.md                                          |  268 ++
 ROADMAP.md                                         |   96 +
 appveyor.yml                                       |   41 +
 build-requirements.txt                             |    2 +
 conftest.py                                        |   13 +
 docs/Makefile                                      |  177 +
 docs/README.md                                     |   49 +
 docs/make.bat                                      |  242 ++
 docs/requirements-docs.txt                         |    2 +
 docs/source/additional_features.rst                |    9 +
 docs/source/basics.rst                             |  194 ++
 docs/source/builtin_types.rst                      |   37 +
 docs/source/casts.rst                              |   39 +
 docs/source/cheat_sheet.rst                        |  254 ++
 docs/source/cheat_sheet_py3.rst                    |  307 ++
 docs/source/class_basics.rst                       |  157 +
 docs/source/command_line.rst                       |  484 +++
 docs/source/common_issues.rst                      |  432 +++
 docs/source/conf.py                                |  268 ++
 docs/source/config_file.rst                        |  210 ++
 docs/source/duck_type_compatibility.rst            |   40 +
 docs/source/dynamic_typing.rst                     |   86 +
 docs/source/faq.rst                                |  270 ++
 docs/source/function_overloading.rst               |   92 +
 docs/source/generics.rst                           |  541 +++
 docs/source/getting_started.rst                    |   24 +
 docs/source/index.rst                              |   42 +
 docs/source/introduction.rst                       |   30 +
 docs/source/kinds_of_types.rst                     | 1359 ++++++++
 docs/source/python2.rst                            |  130 +
 docs/source/python36.rst                           |   96 +
 docs/source/revision_history.rst                   |  245 ++
 docs/source/supported_python_features.rst          |   20 +
 docs/source/type_inference_and_annotations.rst     |  172 +
 extensions/README.md                               |    6 +
 extensions/mypy_extensions.py                      |  137 +
 extensions/setup.py                                |   45 +
 misc/actions_stubs.py                              |  111 +
 misc/analyze_cache.py                              |  189 ++
 misc/async_matrix.py                               |  120 +
 misc/fix_annotate.py                               |  219 ++
 misc/incremental_checker.py                        |  356 ++
 misc/macs.el                                       |   22 +
 misc/perf_checker.py                               |   93 +
 misc/remove-eol-whitespace.sh                      |    8 +
 misc/test_case_to_actual.py                        |   71 +
 misc/touch_checker.py                              |  151 +
 misc/upload-pypi.py                                |  160 +
 misc/variadics.py                                  |   54 +
 mypy.egg-info/PKG-INFO                             |    2 +-
 mypy.egg-info/SOURCES.txt                          |  301 ++
 mypy.egg-info/requires.txt                         |    2 +-
 mypy/build.py                                      |   80 +-
 mypy/checker.py                                    |    7 +-
 mypy/fastparse.py                                  |    2 +-
 mypy/fastparse2.py                                 |    2 +-
 mypy/fixup.py                                      |    3 +
 mypy/myunit/__init__.py                            |  383 +++
 mypy/myunit/__main__.py                            |   18 +
 mypy/nodes.py                                      |   14 +-
 mypy/options.py                                    |    3 +-
 mypy/semanal.py                                    |   17 +-
 mypy/server/__init__.py                            |    0
 mypy/server/astdiff.py                             |  209 ++
 mypy/server/astmerge.py                            |  222 ++
 mypy/server/aststrip.py                            |   99 +
 mypy/server/deps.py                                |  232 ++
 mypy/server/subexpr.py                             |  144 +
 mypy/server/target.py                              |   17 +
 mypy/server/trigger.py                             |    5 +
 mypy/server/update.py                              |  425 +++
 mypy/test/__init__.py                              |    0
 mypy/test/collect.py                               |    0
 mypy/test/config.py                                |   19 +
 mypy/test/data.py                                  |  564 ++++
 mypy/test/helpers.py                               |  309 ++
 mypy/test/testargs.py                              |   18 +
 mypy/test/testcheck.py                             |  364 ++
 mypy/test/testcmdline.py                           |  112 +
 mypy/test/testdeps.py                              |   67 +
 mypy/test/testdiff.py                              |   75 +
 mypy/test/testextensions.py                        |  143 +
 mypy/test/testfinegrained.py                       |  120 +
 mypy/test/testgraph.py                             |   76 +
 mypy/test/testinfer.py                             |  223 ++
 mypy/test/testmerge.py                             |  206 ++
 mypy/test/testmoduleinfo.py                        |   14 +
 mypy/test/testparse.py                             |   79 +
 mypy/test/testpythoneval.py                        |  128 +
 mypy/test/testreports.py                           |   40 +
 mypy/test/testsemanal.py                           |  227 ++
 mypy/test/testsolve.py                             |  132 +
 mypy/test/teststubgen.py                           |  204 ++
 mypy/test/testsubtypes.py                          |  207 ++
 mypy/test/testtransform.py                         |   88 +
 mypy/test/testtypegen.py                           |  128 +
 mypy/test/testtypes.py                             |  806 +++++
 mypy/test/update.py                                |    0
 mypy/version.py                                    |    2 +-
 mypy_self_check.ini                                |    8 +
 pinfer/.gitignore                                  |    3 +
 pinfer/LICENSE                                     |   27 +
 pinfer/README                                      |   47 +
 pinfer/__init__.py                                 |    0
 pinfer/inspect3.py                                 |  122 +
 pinfer/p.py                                        |   83 +
 pinfer/pinfer.py                                   |  686 ++++
 pinfer/test_pinfer.py                              |  302 ++
 pinfer/test_pinfer3.py                             |   31 +
 pinfer/unparse.py                                  |  610 ++++
 pinfer/unparse3.py                                 |  610 ++++
 pytest.ini                                         |   21 +
 runtests.py                                        |  461 +++
 scripts/__pycache__/dumpmodule.cpython-36.pyc      |  Bin 0 -> 3853 bytes
 test-data/.flake8                                  |   22 +
 test-data/samples/bottles.py                       |   13 +
 test-data/samples/class.py                         |   18 +
 test-data/samples/cmdline.py                       |    8 +
 test-data/samples/crawl.py                         |  863 +++++
 test-data/samples/crawl2.py                        |  852 +++++
 test-data/samples/dict.py                          |    8 +
 test-data/samples/fib.py                           |    5 +
 test-data/samples/files.py                         |   14 +
 test-data/samples/for.py                           |    4 +
 test-data/samples/generators.py                    |   24 +
 test-data/samples/greet.py                         |    8 +
 test-data/samples/guess.py                         |   32 +
 test-data/samples/hello.py                         |    2 +
 test-data/samples/input.py                         |    3 +
 test-data/samples/itertool.py                      |   16 +
 test-data/samples/readme.txt                       |   25 +
 test-data/samples/regexp.py                        |    7 +
 test-data/stdlib-samples/3.2/base64.py             |  411 +++
 test-data/stdlib-samples/3.2/fnmatch.py            |  112 +
 test-data/stdlib-samples/3.2/genericpath.py        |  112 +
 test-data/stdlib-samples/3.2/getopt.py             |  220 ++
 test-data/stdlib-samples/3.2/glob.py               |   84 +
 .../3.2/incomplete/logging/__init__.py             | 1873 +++++++++++
 .../3.2/incomplete/urllib/__init__.py              |    0
 .../stdlib-samples/3.2/incomplete/urllib/parse.py  |  980 ++++++
 test-data/stdlib-samples/3.2/posixpath.py          |  466 +++
 test-data/stdlib-samples/3.2/pprint.py             |  380 +++
 test-data/stdlib-samples/3.2/random.py             |  743 ++++
 test-data/stdlib-samples/3.2/shutil.py             |  790 +++++
 test-data/stdlib-samples/3.2/subprocess.py         | 1704 ++++++++++
 test-data/stdlib-samples/3.2/tempfile.py           |  721 ++++
 test-data/stdlib-samples/3.2/test/__init__.py      |    0
 test-data/stdlib-samples/3.2/test/randv2_32.pck    |  633 ++++
 test-data/stdlib-samples/3.2/test/randv2_64.pck    |  633 ++++
 test-data/stdlib-samples/3.2/test/randv3.pck       |  633 ++++
 .../3.2/test/subprocessdata/fd_status.py           |   24 +
 .../3.2/test/subprocessdata/input_reader.py        |    7 +
 .../stdlib-samples/3.2/test/subprocessdata/qcat.py |    7 +
 .../3.2/test/subprocessdata/qgrep.py               |   10 +
 .../3.2/test/subprocessdata/sigchild_ignore.py     |    6 +
 test-data/stdlib-samples/3.2/test/support.py       | 1602 +++++++++
 test-data/stdlib-samples/3.2/test/test_base64.py   |  267 ++
 test-data/stdlib-samples/3.2/test/test_fnmatch.py  |   93 +
 .../stdlib-samples/3.2/test/test_genericpath.py    |  313 ++
 test-data/stdlib-samples/3.2/test/test_getopt.py   |  190 ++
 test-data/stdlib-samples/3.2/test/test_glob.py     |  122 +
 .../stdlib-samples/3.2/test/test_posixpath.py      |  531 +++
 test-data/stdlib-samples/3.2/test/test_pprint.py   |  488 +++
 test-data/stdlib-samples/3.2/test/test_random.py   |  533 +++
 test-data/stdlib-samples/3.2/test/test_set.py      | 1884 +++++++++++
 test-data/stdlib-samples/3.2/test/test_shutil.py   |  978 ++++++
 .../stdlib-samples/3.2/test/test_subprocess.py     | 1764 ++++++++++
 test-data/stdlib-samples/3.2/test/test_tempfile.py | 1122 +++++++
 test-data/stdlib-samples/3.2/test/test_textwrap.py |  601 ++++
 .../stdlib-samples/3.2/test/tf_inherit_check.py    |   25 +
 test-data/stdlib-samples/3.2/textwrap.py           |  391 +++
 test-data/unit/README.md                           |  180 +
 test-data/unit/check-abstract.test                 |  833 +++++
 test-data/unit/check-async-await.test              |  677 ++++
 test-data/unit/check-basic.test                    |  310 ++
 test-data/unit/check-bound.test                    |  203 ++
 test-data/unit/check-callable.test                 |  345 ++
 test-data/unit/check-class-namedtuple.test         |  669 ++++
 test-data/unit/check-classes.test                  | 3551 ++++++++++++++++++++
 test-data/unit/check-classvar.test                 |  266 ++
 test-data/unit/check-columns.test                  |   68 +
 test-data/unit/check-custom-plugin.test            |  148 +
 test-data/unit/check-dynamic-typing.test           |  676 ++++
 test-data/unit/check-enum.test                     |  397 +++
 test-data/unit/check-expressions.test              | 1735 ++++++++++
 test-data/unit/check-fastparse.test                |  366 ++
 test-data/unit/check-flags.test                    |  900 +++++
 test-data/unit/check-functions.test                | 2127 ++++++++++++
 test-data/unit/check-generic-subtyping.test        |  763 +++++
 test-data/unit/check-generics.test                 | 1749 ++++++++++
 test-data/unit/check-ignore.test                   |  218 ++
 test-data/unit/check-incomplete-fixture.test       |   98 +
 test-data/unit/check-incremental.test              | 2842 ++++++++++++++++
 test-data/unit/check-inference-context.test        |  897 +++++
 test-data/unit/check-inference.test                | 1889 +++++++++++
 test-data/unit/check-isinstance.test               | 1756 ++++++++++
 test-data/unit/check-kwargs.test                   |  397 +++
 test-data/unit/check-lists.test                    |   72 +
 test-data/unit/check-modules.test                  | 1642 +++++++++
 test-data/unit/check-multiple-inheritance.test     |  242 ++
 test-data/unit/check-namedtuple.test               |  467 +++
 test-data/unit/check-newsyntax.test                |  153 +
 test-data/unit/check-newtype.test                  |  346 ++
 test-data/unit/check-optional.test                 |  615 ++++
 test-data/unit/check-overloading.test              | 1159 +++++++
 test-data/unit/check-python2.test                  |  310 ++
 test-data/unit/check-selftype.test                 |  378 +++
 test-data/unit/check-semanal-error.test            |   97 +
 test-data/unit/check-serialize.test                | 1249 +++++++
 test-data/unit/check-statements.test               | 1559 +++++++++
 test-data/unit/check-super.test                    |  117 +
 test-data/unit/check-tuples.test                   |  943 ++++++
 test-data/unit/check-type-aliases.test             |  114 +
 test-data/unit/check-type-checks.test              |  113 +
 test-data/unit/check-type-promotion.test           |   39 +
 test-data/unit/check-typeddict.test                | 1258 +++++++
 test-data/unit/check-typevar-values.test           |  517 +++
 test-data/unit/check-underscores.test              |   16 +
 test-data/unit/check-unions.test                   |  498 +++
 test-data/unit/check-unreachable-code.test         |  536 +++
 test-data/unit/check-unsupported.test              |   17 +
 test-data/unit/check-varargs.test                  |  595 ++++
 test-data/unit/check-warnings.test                 |  184 +
 test-data/unit/cmdline.test                        | 1033 ++++++
 test-data/unit/deps.test                           |  281 ++
 test-data/unit/diff.test                           |  267 ++
 test-data/unit/fine-grained.test                   |  872 +++++
 test-data/unit/fixtures/__new__.pyi                |   14 +
 test-data/unit/fixtures/alias.pyi                  |   12 +
 test-data/unit/fixtures/args.pyi                   |   29 +
 test-data/unit/fixtures/async_await.pyi            |   20 +
 test-data/unit/fixtures/bool.pyi                   |   12 +
 test-data/unit/fixtures/callable.pyi               |   26 +
 test-data/unit/fixtures/classmethod.pyi            |   26 +
 test-data/unit/fixtures/complex.pyi                |   11 +
 test-data/unit/fixtures/dict.pyi                   |   44 +
 test-data/unit/fixtures/exception.pyi              |   13 +
 test-data/unit/fixtures/f_string.pyi               |   36 +
 test-data/unit/fixtures/fine_grained.pyi           |   26 +
 test-data/unit/fixtures/float.pyi                  |   31 +
 test-data/unit/fixtures/floatdict.pyi              |   63 +
 test-data/unit/fixtures/for.pyi                    |   19 +
 test-data/unit/fixtures/function.pyi               |    7 +
 test-data/unit/fixtures/isinstance.pyi             |   24 +
 test-data/unit/fixtures/isinstancelist.pyi         |   47 +
 test-data/unit/fixtures/list.pyi                   |   32 +
 test-data/unit/fixtures/module.pyi                 |   21 +
 test-data/unit/fixtures/module_all.pyi             |   18 +
 test-data/unit/fixtures/module_all_python2.pyi     |   15 +
 test-data/unit/fixtures/ops.pyi                    |   57 +
 test-data/unit/fixtures/primitives.pyi             |   21 +
 test-data/unit/fixtures/property.pyi               |   20 +
 test-data/unit/fixtures/python2.pyi                |   18 +
 test-data/unit/fixtures/set.pyi                    |   21 +
 test-data/unit/fixtures/slice.pyi                  |   13 +
 test-data/unit/fixtures/staticmethod.pyi           |   19 +
 test-data/unit/fixtures/transform.pyi              |   30 +
 test-data/unit/fixtures/tuple-simple.pyi           |   20 +
 test-data/unit/fixtures/tuple.pyi                  |   33 +
 test-data/unit/fixtures/type.pyi                   |   21 +
 test-data/unit/fixtures/typing-full.pyi            |  120 +
 test-data/unit/fixtures/union.pyi                  |   18 +
 test-data/unit/lib-stub/__builtin__.pyi            |   27 +
 test-data/unit/lib-stub/abc.pyi                    |    3 +
 test-data/unit/lib-stub/builtins.pyi               |   21 +
 test-data/unit/lib-stub/collections.pyi            |    3 +
 test-data/unit/lib-stub/enum.pyi                   |   28 +
 test-data/unit/lib-stub/mypy_extensions.pyi        |   21 +
 test-data/unit/lib-stub/six.pyi                    |    2 +
 test-data/unit/lib-stub/sys.pyi                    |    2 +
 test-data/unit/lib-stub/types.pyi                  |   10 +
 test-data/unit/lib-stub/typing.pyi                 |   64 +
 test-data/unit/merge.test                          |  608 ++++
 test-data/unit/parse-errors.test                   |  448 +++
 test-data/unit/parse-python2.test                  |  399 +++
 test-data/unit/parse.test                          | 3386 +++++++++++++++++++
 test-data/unit/plugins/attrhook.py                 |   21 +
 test-data/unit/plugins/badreturn.py                |    2 +
 test-data/unit/plugins/badreturn2.py               |    5 +
 test-data/unit/plugins/fnplugin.py                 |   14 +
 test-data/unit/plugins/named_callable.py           |   28 +
 test-data/unit/plugins/noentry.py                  |    1 +
 test-data/unit/plugins/plugin2.py                  |   13 +
 test-data/unit/plugins/type_anal_hook.py           |   37 +
 test-data/unit/python2eval.test                    |  474 +++
 test-data/unit/pythoneval-asyncio.test             |  486 +++
 test-data/unit/pythoneval.test                     | 1396 ++++++++
 test-data/unit/semanal-abstractclasses.test        |  119 +
 test-data/unit/semanal-basic.test                  |  459 +++
 test-data/unit/semanal-classes.test                |  633 ++++
 test-data/unit/semanal-classvar.test               |  223 ++
 test-data/unit/semanal-errors.test                 | 1403 ++++++++
 test-data/unit/semanal-expressions.test            |  395 +++
 test-data/unit/semanal-modules.test                |  877 +++++
 test-data/unit/semanal-namedtuple.test             |  177 +
 test-data/unit/semanal-python2.test                |   76 +
 test-data/unit/semanal-statements.test             |  929 +++++
 test-data/unit/semanal-symtable.test               |   52 +
 test-data/unit/semanal-typealiases.test            |  440 +++
 test-data/unit/semanal-typeddict.test              |   36 +
 test-data/unit/semanal-typeinfo.test               |   88 +
 test-data/unit/semanal-types.test                  | 1495 ++++++++
 test-data/unit/stubgen.test                        |  632 ++++
 test-data/unit/typexport-basic.test                | 1169 +++++++
 test-requirements.txt                              |    9 +
 tmp-test-dirs/.gitignore                           |    4 +
 typeshed/stdlib/2and3/csv.pyi                      |   44 +-
 314 files changed, 99048 insertions(+), 62 deletions(-)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..2d7c8d4
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,30 @@
+build/
+__pycache__
+*.py[cod]
+*~
+@*
+/build
+/env
+docs/build/
+*.iml
+/out/
+.venv/
+.mypy_cache/
+.incremental_checker_cache.json
+.cache
+.runtest_log.json
+
+# Packages
+*.egg
+*.egg-info
+
+# IDEs
+.idea
+*.swp
+
+# Operating Systems
+.DS_store
+
+# Coverage Files
+htmlcov
+.coverage*
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..6b366ad
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "typeshed"]
+	path = typeshed
+	url = http://github.com/python/typeshed
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..ed74d8a
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,23 @@
+sudo: false
+language: python
+# cache package wheels (1 cache per python version)
+cache: pip
+python:
+  - "3.3"
+  - "3.4"
+  # Specifically request 3.5.1 because we need to be compatible with that.
+  - "3.5.1"
+  - "3.6"
+  - "3.7-dev"
+  # Pypy build is disabled because it doubles the travis build time, and it rarely fails
+  # unless one of the other builds fails.
+  # - "pypy3"
+
+install:
+  - pip install -r test-requirements.txt
+  - python2 -m pip install --user typing
+  - python setup.py install
+
+script:
+  - python runtests.py -j16 -x lint
+  - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8; fi
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..f7d5849
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,163 @@
+Contributing to Mypy
+====================
+
+Welcome!  Mypy is a community project that aims to work for a wide
+range of Python users and Python codebases.  If you're trying Mypy on
+your Python code, your experience and what you can contribute are
+important to the project's success.
+
+
+Getting started, building, and testing
+--------------------------------------
+
+If you haven't already, take a look at the project's
+[README.md file](README.md)
+and the [Mypy documentation](http://mypy.readthedocs.io/en/latest/),
+and try adding type annotations to your file and type-checking it with Mypy.
+
+
+Discussion
+----------
+
+If you've run into behavior in Mypy you don't understand, or you're
+having trouble working out a good way to apply it to your code, or
+you've found a bug or would like a feature it doesn't have, we want to
+hear from you!
+
+Our main forum for discussion is the project's [GitHub issue
+tracker](https://github.com/python/mypy/issues).  This is the right
+place to start a discussion of any of the above or most any other
+topic concerning the project.
+
+For less formal discussion we have a chat room on
+[gitter.im](https://gitter.im/python/typing).  Some Mypy core developers
+are almost always present; feel free to find us there and we're happy
+to chat.  Substantive technical discussion will be directed to the
+issue tracker.
+
+(We also have an IRC channel, `#python-mypy` on irc.freenode.net.
+This is lightly used; we have mostly switched to the gitter room
+mentioned above.)
+
+#### Code of Conduct
+
+Everyone participating in the Mypy community, and in particular in our
+issue tracker, pull requests, and IRC channel, is expected to treat
+other people with respect and more generally to follow the guidelines
+articulated in the [Python Community Code of
+Conduct](https://www.python.org/psf/codeofconduct/).
+
+
+Submitting Changes
+------------------
+
+Even more excellent than a good bug report is a fix for a bug, or the
+implementation of a much-needed new feature. (*)  We'd love to have
+your contributions.
+
+(*) If your new feature will be a lot of work, we recommend talking to
+    us early -- see below.
+
+We use the usual GitHub pull-request flow, which may be familiar to
+you if you've contributed to other projects on GitHub.  For the mechanics,
+see [our git and GitHub workflow help page](https://github.com/python/mypy/wiki/Using-Git-And-GitHub),
+or [GitHub's own documentation](https://help.github.com/articles/using-pull-requests/).
+
+Anyone interested in Mypy may review your code.  One of the Mypy core
+developers will merge your pull request when they think it's ready.
+For every pull request, we aim to promptly either merge it or say why
+it's not yet ready; if you go a few days without a reply, please feel
+free to ping the thread by adding a new comment.
+
+At present the core developers are (alphabetically):
+* David Fisher (@ddfisher)
+* Jukka Lehtosalo (@JukkaL)
+* Greg Price (@gnprice)
+* Guido van Rossum (@gvanrossum)
+
+
+Preparing Changes
+-----------------
+
+Before you begin: if your change will be a significant amount of work
+to write, we highly recommend starting by opening an issue laying out
+what you want to do.  That lets a conversation happen early in case
+other contributors disagree with what you'd like to do or have ideas
+that will help you do it.
+
+The best pull requests are focused, clearly describe what they're for
+and why they're correct, and contain tests for whatever changes they
+make to the code's behavior.  As a bonus these are easiest for someone
+to review, which helps your pull request get merged quickly!  Standard
+advice about good pull requests for open-source projects applies; we
+have [our own writeup](https://github.com/python/mypy/wiki/Good-Pull-Request)
+of this advice.
+
+See also our [coding conventions](https://github.com/python/mypy/wiki/Code-Conventions) --
+which consist mainly of a reference to
+[PEP 8](https://www.python.org/dev/peps/pep-0008/) -- for the code you
+put in the pull request.
+
+You may also find other pages in the
+[Mypy developer guide](https://github.com/python/mypy/wiki/Developer-Guides)
+helpful in developing your change.
+
+
+Core developer guidelines
+-------------------------
+
+Core developers should follow these rules when processing pull requests:
+
+* Always wait for tests to pass before merging PRs.
+* Use "[Squash and merge](https://github.com/blog/2141-squash-your-commits)"
+  to merge PRs.
+* Delete branches for merged PRs (by core devs pushing to the main repo).
+
+
+Issue-tracker conventions
+-------------------------
+
+We aim to reply to all new issues promptly.  We'll assign a milestone
+to help us track which issues we intend to get to when, and may apply
+labels to carry some other information.  Here's what our milestones
+and labels mean.
+
+### Task priority and sizing
+
+We use GitHub "labels" ([see our
+list](https://github.com/python/mypy/labels)) to roughly order what we
+want to do soon and less soon.  There are two dimensions taken into
+account: **priority** (does it matter to our users) and **size** (how
+long will it take to complete).
+
+Bugs that aren't a huge deal but do matter to users and don't seem
+like a lot of work to fix generally will be dealt with sooner; things
+that will take longer may go further out.
+
+We are trying to keep the backlog at a manageable size; an issue that is
+unlikely to be acted upon in the foreseeable future is going to be
+respectfully closed.  This doesn't mean the issue is not important, but
+rather reflects the limits of the team.
+
+The **question** label is for issue threads where a user is asking a
+question but it isn't yet clear that it represents something to actually
+change.  We use the issue tracker as the preferred venue for such
+questions, even when they aren't literally issues, to keep down the
+number of distinct discussion venues anyone needs to track.  These might
+evolve into a bug or feature request.
+
+Issues **without a priority or size** haven't been triaged.  We aim to
+triage all new issues promptly, but there are some issues from previous
+years that we haven't yet re-reviewed since adopting these conventions.
+
+### Other labels
+
+* **needs discussion**: This issue needs agreement on some kind of
+  design before it makes sense to implement it, and it either doesn't
+  yet have a design or doesn't yet have agreement on one.
+* **feature**, **bug**, **crash**, **refactoring**, **documentation**:
+  These classify the user-facing impact of the change.  Specifically
+  "refactoring" means there should be no user-facing effect.
+* **topic-** labels group issues touching a similar aspect of the
+  project, for example PEP 484 compatibility, a specific command-line
+  option or dependency.
diff --git a/CREDITS b/CREDITS
new file mode 100644
index 0000000..d4fe9ee
--- /dev/null
+++ b/CREDITS
@@ -0,0 +1,101 @@
+Credits
+-------
+
+Lead developer:
+
+  Jukka Lehtosalo <jukka.lehtosalo at iki.fi>
+
+Core team:
+
+  Guido van Rossum <guido at dropbox.com>
+  David Fisher <ddfisher at dropbox.com>
+  Greg Price <gregprice at dropbox.com>
+
+Contributors (in alphabetical order, including typeshed):
+
+  Tim Abbott
+  Steven Allen (@Stebalien)
+  Della Anjeh
+  Reid Barton (@rwbarton)
+  Matthias Bussonnier
+  Anup Chenthamarakshan
+  Kyle Consalus
+  Ian Cordasco
+  ctcutler
+  Ben Darnell
+  Miguel Garcia (@rockneurotiko)
+  Mihnea Giurgea
+  Ryan Gonzalez (@kirbyfan64)
+  James Guthrie
+  Jared Hance
+  Ashley Hewson (@ashleyh)
+  icoxfog417
+  Bob Ippolito (@etrepum)
+  ismail-s
+  Sander Kersten (@spkersten)
+  Matthias Kramm
+  Ian Kronquist (@iankronquist)
+  Yuval Langer
+  Howard Lee
+  Tad Leonard
+  Li Haoyi
+  Darjus Loktevic
+  Ben Longbons
+  Florian Ludwig (@FlorianLudwig)
+  Robert T. McGibbon
+  Ron Murawski <ron at horizonchess.com>
+  Motoki Naruse
+  Jared Pochtar (@jaredp)
+  Michal Pokorný
+  Eric Price (@ecprice)
+  Brodie Rao
+  Sebastian Reuße
+  Sebastian Riikonen
+  Seo Sanghyeon
+  Marek Sapota
+  Gigi Sayfan
+  Vlad Shcherbina
+  Anders Schuller
+  Daniel Shaulov
+  David Shea
+  Vita Smid
+  Schuyler Smith
+  Marcell Vazquez-Chanlatte (@mvcisback)
+  Prayag Verma
+  Igor Vuk (@ivuk)
+  Jeff Walden (@jswalden)
+  Michael Walter
+  Jing Wang
+  Wen Zhang
+  Roy Williams
+  wizzardx
+  Matthew Wright
+  Yuanchao Zhu (@yczhu)
+  Gennadiy Zlobin (@gennad)
+
+Additional thanks to:
+
+  Alex Allain
+  Max Bolingbroke
+  Peter Calvert
+  Kannan Goundan
+  Kathy Gray
+  David J Greaves
+  Riitta Ikonen
+  Terho Ikonen
+  Stephen Kell
+  Łukasz Langa
+  Laura Lehtosalo
+  Peter Ludemann
+  Seppo Mattila
+  Robin Message
+  Alan Mycroft
+  Dominic Orchard
+  Pekka Rapinoja
+  Matt Robben
+  Satnam Singh
+  Juha Sorva
+  Clay Sweetser
+  Jorma Tarhio
+  Jussi Tuovila
+  Andrey Vlasovskikh
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..afddd48
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,227 @@
+Mypy is licensed under the terms of the MIT license, reproduced below.
+
+= = = = =
+
+The MIT License
+
+Copyright (c) 2015-2016 Jukka Lehtosalo and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+= = = = =
+
+Portions of mypy are licensed under different licenses.  The files
+under stdlib-samples are licensed under the PSF 2 License, reproduced below.
+
+= = = = =
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python
+alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions.  Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee.  This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party.  As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee.  Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement.  This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013.  This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee.  This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+        ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands.  All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+= = = = =
\ No newline at end of file
diff --git a/PKG-INFO b/PKG-INFO
index b194318..641f4ed 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: mypy
-Version: 0.520
+Version: 0.521
 Summary: Optional static typing for Python
 Home-page: http://www.mypy-lang.org/
 Author: Jukka Lehtosalo
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e2e36f9
--- /dev/null
+++ b/README.md
@@ -0,0 +1,268 @@
+Mypy: Optional Static Typing for Python
+=======================================
+
+[![Build Status](https://travis-ci.org/python/mypy.svg)](https://travis-ci.org/python/mypy)
+[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+
+
+Got a question? Join us on Gitter!
+----------------------------------
+
+We don't have a mailing list, but we are always happy to answer
+questions on [gitter chat](https://gitter.im/python/typing).  If you are
+sure you've found a bug please search our issue trackers for a
+duplicate before filing a new issue:
+
+- [mypy tracker](https://github.com/python/mypy/issues)
+  for mypy issues
+- [typeshed tracker](https://github.com/python/typeshed/issues)
+  for issues with specific modules
+- [typing tracker](https://github.com/python/typing/issues)
+  for discussion of new type system features (PEP 484 changes) and
+  runtime bugs in the typing module
+
+What is mypy?
+-------------
+
+Mypy is an optional static type checker for Python.  You can add type
+hints to your Python programs using the standard for type
+annotations introduced in Python 3.5 ([PEP 484](https://www.python.org/dev/peps/pep-0484/)), and use mypy to
+type check them statically. Find bugs in your programs without even
+running them!
+
+The type annotation standard has also been backported to earlier
+Python 3.x versions.  Mypy supports Python 3.3 and later.
+
+For Python 2.7, you can add annotations as comments (this is also
+specified in [PEP 484](https://www.python.org/dev/peps/pep-0484/)).
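+
+For example, a comment-style function annotation (an illustrative snippet,
+not taken from the upstream README) looks like this:
+
+```python
+def gcd(a, b):
+    # type: (int, int) -> int
+    while b:
+        a, b = b, a % b
+    return a
+```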
+
+You can mix dynamic and static typing in your programs. You can always
+fall back to dynamic typing when static typing is not convenient, such
+as for legacy code.
+
+Here is a small example to whet your appetite:
+
+```python
+from typing import Iterator
+
+def fib(n: int) -> Iterator[int]:
+    a, b = 0, 1
+    while a < n:
+        yield a
+        a, b = b, a + b
+```
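+
+And here is a sketch (not part of the upstream README) of the kind of error
+mypy would report for a bad call to the `fib` function above, without ever
+running the code:
+
+```python
+list(fib("10"))
+# error: Argument 1 to "fib" has incompatible type "str"; expected "int"
+```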
+
+Mypy is in development; some features are missing and there are bugs.
+See 'Development status' below.
+
+
+Requirements
+------------
+
+You need Python 3.3 or later to run mypy.  You can have multiple Python
+versions (2.x and 3.x) installed on the same system without problems.
+
+In Ubuntu, Mint and Debian you can install Python 3 like this:
+
+    $ sudo apt-get install python3 python3-pip
+
+For other Linux flavors, OS X and Windows, packages are available at
+
+  http://www.python.org/getit/
+
+
+Quick start
+-----------
+
+Mypy can be installed using pip:
+
+    $ python3 -m pip install -U mypy
+
+If you want to run the latest version of the code, you can install from git:
+
+    $ python3 -m pip install -U git+git://github.com/python/mypy.git
+
+
+Now, if Python on your system is configured properly (else see
+"Troubleshooting" below), you can type-check the [statically typed parts] of a
+program like this:
+
+    $ mypy PROGRAM
+
+You can always use a Python interpreter to run your statically typed
+programs, even if they have type errors:
+
+    $ python3 PROGRAM
+
+[statically typed parts]: http://mypy.readthedocs.io/en/latest/basics.html#function-signatures
+
+
+IDE & Linter Integrations
+-------------------------
+
+Mypy can be integrated into popular IDEs:
+
+* Vim: [vim-mypy](https://github.com/Integralist/vim-mypy)
+* Emacs: using [Flycheck](https://github.com/flycheck/) and [Flycheck-mypy](https://github.com/lbolla/emacs-flycheck-mypy/issues)
+* Sublime Text: [SublimeLinter-contrib-mypy]
+* Atom: [linter-mypy](https://atom.io/packages/linter-mypy)
+* PyCharm: PyCharm integrates [its own implementation of PEP 484](https://www.jetbrains.com/help/pycharm/2017.1/type-hinting-in-pycharm.html).
+
+Mypy can also be integrated into [Flake8] using [flake8-mypy].
+
+[Flake8]: http://flake8.pycqa.org/
+[flake8-mypy]: https://github.com/ambv/flake8-mypy
+
+
+Web site and documentation
+--------------------------
+
+Documentation and additional information is available at the web site:
+
+  http://www.mypy-lang.org/
+
+Or you can jump straight to the documentation:
+
+  http://mypy.readthedocs.io/
+
+
+Troubleshooting
+---------------
+
+Depending on your configuration, you may have to run `pip3` like
+this:
+
+    $ python3 -m pip install -U mypy
+
+This should automatically install the appropriate version of
+mypy's parser, typed-ast.  If for some reason it does not, you
+can install it manually:
+
+    $ python3 -m pip install -U typed-ast
+
+If the `mypy` command isn't found after installation: After either
+`pip3 install` or `setup.py install`, the `mypy` script and
+dependencies, including the `typing` module, will be installed to
+system-dependent locations.  Sometimes the script directory will not
+be in `PATH`, and you have to add the target directory to `PATH`
+manually or create a symbolic link to the script.  In particular, on
+Mac OS X, the script may be installed under `/Library/Frameworks`:
+
+    /Library/Frameworks/Python.framework/Versions/<version>/bin
+
+In Windows, the script is generally installed in
+`\PythonNN\Scripts`. So, type check a program like this (replace
+`\Python34` with your Python installation path):
+
+    C:\>\Python34\python \Python34\Scripts\mypy PROGRAM
+
+### Working with `virtualenv`
+
+If you are using [`virtualenv`](https://virtualenv.pypa.io/en/stable/),
+make sure you are running a python3 environment. Installing via `pip3`
+in a v2 environment will not configure the environment to run installed
+modules from the command line.
+
+    $ python3 -m pip install -U virtualenv
+    $ python3 -m virtualenv env
+
+
+Quick start for contributing to mypy
+------------------------------------
+
+If you want to contribute, first clone the mypy git repository:
+
+    $ git clone --recurse-submodules https://github.com/python/mypy.git
+
+If you've already cloned the repo without `--recurse-submodules`,
+you need to pull in the typeshed repo as follows:
+
+    $ git submodule init
+    $ git submodule update
+
+Either way you should now have a subdirectory `typeshed` containing a
+clone of the typeshed repo (`https://github.com/python/typeshed`).
+
+From the mypy directory, use pip to install mypy:
+
+    $ cd mypy
+    $ python3 -m pip install -U .
+
+Replace `python3` with your Python 3 interpreter.  You may have to do
+the above as root. For example, in Ubuntu:
+
+    $ sudo python3 -m pip install -U .
+
+Now you can use the `mypy` program just as above.  In case of trouble
+see "Troubleshooting" above.
+
+
+Working with the git version of mypy
+------------------------------------
+
+mypy contains a submodule, "typeshed". See http://github.com/python/typeshed.
+This submodule contains types for the Python standard library.
+
+Due to the way git submodules work, you'll have to do
+```
+  git submodule update typeshed
+```
+whenever you change branches, merge, rebase, or pull.
+
+(It's possible to automate this: Search Google for "git hook update submodule")
+
+
+Tests
+-----
+
+See [Test README.md](test-data/unit/README.md)
+
+
+Development status
+------------------
+
+Mypy is a work in progress and is not yet production quality, though
+mypy development has been done using mypy for a while!
+
+Here are some of the more significant Python features not supported
+right now (but all of these will improve):
+
+ - properties with setters not supported
+ - limited metaclass support
+ - only a subset of Python standard library modules are supported, and some
+   only partially
+ - 3rd party module support is limited
+
+The current development focus is to have a good coverage of Python
+features and the standard library (both 3.x and 2.7).
+
+
+Issue tracker
+-------------
+
+Please report any bugs and enhancement ideas using the mypy issue
+tracker:
+
+  https://github.com/python/mypy/issues
+
+Feel free to also ask questions on the tracker.
+
+
+Help wanted
+-----------
+
+Any help in testing, development, documentation and other tasks is
+highly appreciated and useful to the project. There are tasks for
+contributors of all experience levels. If you're just getting started,
+check out the
+[difficulty/easy](https://github.com/python/mypy/labels/difficulty%2Feasy)
+label.
+
+For more details, see the file [CONTRIBUTING.md](CONTRIBUTING.md).
+
+
+License
+-------
+
+Mypy is licensed under the terms of the MIT License (see the file
+LICENSE).
diff --git a/ROADMAP.md b/ROADMAP.md
new file mode 100644
index 0000000..132d53c
--- /dev/null
+++ b/ROADMAP.md
@@ -0,0 +1,96 @@
+# Mypy Roadmap
+
+The goal of the roadmap is to document areas the mypy core team is
+planning to work on in the future or is currently working on. PRs
+targeting these areas are very welcome, but please check first with a
+core team member that nobody else is working on the same thing.
+
+**Note:** This doesn’t include everything that the core team will work
+on, and everything is subject to change. Near-term plans are likely
+more accurate.
+
+## April-June 2017
+
+- Add more comprehensive testing for `--incremental` and `--quick`
+  modes to improve reliability. At least write more unit tests with
+  focus on areas that have previously had bugs.
+  ([issue](https://github.com/python/mypy/issues/3455))
+
+- Speed up `--quick` mode to better support million+ line codebases
+  through some of these:
+
+  - Make it possible to use remote caching for incremental cache
+    files. This would speed up a cold run with no local cache data.
+    We need to update incremental cache to use hashes to determine
+    whether files have changes to allow
+    [sharing cache data](https://github.com/python/mypy/issues/3403).
+
+  - See if we can speed up deserialization of incremental cache
+    files. Initial experiments aren’t very promising though so there
+    might not be any easy wins left.
+    ([issue](https://github.com/python/mypy/issues/3456))
+
+- Improve support for complex signatures such as `open(fn, 'rb')` and
+  specific complex decorators such as `contextlib.contextmanager`
+  through type checker plugins/hooks.
+  ([issue](https://github.com/python/mypy/issues/1240))
+
+- Document basic properties of all type operations used within mypy,
+  including compatibility, proper subtyping, joins and meets.
+  ([issue](https://github.com/python/mypy/issues/3454))
+
+- Make TypedDict an officially supported mypy feature. This makes it
+  possible to give precise types for dictionaries that represent JSON
+  objects, such as `{"path": "/dir/fnam.ext", "size": 1234}` (sketched
+  below this list).
+  ([issue](https://github.com/python/mypy/issues/3453))
+
+- Make error messages more useful and informative.
+  ([issue](https://github.com/python/mypy/labels/topic-usability))
+
+- Resolve [#2008](https://github.com/python/mypy/issues/2008) (we are
+  converging on approach 4).
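+
+As a rough illustration of the TypedDict item above (a hypothetical sketch,
+not part of the upstream roadmap), such a dictionary type could be declared
+and checked like this:
+
+```python
+from mypy_extensions import TypedDict
+
+FileInfo = TypedDict('FileInfo', {'path': str, 'size': int})
+
+f = {'path': '/dir/fnam.ext', 'size': 1234}  # type: FileInfo
+f['size'] = 'big'  # mypy flags this: "size" expects int, not str
+```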
+
+## July-December 2017
+
+- Invest some effort into systematically filling in missing
+  annotations and stubs in typeshed, with focus on features heavily
+  used at Dropbox. Better support for ORMs will be a separate
+  project.
+
+- Improve opt-in warnings about `Any` types to make it easier to keep
+  code free from unwanted `Any` types. For example, warn about using
+  `list` (instead of `List[x]`) and calling `open` if we can’t infer a
+  precise return type, or using types imported from ignored modules
+  (they are implicitly `Any`).
+
+- Add support for protocols and structural subtyping (PEP 544).
+
+- Switch completely to pytest and remove the custom testing framework.
+  ([issue](https://github.com/python/mypy/issues/1673))
+
+- Make it possible to run mypy as a daemon to avoid reprocessing the
+  entire program on each run. This will improve performance
+  significantly. Even when using the incremental mode, processing a
+  large number of files is not cheap.
+
+- Refactor and simplify specific tricky parts of mypy internals, such
+  as the [conditional type binder](https://github.com/python/mypy/issues/3457),
+  [symbol tables](https://github.com/python/mypy/issues/3458) or
+  the various [semantic analysis passes](https://github.com/python/mypy/issues/3459).
+
+- Implement a general type system plugin architecture. It should be
+  able to support some typical ORM features at least, such as
+  metaclasses that add methods with automatically inferred signatures
+  and complex descriptors such as those used by Django models.
+  ([issue](https://github.com/python/mypy/issues/1240))
+
+- Add support for statically typed
+  [protobufs](https://developers.google.com/protocol-buffers/).
+
+- Provide much faster, reliable interactive feedback through
+  fine-grained incremental type checking, built on top of the daemon
+  mode.
+
+- Start work on editor plugins and support for selected IDE features.
+
+- Turn on `--strict-optional` by default.
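+
+For context on the last item, a minimal sketch (not from the upstream
+roadmap) of what `--strict-optional` enforces: `None` is only accepted
+where an `Optional` type is declared.
+
+```python
+from typing import Optional
+
+def greeting(name: Optional[str]) -> str:
+    if name is None:
+        return 'Hello, stranger'
+    return 'Hello, ' + name
+
+greeting(None)  # fine: the parameter is declared Optional[str]
+
+def broken(name: str = None) -> str:  # error under --strict-optional: the
+    return 'Hello, ' + name           # default None does not match "str"
+```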
diff --git a/appveyor.yml b/appveyor.yml
new file mode 100644
index 0000000..5612c20
--- /dev/null
+++ b/appveyor.yml
@@ -0,0 +1,41 @@
+environment:
+  matrix:
+
+    - PYTHON: "C:\\Python36-x64"
+      PYTHON_VERSION: "3.6.x"
+      PYTHON_ARCH: "64"
+
+install:
+    - "git config core.symlinks true"
+    - "git reset --hard"
+    - "%PYTHON%\\python.exe -m pip install -r test-requirements.txt"
+    - "git submodule update --init typeshed"
+    - "cd typeshed && git config core.symlinks true && git reset --hard && cd .."
+    - "%PYTHON%\\python.exe setup.py -q install"
+
+build: off
+
+test_script:
+    # Ignore lint (it's run in Travis)
+    - "%PYTHON%\\python.exe runtests.py -x lint"
+
+after_test:
+  - "%PYTHON%\\python.exe -m pip install wheel"
+  - "%PYTHON%\\python.exe setup.py bdist_wheel -p win32"
+  - "%PYTHON%\\python.exe setup.py bdist_wheel -p win_amd64"
+
+artifacts:
+  - path: dist\*
+
+skip_commits:
+  files:
+    - docs/**/*
+    - '**/*.rst'
+    - '**/*.md'
+    - .gitignore
+    - .runtest_log.json
+    - .travis.yml
+    - CREDITS
+    - LICENSE
+
+skip_branch_with_pr: true
diff --git a/build-requirements.txt b/build-requirements.txt
new file mode 100644
index 0000000..0a8547b
--- /dev/null
+++ b/build-requirements.txt
@@ -0,0 +1,2 @@
+setuptools
+wheel
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..4c3b890
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,13 @@
+import os.path
+
+import pytest
+
+pytest_plugins = [
+    'mypy.test.data',
+]
+
+
+def pytest_configure(config):
+    mypy_source_root = os.path.dirname(os.path.abspath(__file__))
+    if os.getcwd() != mypy_source_root:
+        os.chdir(mypy_source_root)
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..be69e9d
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  xml        to make Docutils-native XML files"
+	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Mypy.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Mypy.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/Mypy"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Mypy"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through platex and dvipdfmx..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo
+	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+	@echo "Run \`make' in that directory to run these through makeinfo" \
+	      "(use \`make info' here to do that automatically)."
+
+info:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	make -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+	@echo
+	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+	@echo
+	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+	@echo
+	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 0000000..2122eef
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,49 @@
+Mypy Documentation
+==================
+
+What's this?
+------------
+
+This directory contains the source code for Mypy documentation (under `source/`)
+and build scripts. The documentation uses Sphinx and reStructuredText. We use
+`sphinx-rtd-theme` as the documentation theme.
+
+Building the documentation
+--------------------------
+
+Install Sphinx and the other dependencies (i.e. the documentation theme)
+needed to build the documentation. From the `docs` directory, use `pip`:
+
+```
+$ pip install -r requirements-docs.txt
+```
+
+Build the documentation like this:
+
+```
+$ make html
+```
+
+The built documentation will be placed in the `docs/build` directory. Open
+`docs/build/html/index.html` to view the documentation.
+
+Helpful documentation build commands
+------------------------------------
+
+Clean the documentation build:
+
+```
+$ make clean
+```
+
+Test and check the links found in the documentation:
+
+```
+$ make linkcheck
+```
+
+Documentation on Read The Docs
+------------------------------
+
+The mypy documentation is hosted on Read The Docs, and the latest version
+can be found at https://mypy.readthedocs.io/en/latest.
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100755
index 0000000..1e3d843
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,242 @@
+ at ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
+set I18NSPHINXOPTS=%SPHINXOPTS% source
+if NOT "%PAPER%" == "" (
+	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+	:help
+	echo.Please use `make ^<target^>` where ^<target^> is one of
+	echo.  html       to make standalone HTML files
+	echo.  dirhtml    to make HTML files named index.html in directories
+	echo.  singlehtml to make a single large HTML file
+	echo.  pickle     to make pickle files
+	echo.  json       to make JSON files
+	echo.  htmlhelp   to make HTML files and a HTML help project
+	echo.  qthelp     to make HTML files and a qthelp project
+	echo.  devhelp    to make HTML files and a Devhelp project
+	echo.  epub       to make an epub
+	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+	echo.  text       to make text files
+	echo.  man        to make manual pages
+	echo.  texinfo    to make Texinfo files
+	echo.  gettext    to make PO message catalogs
+	echo.  changes    to make an overview over all changed/added/deprecated items
+	echo.  xml        to make Docutils-native XML files
+	echo.  pseudoxml  to make pseudoxml-XML files for display purposes
+	echo.  linkcheck  to check all external links for integrity
+	echo.  doctest    to run all doctests embedded in the documentation if enabled
+	goto end
+)
+
+if "%1" == "clean" (
+	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+	del /q /s %BUILDDIR%\*
+	goto end
+)
+
+
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.http://sphinx-doc.org/
+	exit /b 1
+)
+
+if "%1" == "html" (
+	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+	goto end
+)
+
+if "%1" == "dirhtml" (
+	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+	goto end
+)
+
+if "%1" == "singlehtml" (
+	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+	goto end
+)
+
+if "%1" == "pickle" (
+	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the pickle files.
+	goto end
+)
+
+if "%1" == "json" (
+	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the JSON files.
+	goto end
+)
+
+if "%1" == "htmlhelp" (
+	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+	goto end
+)
+
+if "%1" == "qthelp" (
+	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Mypy.qhcp
+	echo.To view the help file:
+	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Mypy.qhc
+	goto end
+)
+
+if "%1" == "devhelp" (
+	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished.
+	goto end
+)
+
+if "%1" == "epub" (
+	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The epub file is in %BUILDDIR%/epub.
+	goto end
+)
+
+if "%1" == "latex" (
+	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+	goto end
+)
+
+if "%1" == "latexpdf" (
+	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+	cd %BUILDDIR%/latex
+	make all-pdf
+	cd %BUILDDIR%/..
+	echo.
+	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+	goto end
+)
+
+if "%1" == "latexpdfja" (
+	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+	cd %BUILDDIR%/latex
+	make all-pdf-ja
+	cd %BUILDDIR%/..
+	echo.
+	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+	goto end
+)
+
+if "%1" == "text" (
+	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The text files are in %BUILDDIR%/text.
+	goto end
+)
+
+if "%1" == "man" (
+	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The manual pages are in %BUILDDIR%/man.
+	goto end
+)
+
+if "%1" == "texinfo" (
+	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+	goto end
+)
+
+if "%1" == "gettext" (
+	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+	goto end
+)
+
+if "%1" == "changes" (
+	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.The overview file is in %BUILDDIR%/changes.
+	goto end
+)
+
+if "%1" == "linkcheck" (
+	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+	goto end
+)
+
+if "%1" == "doctest" (
+	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+	goto end
+)
+
+if "%1" == "xml" (
+	%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The XML files are in %BUILDDIR%/xml.
+	goto end
+)
+
+if "%1" == "pseudoxml" (
+	%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
+	goto end
+)
+
+:end
diff --git a/docs/requirements-docs.txt b/docs/requirements-docs.txt
new file mode 100644
index 0000000..d20641e
--- /dev/null
+++ b/docs/requirements-docs.txt
@@ -0,0 +1,2 @@
+Sphinx >= 1.4.4
+sphinx-rtd-theme >= 0.1.9
diff --git a/docs/source/additional_features.rst b/docs/source/additional_features.rst
new file mode 100644
index 0000000..b9dd07f
--- /dev/null
+++ b/docs/source/additional_features.rst
@@ -0,0 +1,9 @@
+Additional features
+-------------------
+
+Several mypy features are not currently covered by this tutorial,
+including the following:
+
+- inheritance between generic classes
+- compatibility and subtyping of generic types, including covariance of generic types
+- ``super()``
diff --git a/docs/source/basics.rst b/docs/source/basics.rst
new file mode 100644
index 0000000..572364d
--- /dev/null
+++ b/docs/source/basics.rst
@@ -0,0 +1,194 @@
+Basics
+======
+
+This chapter introduces some core concepts of mypy, including function
+annotations, the ``typing`` module and library stubs. Read it carefully,
+as the rest of the documentation may not make much sense otherwise.
+
+Function signatures
+*******************
+
+A function without a type annotation is considered dynamically typed:
+
+.. code-block:: python
+
+   def greeting(name):
+       return 'Hello, {}'.format(name)
+
+You can declare the signature of a function using the Python 3
+annotation syntax (Python 2 is discussed later in :ref:`python2`).
+This makes the function statically typed, and causes the type checker
+to report type errors within the function.
+
+Here's a version of the above function that is statically typed and
+will be type checked:
+
+.. code-block:: python
+
+   def greeting(name: str) -> str:
+       return 'Hello, {}'.format(name)
+
+If a function does not explicitly return a value, we give the return
+type as ``None``. Using the ``None`` return value in a statically typed
+context results in a type check error:
+
+.. code-block:: python
+
+   def p() -> None:
+       print('hello')
+
+   a = p()   # Type check error: p has None return value
+
+Arguments with default values can be annotated as follows:
+
+.. code-block:: python
+
+   def greeting(name: str, prefix: str = 'Mr.') -> str:
+       return 'Hello, {} {}'.format(prefix, name)
+
+Mixing dynamic and static typing
+********************************
+
+Mixing dynamic and static typing within a single file is often
+useful. For example, if you are migrating existing Python code to
+static typing, it may be easiest to do this incrementally, such as by
+migrating a few functions at a time. Also, when prototyping a new
+feature, you may decide to first implement the relevant code using
+dynamic typing and only add type signatures later, when the code is
+more stable.
+
+.. code-block:: python
+
+   def f():
+       1 + 'x'  # No static type error (dynamically typed)
+
+   def g() -> None:
+       1 + 'x'  # Type check error (statically typed)
+
+.. note::
+
+   The earlier stages of mypy analysis, known as semantic analysis, may
+   report errors even for dynamically typed functions. However, you
+   should not rely on this behaviour, as it may change in the future.
+
+The typing module
+*****************
+
+The ``typing`` module contains many definitions that are useful in
+statically typed code. You typically use ``from ... import`` to import
+them (we'll explain ``Iterable`` later in this document):
+
+.. code-block:: python
+
+   from typing import Iterable
+
+   def greet_all(names: Iterable[str]) -> None:
+       for name in names:
+           print('Hello, {}'.format(name))
+
+For brevity, we often omit the ``typing`` import in code examples, but
+you should always include it in modules that contain statically typed
+code.
+
+The presence or absence of the ``typing`` module does not affect
+whether your code is type checked; it is only required when you use
+one or more special features it defines.
+
+Type checking programs
+**********************
+
+You can type check a program by using the ``mypy`` tool, which is
+basically a linter -- it checks your program for errors without actually
+running it::
+
+   $ mypy program.py
+
+All errors reported by mypy are essentially warnings that you are free
+to ignore, if you so wish.
+
+The next chapter explains how to download and install mypy:
+:ref:`getting-started`.
+
+More command line options are documented in :ref:`command-line`.
+
+.. note::
+
+   Depending on how mypy is configured, you may have to explicitly use
+   the Python 3 interpreter to run mypy. The mypy tool is an ordinary
+   Python program. For example::
+
+     $ python3 -m mypy program.py
+
+.. _library-stubs:
+
+Library stubs and the Typeshed repo
+***********************************
+
+In order to type check code that uses library modules such as those
+included in the Python standard library, you need to have library
+*stubs*. A library stub defines a skeleton of the public interface
+of the library, including classes, variables and functions, and
+their types, but with dummy function bodies.
+
+For example, consider this code:
+
+.. code-block:: python
+
+  x = chr(4)
+
+Without a library stub, the type checker would have no way of
+inferring the type of ``x`` and checking that the argument to ``chr``
+has a valid type. Mypy incorporates the `typeshed
+<https://github.com/python/typeshed>`_ project, which contains library
+stubs for the Python builtins and the standard library. The stub for
+the builtins contains a definition like this for ``chr``:
+
+.. code-block:: python
+
+    def chr(code: int) -> str: ...
+
+In stub files we don't care about the function bodies, so we use 
+an ellipsis instead.  That ``...`` is three literal dots!
+
+Mypy complains if it can't find a stub (or a real module) for a
+library module that you import. You can create a stub easily; here is
+an overview:
+
+* Write a stub file for the library and store it as a ``.pyi`` file in
+  the same directory as the library module.
+* Alternatively, put your stubs (``.pyi`` files) in a directory
+  reserved for stubs (e.g., ``myproject/stubs``). In this case you
+  have to set the environment variable ``MYPYPATH`` to refer to the
+  directory.  For example::
+
+    $ export MYPYPATH=~/work/myproject/stubs
+
+Use the normal Python file name conventions for modules, e.g. ``csv.pyi``
+for module ``csv``. Use a subdirectory with ``__init__.pyi`` for packages.
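+
+For example, a minimal stub for a hypothetical third-party module
+``frobnicate`` (the module name and functions are made up purely for
+illustration) could be saved as ``frobnicate.pyi``:
+
+.. code-block:: python
+
+   # Contents of frobnicate.pyi -- signatures only, no implementations
+   from typing import List
+
+   def frobnicate(items: List[int], strength: int = ...) -> List[int]: ...
+
+   class Frobnicator:
+       def __init__(self, strength: int = ...) -> None: ...
+       def run(self, items: List[int]) -> List[int]: ...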
+
+If a directory contains both a ``.py`` and a ``.pyi`` file for the
+same module, the ``.pyi`` file takes precedence. This way you can
+easily add annotations for a module even if you don't want to modify
+the source code. This can be useful, for example, if you use 3rd party
+open source libraries in your program (and there are no stubs in
+typeshed yet).
+
+That's it! Now you can access the module in mypy programs and type check
+code that uses the library. If you write a stub for a library module,
+consider making it available for other programmers who use mypy
+by contributing it back to the typeshed repo.
+
+There is more information about creating stubs in the
+`mypy wiki <https://github.com/python/mypy/wiki/Creating-Stubs-For-Python-Modules>`_.
+The following sections explain the kinds of type annotations you can use
+in your programs and stub files.
+
+.. note::
+
+   You may be tempted to point ``MYPYPATH`` to the standard library or
+   to the ``site-packages`` directory where your 3rd party packages
+   are installed. This is almost always a bad idea -- you will likely
+   get tons of error messages about code you didn't write and that
+   mypy can't analyze all that well yet, and in the worst case
+   scenario mypy may crash due to some construct in a 3rd party
+   package that it didn't expect.
diff --git a/docs/source/builtin_types.rst b/docs/source/builtin_types.rst
new file mode 100644
index 0000000..4426df7
--- /dev/null
+++ b/docs/source/builtin_types.rst
@@ -0,0 +1,37 @@
+Built-in types
+==============
+
+These are examples of some of the most common built-in types:
+
+=================== ===============================
+Type                Description
+=================== ===============================
+``int``             integer of arbitrary size
+``float``           floating point number
+``bool``            boolean value
+``str``             unicode string
+``bytes``           8-bit string
+``object``          an arbitrary object (``object`` is the common base class)
+``List[str]``       list of ``str`` objects
+``Dict[str, int]``  dictionary from ``str`` keys to ``int`` values
+``Iterable[int]``   iterable object containing ints
+``Sequence[bool]``  sequence of booleans
+``Any``             dynamically typed value with an arbitrary type
+=================== ===============================
+
+The type ``Any`` and type constructors ``List``, ``Dict``,
+``Iterable`` and ``Sequence`` are defined in the ``typing`` module.
+
+The type ``Dict`` is a *generic* class, signified by type arguments within
+``[...]``. For example, ``Dict[int, str]`` is a dictionary from integers to
+strings, and ``Dict[Any, Any]`` is a dictionary of dynamically typed
+(arbitrary) values and keys. ``List`` is another generic class. ``Dict`` and
+``List`` are aliases for the built-ins ``dict`` and ``list``, respectively.
+
+``Iterable`` and ``Sequence`` are generic abstract base classes that
+correspond to Python protocols. For example, a ``str`` object or a
+``List[str]`` object is valid
+when ``Iterable[str]`` or ``Sequence[str]`` is expected. Note that even though
+they are similar to abstract base classes defined in ``collections.abc``
+(formerly ``collections``), they are not identical, since the built-in
+collection type objects do not support indexing.
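+
+As a brief illustration (a sketch only; the variable and function names
+are arbitrary):
+
+.. code-block:: python
+
+   from typing import Any, Dict, Iterable, List, Sequence
+
+   names = ['Alice', 'Bob']   # type: List[str]
+   ages = {'Alice': 30}       # type: Dict[str, int]
+   anything = 42              # type: Any
+
+   def total(items: Iterable[int]) -> int:
+       return sum(items)
+
+   def first_flag(flags: Sequence[bool]) -> bool:
+       return flags[0]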
diff --git a/docs/source/casts.rst b/docs/source/casts.rst
new file mode 100644
index 0000000..900ee0c
--- /dev/null
+++ b/docs/source/casts.rst
@@ -0,0 +1,39 @@
+.. _casts:
+
+Casts
+=====
+
+Mypy supports type casts that are usually used to coerce a statically
+typed value to a subtype. Unlike languages such as Java or C#,
+however, mypy casts are only used as hints for the type checker, and they
+don't perform a runtime type check. Use the function ``cast`` to perform a
+cast:
+
+.. code-block:: python
+
+   from typing import cast, List
+
+   o = [1] # type: object
+   x = cast(List[int], o)  # OK
+   y = cast(List[str], o)  # OK (cast performs no actual runtime check)
+
+To support runtime checking of casts such as the above, we'd have to check
+the types of all list items, which would be very inefficient for large lists.
+Use assertions if you want to
+perform an actual runtime check. Casts are used to silence spurious
+type checker warnings and give the type checker a little help when it can't
+quite understand what is going on.
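+
+For example, an ``isinstance`` assertion performs a real runtime check,
+and mypy also uses it to narrow the type (a small sketch; the function is
+made up for illustration):
+
+.. code-block:: python
+
+   def double(o: object) -> int:
+       assert isinstance(o, int)  # Checked at runtime, unlike a cast
+       return o * 2               # Mypy now knows that o is an int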
+
+You don't need a cast for expressions with type ``Any``, or when
+assigning to a variable with type ``Any``, as was explained earlier.
+You can also use ``Any`` as the cast target type -- this lets you perform
+any operations on the result. For example:
+
+.. code-block:: python
+
+    from typing import cast, Any
+
+    x = 1
+    x + 'x'   # Type check error
+    y = cast(Any, x)
+    y + 'x'   # Type check OK (runtime error)
diff --git a/docs/source/cheat_sheet.rst b/docs/source/cheat_sheet.rst
new file mode 100644
index 0000000..a1cdb98
--- /dev/null
+++ b/docs/source/cheat_sheet.rst
@@ -0,0 +1,254 @@
+.. _cheat-sheet-py2:
+
+Mypy syntax cheat sheet (Python 2)
+==================================
+
+This document is a quick cheat sheet showing how the `PEP 484 <https://www.python.org/dev/peps/pep-0484/>`_ type
+language represents various common types in Python 2.
+
+.. note::
+
+   Technically many of the type annotations shown below are redundant,
+   because mypy can derive them from the type of the expression.  So
+   many of the examples have a dual purpose: show how to write the
+   annotation, and show the inferred types.
+
+
+Built-in types
+**************
+
+.. code-block:: python
+
+   from typing import List, Set, Dict, Tuple, Text, Optional
+
+   # For simple built-in types, just use the name of the type.
+   x = 1 # type: int
+   x = 1.0 # type: float
+   x = True # type: bool
+   x = "test" # type: str
+   x = u"test" # type: unicode
+
+   # For collections, the name of the type is capitalized, and the
+   # name of the type inside the collection is in brackets.
+   x = [1] # type: List[int]
+   x = set([6, 7]) # type: Set[int]
+
+   # For mappings, we need the types of both keys and values.
+   x = dict(field=2.0) # type: Dict[str, float]
+
+   # For tuples, we specify the types of all the elements.
+   x = (3, "yes", 7.5) # type: Tuple[int, str, float]
+
+   # For textual data, use Text.
+   # This is `unicode` in Python 2 and `str` in Python 3.
+   x = ["string", u"unicode"] # type: List[Text]
+
+   # Use Optional for values that could be None.
+   input_str = f() # type: Optional[str]
+   if input_str is not None:
+      print input_str
+
+
+Functions
+*********
+
+.. code-block:: python
+
+   from typing import Callable, Iterable, Union, Optional, List
+
+   # This is how you annotate a function definition.
+   def stringify(num):
+       # type: (int) -> str
+       """Your function docstring goes here after the type definition."""
+       return str(num)
+
+   # This function has no parameters and also returns nothing. Annotations
+   # can also be placed on the same line as their function headers.
+   def greet_world(): # type: () -> None
+       print "Hello, world!"
+
+   # And here's how you specify multiple arguments.
+   def plus(num1, num2):
+       # type: (int, int) -> int
+       return num1 + num2
+
+   # Add type annotations for kwargs as though they were positional args.
+   def f(num1, my_float=3.5):
+       # type: (int, float) -> float
+       return num1 + my_float
+
+   # An argument can be declared positional-only by giving it a name
+   # starting with two underscores:
+   def quux(__x):
+       # type: (int) -> None
+       pass
+   quux(3)  # Fine
+   quux(__x=3)  # Error
+
+   # This is how you annotate a function value.
+   x = f # type: Callable[[int, float], float]
+
+   # A generator function that yields ints is secretly just a function that
+   # returns an iterable (see below) of ints, so that's how we annotate it.
+   def f(n):
+       # type: (int) -> Iterable[int]
+       i = 0
+       while i < n:
+           yield i
+           i += 1
+
+   # There's alternative syntax for functions with many arguments.
+   def send_email(address,     # type: Union[str, List[str]]
+                  sender,      # type: str
+                  cc,          # type: Optional[List[str]]
+                  bcc,         # type: Optional[List[str]]
+                  subject='',
+                  body=None    # type: List[str]
+                  ):
+       # type: (...) -> bool
+       ...
+
+
+When you're puzzled or when things are complicated
+**************************************************
+
+.. code-block:: python
+
+   from typing import Union, Any, List, cast
+
+   # To find out what type mypy infers for an expression anywhere in
+   # your program, wrap it in reveal_type.  Mypy will print an error
+   # message with the type; remove it again before running the code.
+   reveal_type(1) # -> error: Revealed type is 'builtins.int'
+
+   # Use Union when something could be one of a few types.
+   x = [3, 5, "test", "fun"] # type: List[Union[int, str]]
+
+   # Use Any if you don't know the type of something or it's too
+   # dynamic to write a type for.
+   x = mystery_function() # type: Any
+
+   # This is how to deal with varargs.
+   # This makes each positional arg and each keyword arg a 'str'.
+   def call(self, *args, **kwargs):
+            # type: (*str, **str) -> str
+            request = make_request(*args, **kwargs)
+            return self.do_api_query(request)
+   
+   # Use `ignore` to suppress type-checking on a given line, when your
+   # code confuses mypy or runs into an outright bug in mypy.
+   # Good practice is to comment every `ignore` with a bug link
+   # (in mypy, typeshed, or your own code) or an explanation of the issue.
+   x = confusing_function() # type: ignore # https://github.com/python/mypy/issues/1167
+
+   # cast is a helper function that tells mypy to treat an expression as having a given type.
+   # It performs no conversion or check at runtime.
+   a = [4]
+   b = cast(List[int], a)  # passes fine
+   c = cast(List[str], a)  # passes fine (no runtime check)
+   reveal_type(c)  # -> error: Revealed type is 'builtins.list[builtins.str]'
+   print(c)  # -> [4] the object is not cast
+
+   # if you want dynamic attributes on your class, have it override __setattr__ or __getattr__
+   # in a stub or in your source code.
+   # __setattr__ allows for dynamic assignment to names
+   # __getattr__ allows for dynamic access to names
+   class A:
+       # this will allow assignment to any A.x, if x is the same type as `value`
+       def __setattr__(self, name, value):
+           # type: (str, int) -> None
+           ...
+
+   obj = A()
+   obj.foo = 42  # works (assigning an int)
+   obj.bar = 'Ex-parrot'  # fails type checking (assigning a str)
+
+   # TODO: explain "Need type annotation for variable" when
+   # initializing with None or an empty container
+
+
+Standard duck types
+*******************
+
+In typical Python code, many functions that can take a list or a dict
+as an argument only need their argument to be somehow "list-like" or
+"dict-like".  A specific meaning of "list-like" or "dict-like" (or
+something-else-like) is called a "duck type", and several duck types
+that are common in idiomatic Python are standardized.
+
+.. code-block:: python
+
+   from typing import Mapping, MutableMapping, Sequence, Iterable, List, Set
+
+   # Use Iterable for generic iterables (anything usable in `for`),
+   # and Sequence where a sequence (supporting `len` and `__getitem__`) is required.
+   def f(iterable_of_ints):
+       # type: (Iterable[int]) -> List[str]
+       return [str(x) for x in iterable_of_ints]
+   f(range(1, 3))
+
+   # Mapping describes a dict-like object (with `__getitem__`) that we won't mutate,
+   # and MutableMapping one (with `__setitem__`) that we might.
+   def f(my_dict):
+       # type: (Mapping[int, str]) -> List[int]
+       return list(my_dict.keys())
+   f({3: 'yes', 4: 'no'})
+   def f(my_mapping):
+       # type: (MutableMapping[int, str]) -> Set[str]
+       my_mapping[5] = 'maybe'
+       return set(my_mapping.values())
+   f({3: 'yes', 4: 'no'})
+
+
+Classes
+*******
+
+.. code-block:: python
+
+   class MyClass(object):
+
+       # For instance methods, omit `self` from the type comment.
+       def my_method(self, num, str1):
+           # type: (int, str) -> str
+           return num * str1
+
+       # The __init__ method doesn't return anything, so it gets return
+       # type None just like any other method that doesn't return anything.
+       def __init__(self):
+           # type: () -> None
+           pass
+
+   # User-defined classes are written with just their own names.
+   x = MyClass() # type: MyClass
+
+
+Other stuff
+***********
+
+.. code-block:: python
+
+   import sys
+   import re
+   # typing.Match describes regex matches from the re module.
+   from typing import Match, AnyStr, IO
+   x = re.match(r'[0-9]+', "15") # type: Match[str]
+
+   # Use AnyStr for functions that should accept any kind of string
+   # without allowing different kinds of strings to mix.
+   def concat(a, b):
+       # type: (AnyStr, AnyStr) -> AnyStr
+       return a + b
+   concat(u"foo", u"bar")  # type: unicode
+   concat(b"foo", b"bar")  # type: bytes
+
+   # Use IO[] for functions that should accept or return any
+   # object that comes from an open() call. The IO[] does not
+   # distinguish between reading, writing or other modes.
+   def get_sys_IO(mode='w'):
+       # type: (str) -> IO[str]
+       if mode == 'w':
+           return sys.stdout
+       elif mode == 'r':
+           return sys.stdin
+       else:
+           return sys.stdout
+
+   # TODO: add TypeVar and a simple generic function
+
diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst
new file mode 100644
index 0000000..5ef62b2
--- /dev/null
+++ b/docs/source/cheat_sheet_py3.rst
@@ -0,0 +1,307 @@
+.. _cheat-sheet-py3:
+
+Mypy syntax cheat sheet (Python 3)
+==================================
+
+This document is a quick cheat sheet showing how the `PEP 484 <https://www.python.org/dev/peps/pep-0484/>`_ type
+language represents various common types in Python 3. Unless otherwise noted, the syntax is valid on all versions of Python 3.
+
+.. note::
+
+   Technically many of the type annotations shown below are redundant,
+   because mypy can derive them from the type of the expression.  So
+   many of the examples have a dual purpose: show how to write the
+   annotation, and show the inferred types.
+
+
+Built-in types
+**************
+
+.. code-block:: python
+
+   from typing import List, Set, Dict, Tuple, Text, Optional, AnyStr
+
+   # For simple built-in types, just use the name of the type.
+   x = 1  # type: int
+   x = 1.0  # type: float
+   x = True  # type: bool
+   x = "test"  # type: str
+   x = u"test"  # type: str
+   x = b"test"  # type: bytes
+
+   # For collections, the name of the type is capitalized, and the
+   # name of the type inside the collection is in brackets.
+   x = [1]  # type: List[int]
+   x = {6, 7}  # type: Set[int]
+
+   # For mappings, we need the types of both keys and values.
+   x = {'field': 2.0}  # type: Dict[str, float]
+
+   # For tuples, we specify the types of all the elements.
+   x = (3, "yes", 7.5)  # type: Tuple[int, str, float]
+
+   # For textual data, use Text.
+   # This is `unicode` in Python 2 and `str` in Python 3.
+   x = ["string", u"unicode"]  # type: List[Text]
+
+
+
+   # Use Optional for values that could be None.
+   input_str = f()  # type: Optional[str]
+   if input_str is not None:
+      print(input_str)
+
+
+Functions
+*********
+
+Python 3 introduces an annotation syntax for function declarations in `PEP 3107 <https://www.python.org/dev/peps/pep-3107/>`_.
+
+.. code-block:: python
+
+   from typing import Callable, Iterable, Union, Optional, List
+
+   # This is how you annotate a function definition.
+   def stringify(num: int) -> str:
+       return str(num)
+       
+   # And here's how you specify multiple arguments.
+   def plus(num1: int, num2: int) -> int:
+       return num1 + num2
+
+   # Add type annotations for kwargs as though they were positional args.
+   def f(num1: int, my_float: float = 3.5) -> float:
+       return num1 + my_float
+
+   # An argument can be declared positional-only by giving it a name
+   # starting with two underscores:
+   def quux(__x: int) -> None:
+       pass
+   quux(3)  # Fine
+   quux(__x=3)  # Error
+
+   # This is how you annotate a function value.
+   x = f # type: Callable[[int, float], float]
+
+   # A generator function that yields ints is secretly just a function that
+   # returns an iterable (see below) of ints, so that's how we annotate it.
+   def f(n: int) -> Iterable[int]:
+       i = 0
+       while i < n:
+           yield i
+           i += 1
+
+   # For a function with many arguments, you can of course split it over multiple lines
+   def send_email(address: Union[str, List[str]],
+                  sender: str,
+                  cc: Optional[List[str]],
+                  bcc: Optional[List[str]],
+                  subject='',
+                  body: List[str] = None
+                  ) -> bool:
+       
+       ...
+
+
+When you're puzzled or when things are complicated
+**************************************************
+
+.. code-block:: python
+
+   from typing import Union, Any, List, cast
+
+   # To find out what type mypy infers for an expression anywhere in
+   # your program, wrap it in reveal_type.  Mypy will print an error
+   # message with the type; remove it again before running the code.
+   reveal_type(1)  # -> error: Revealed type is 'builtins.int'
+
+   # Use Union when something could be one of a few types.
+   x = [3, 5, "test", "fun"]  # type: List[Union[int, str]]
+
+   # Use Any if you don't know the type of something or it's too
+   # dynamic to write a type for.
+   x = mystery_function()  # type: Any
+
+   # This is how to deal with varargs.
+   # This makes each positional arg and each keyword arg a 'str'.
+   def call(self, *args: str, **kwargs: str) -> str:
+            request = make_request(*args, **kwargs)
+            return self.do_api_query(request)
+
+   # Use `ignore` to suppress type-checking on a given line, when your
+   # code confuses mypy or runs into an outright bug in mypy.
+   # Good practice is to comment every `ignore` with a bug link
+   # (in mypy, typeshed, or your own code) or an explanation of the issue.
+   x = confusing_function()  # type: ignore # https://github.com/python/mypy/issues/1167
+
+   # cast is a helper function that tells mypy to treat an expression as having a given type.
+   # It performs no conversion or check at runtime.
+   a = [4]
+   b = cast(List[int], a)  # passes fine
+   c = cast(List[str], a)  # passes fine (no runtime check)
+   reveal_type(c)  # -> error: Revealed type is 'builtins.list[builtins.str]'
+   print(c)  # -> [4] the object is not cast
+
+   # if you want dynamic attributes on your class, have it override __setattr__ or __getattr__
+   # in a stub or in your source code.
+   # __setattr__ allows for dynamic assignment to names
+   # __getattr__ allows for dynamic access to names
+   class A:
+       # this will allow assignment to any A.x, if x is the same type as `value`
+       def __setattr__(self, name: str, value: int) -> None: ...
+       # this will allow access to any A.x, if x is compatible with the return type
+       def __getattr__(self, name: str) -> int: ...
+
+   obj = A()
+   obj.foo = 42  # works (assigning an int)
+   obj.bar = 'Ex-parrot'  # fails type checking (assigning a str)
+
+
+   # TODO: explain "Need type annotation for variable" when
+   # initializing with None or an empty container
+
+
+Standard duck types
+*******************
+
+In typical Python code, many functions that can take a list or a dict
+as an argument only need their argument to be somehow "list-like" or
+"dict-like".  A specific meaning of "list-like" or "dict-like" (or
+something-else-like) is called a "duck type", and several duck types
+that are common in idiomatic Python are standardized.
+
+.. code-block:: python
+
+   from typing import Mapping, MutableMapping, Sequence, Iterable, List, Set
+
+   # Use Iterable for generic iterables (anything usable in `for`),
+   # and Sequence where a sequence (supporting `len` and `__getitem__`) is required.
+   def f(iterable_of_ints: Iterable[int]) -> List[str]:
+       return [str(x) for x in iterable_of_ints]
+   f(range(1, 3))
+
+   # Mapping describes a dict-like object (with `__getitem__`) that we won't mutate,
+   # and MutableMapping one (with `__setitem__`) that we might.
+   def f(my_dict: Mapping[int, str]) -> List[int]:
+       return list(my_dict.keys())
+   f({3: 'yes', 4: 'no'})
+   def f(my_mapping: MutableMapping[int, str]) -> Set[str]:
+       my_mapping[5] = 'maybe'
+       return set(my_mapping.values())
+   f({3: 'yes', 4: 'no'})
+
+
+Classes
+*******
+
+.. code-block:: python
+
+   class MyClass:
+       # The __init__ method doesn't return anything, so it gets return
+       # type None just like any other method that doesn't return anything.
+       def __init__(self) -> None:
+           ...
+       # For instance methods, omit the annotation for `self`.
+       def my_method(self, num: int, str1: str) -> str:
+           return num * str1
+
+
+
+   # User-defined classes are written with just their own names.
+   x = MyClass() # type: MyClass
+
+
+Other stuff
+***********
+
+.. code-block:: python
+
+   import sys
+   import re
+   # typing.Match describes regex matches from the re module.
+   from typing import Match, AnyStr, IO
+   x = re.match(r'[0-9]+', "15")  # type: Match[str]
+
+   # You can use AnyStr to indicate that any string type will work
+   # but not to mix types
+   def full_name(first: AnyStr, last: AnyStr) -> AnyStr:
+       return first+last
+   full_name('Jon','Doe')  # same str ok
+   full_name(b'Bill', b'Bit')  # same binary ok
+   full_name(b'Terry', 'Trouble')  # different str types, fails
+
+   # Use IO[] for functions that should accept or return any
+   # object that comes from an open() call. The IO[] does not
+   # distinguish between reading, writing or other modes.
+   def get_sys_IO(mode='w') -> IO[str]:
+       if mode == 'w':
+           return sys.stdout
+       elif mode == 'r':
+           return sys.stdin
+       else:
+           return sys.stdout
+
+   # Forward references are useful if you want to reference a class before it is defined
+   
+   def f(foo: A) -> int:  # this will fail
+       ...
+   
+   class A:
+       ...
+       
+   # However, using the string 'A', it will pass as long as a class of that name is defined later on
+   def f(foo: 'A') -> int:
+       ...
+
+   # A simple generic function using TypeVar (a minimal sketch; see the
+   # generics documentation for the full story):
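+   from typing import TypeVar, Sequence
+
+   T = TypeVar('T')
+
+   def first(seq: Sequence[T]) -> T:
+       # The return type is tied to the element type of the argument.
+       return seq[0]
+
+   first([1, 2, 3])    # T is inferred as int
+   first(['a', 'b'])   # T is inferred as str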
+
+Variable Annotation in Python 3.6 with PEP 526
+**********************************************
+
+Python 3.6 brings new syntax for annotating variables with `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_.
+Mypy brings limited support for PEP 526 annotations.
+
+
+.. code-block:: python
+
+   from typing import ClassVar, List, Tuple
+
+   # annotating a variable uses the same syntax as annotating a function argument
+   name: str = "Eric Idle"
+   
+   # class instances can be annotated as follows
+   mc: MyClass = MyClass()
+   
+   # tuple packing can be done as follows
+   tu: Tuple[str, ...] = ('a', 'b', 'c')
+   
+   # annotations are not checked at runtime
+   year: int = '1972'  # error in type checking, but works at runtime
+   
+   # these are all equivalent
+   hour = 24 # type: int
+   hour: int; hour = 24
+   hour: int = 24
+   
+   # you do not (!) need to initialize a variable to annotate it
+   a: int # ok for type checking and runtime
+   
+   # which is useful in conditional branches
+   child: bool
+   if age < 18:
+       child = True
+   else:
+       child = False
+   
+   # annotations for classes are for instance variables (those created in __init__ or __new__)
+   class Battery:
+       charge_percent: int = 100  # this is an instance variable with a default value
+       capacity: int  # an instance variable without a default
+       
+   # you can use the ClassVar annotation to make the variable a class variable instead of an instance variable.
+   class Car:
+       seats: ClassVar[int] = 4
+       passengers: ClassVar[List[str]]
+       
+   # You can also declare the type of an attribute in __init__
+   class Box:
+       def __init__(self) -> None:
+           self.items: List[str] = []
+   
+Please see :ref:`python-36` for more on mypy's compatibility with Python 3.6's new features.
diff --git a/docs/source/class_basics.rst b/docs/source/class_basics.rst
new file mode 100644
index 0000000..dc778d3
--- /dev/null
+++ b/docs/source/class_basics.rst
@@ -0,0 +1,157 @@
+Class basics
+============
+
+Instance and class attributes
+*****************************
+
+The mypy type checker detects if you are trying to access a missing
+attribute, which is a very common programming error. For this to work
+correctly, instance and class attributes must be defined or
+initialized within the class. Mypy infers the types of attributes:
+
+.. code-block:: python
+
+   class A:
+       def __init__(self, x: int) -> None:
+           self.x = x     # Attribute x of type int
+
+   a = A(1)
+   a.x = 2       # OK
+   a.y = 3       # Error: A has no attribute y
+
+This is a bit like each class having an implicitly defined
+``__slots__`` attribute. This is only enforced during type
+checking and not when your program is running.
+
+You can declare types of variables in the class body explicitly using
+a type comment:
+
+.. code-block:: python
+
+   class A:
+       x = None  # type: List[int]  # Declare attribute x of type List[int]
+
+   a = A()
+   a.x = [1]     # OK
+
+As in Python, a variable defined in the class body can be used as a
+class variable or an instance variable.
+
+Similarly, you can give explicit types to instance variables defined
+in a method:
+
+.. code-block:: python
+
+   class A:
+       def __init__(self) -> None:
+           self.x = []  # type: List[int]
+
+       def f(self) -> None:
+           self.y = 0  # type: Any
+
+You can only define an instance variable within a method if you assign
+to it explicitly using ``self``:
+
+.. code-block:: python
+
+   class A:
+       def __init__(self) -> None:
+           self.y = 1   # Define y
+           a = self
+           a.x = 1      # Error: x not defined
+
+Overriding statically typed methods
+***********************************
+
+When overriding a statically typed method, mypy checks that the
+override has a compatible signature:
+
+.. code-block:: python
+
+   class A:
+       def f(self, x: int) -> None:
+           ...
+
+   class B(A):
+       def f(self, x: str) -> None:   # Error: type of x incompatible
+           ...
+
+   class C(A):
+       def f(self, x: int, y: int) -> None:  # Error: too many arguments
+           ...
+
+   class D(A):
+       def f(self, x: int) -> None:   # OK
+           ...
+
+.. note::
+
+   You can also vary return types **covariantly** in overriding. For
+   example, you could override the return type ``object`` with a subtype
+   such as ``int``.
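+
+   A minimal sketch (the class names are purely illustrative):
+
+   .. code-block:: python
+
+      class Base:
+          def f(self) -> object:
+              return 'abc'
+
+      class Derived(Base):
+          def f(self) -> int:   # OK: int is a subtype of object
+              return 0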
+
+You can also override a statically typed method with a dynamically
+typed one. This allows dynamically typed code to override methods
+defined in library classes without worrying about their type
+signatures.
+
+There is no runtime enforcement that the method override returns a
+value that is compatible with the original return type, since
+annotations have no effect at runtime:
+
+.. code-block:: python
+
+   class A:
+       def inc(self, x: int) -> int:
+           return x + 1
+
+   class B(A):
+       def inc(self, x):       # Override, dynamically typed
+           return 'hello'
+
+   b = B()
+   print(b.inc(1))   # hello
+   a = b # type: A
+   print(a.inc(1))   # hello
+
+Abstract base classes and multiple inheritance
+**********************************************
+
+Mypy uses Python abstract base classes for protocol types. There are
+several built-in abstract base classes (for example,
+``Sequence``, ``Iterable`` and ``Iterator``). You can define abstract
+base classes using the ``abc.ABCMeta`` metaclass and the
+``abc.abstractmethod`` function decorator.
+
+.. code-block:: python
+
+   from abc import ABCMeta, abstractmethod
+   import typing
+
+   class A(metaclass=ABCMeta):
+       @abstractmethod
+       def foo(self, x: int) -> None: pass
+
+       @abstractmethod
+       def bar(self) -> str: pass
+
+   class B(A):
+       def foo(self, x: int) -> None: ...
+       def bar(self) -> str:
+           return 'x'
+
+   a = A() # Error: A is abstract
+   b = B() # OK
+
+Unlike most Python code, abstract base classes are likely to play a
+significant role in many complex mypy programs.
+
+A class can inherit any number of classes, both abstract and
+concrete. As with normal overrides, a dynamically typed method can
+implement a statically typed abstract method defined in an abstract
+base class.
+
+.. note::
+
+   There are also plans to support more Python-style "duck typing" in
+   the type system. The details are still open.
diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst
new file mode 100644
index 0000000..5df9df2
--- /dev/null
+++ b/docs/source/command_line.rst
@@ -0,0 +1,484 @@
+.. _command-line:
+
+The mypy command line
+=====================
+
+This section documents many of mypy's command line flags.  A quick
+summary of command line flags can always be printed using the ``-h``
+flag (or its long form ``--help``)::
+
+  $ mypy -h
+  usage: mypy [-h] [-v] [-V] [--python-version x.y] [--platform PLATFORM] [-2]
+              [--ignore-missing-imports]
+              [--follow-imports {normal,silent,skip,error}]
+              [--disallow-any {unimported, expr, unannotated, decorated, explicit, generics}]
+              [--disallow-untyped-calls] [--disallow-untyped-defs]
+              [--check-untyped-defs] [--disallow-subclassing-any]
+              [--warn-incomplete-stub] [--warn-redundant-casts]
+              [--no-warn-no-return] [--warn-return-any] [--warn-unused-ignores]
+              [--show-error-context] [--no-implicit-optional] [-i]
+              [--quick-and-dirty] [--cache-dir DIR] [--skip-version-check]
+              [--strict-optional]
+              [--strict-optional-whitelist [GLOB [GLOB ...]]]
+              [--junit-xml JUNIT_XML] [--pdb] [--show-traceback] [--stats]
+              [--inferstats] [--custom-typing MODULE]
+              [--custom-typeshed-dir DIR] [--scripts-are-modules]
+              [--config-file CONFIG_FILE] [--show-column-numbers]
+              [--find-occurrences CLASS.MEMBER] [--strict]
+              [--shadow-file SOURCE_FILE SHADOW_FILE] [--any-exprs-report DIR]
+              [--cobertura-xml-report DIR] [--html-report DIR]
+              [--linecount-report DIR] [--linecoverage-report DIR]
+              [--memory-xml-report DIR] [--old-html-report DIR]
+              [--txt-report DIR] [--xml-report DIR] [--xslt-html-report DIR]
+              [--xslt-txt-report DIR] [-m MODULE] [-c PROGRAM_TEXT] [-p PACKAGE]
+              [files [files ...]]
+
+  (etc., too long to show everything here)
+
+Specifying files and directories to be checked
+**********************************************
+
+You've already seen ``mypy program.py`` as a way to type check the
+file ``program.py``.  More generally you can pass any number of files
+and directories on the command line and they will all be type checked
+together.
+
+- Files ending in ``.py`` (and stub files ending in ``.pyi``) are
+  checked as Python modules.
+
+- Files not ending in ``.py`` or ``.pyi`` are assumed to be Python
+  scripts and checked as such.
+
+- Directories representing Python packages (i.e. containing a
+  ``__init__.py[i]`` file) are checked as Python packages; all
+  submodules and subpackages will be checked (subpackages must
+  themselves have a ``__init__.py[i]`` file).
+
+- Directories that don't represent Python packages (i.e. not directly
+  containing an ``__init__.py[i]`` file) are checked as follows:
+
+  - All ``*.py[i]`` files contained directly therein are checked as
+    toplevel Python modules;
+
+  - All packages contained directly therein (i.e. immediate
+    subdirectories with an ``__init__.py[i]`` file) are checked as
+    toplevel Python packages.
+
+One more thing about checking modules and packages: if the directory
+*containing* a module or package specified on the command line has an
+``__init__.py[i]`` file, mypy assigns these an absolute module name by
+crawling up the path until no ``__init__.py[i]`` file is found.  For
+example, suppose we run the command ``mypy foo/bar/baz.py`` where
+``foo/bar/__init__.py`` exists but ``foo/__init__.py`` does not.  Then
+the module name assumed is ``bar.baz`` and the directory ``foo`` is
+added to mypy's module search path.  On the other hand, if
+``foo/bar/__init__.py`` did not exist, ``foo/bar`` would be added to
+the module search path instead, and the module name assumed is just
+``baz``.
+
+If a script (a file not ending in ``.py[i]``) is processed, the module
+name assumed is always ``__main__`` (matching the behavior of the
+Python interpreter).
+
+Other ways of specifying code to be checked
+*******************************************
+
+The flag ``-m`` (long form: ``--module``) lets you specify a module
+name to be found using the default module search path.  The module
+name may contain dots.  For example::
+
+  $ mypy -m html.parser
+
+will type check the module ``html.parser`` (this happens to be a
+library stub).
+
+The flag ``-p`` (long form: ``--package``) is similar to ``-m`` but
+you give it a package name and it will type check all submodules and
+subpackages (recursively) of that package.  (If you pass a package
+name to ``-m`` it will just type check the package's ``__init__.py``
+and anything imported from there.)  For example::
+
+  $ mypy -p html
+
+will type check the entire ``html`` package (of library stubs).
+
+Finally the flag ``-c`` (long form: ``--command``) will take a string
+from the command line and type check it as a small program.  For
+example::
+
+  $ mypy -c 'x = [1, 2]; print(x())'
+
+will type check that little program (and complain that ``List[int]``
+is not callable).
+
+Reading a list of files from a file
+***********************************
+
+Finally, any command-line argument starting with ``@`` reads additional
+command-line arguments from the file following the ``@`` character.
+This is primarily useful if you have a file containing a list of files
+that you want to be type-checked: instead of using shell syntax like::
+
+  mypy $(cat file_of_files)
+
+you can use this instead::
+
+  mypy @file_of_files
+
+Such a file can also contain other flags, but a preferred way of
+reading flags (not files) from a file is to use a
+:ref:`configuration file <config-file>`.
+
+
+.. _finding-imports:
+
+How imports are found
+*********************
+
+When mypy encounters an ``import`` statement, it tries to find the module
+on the file system, similar to the way Python finds it.
+However, there are some differences.
+
+First, mypy has its own search path.
+This is computed from the following items:
+
+- The ``MYPYPATH`` environment variable
+  (a colon-separated list of directories).
+- The directories containing the sources given on the command line
+  (see below).
+- The relevant directories of the
+  `typeshed <https://github.com/python/typeshed>`_ repo.
+
+For sources given on the command line, the path is adjusted by crawling
+up from the given file or package to the nearest directory that does not
+contain an ``__init__.py`` or ``__init__.pyi`` file.
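+
+For example, to also have mypy search a local ``stubs`` directory for
+modules and stub files (``stubs`` and ``program.py`` are placeholder
+names here), you could run::
+
+  MYPYPATH=./stubs mypy program.py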
+
+Second, mypy searches for stub files in addition to regular Python files
+and packages.
+The rules for searching a module ``foo`` are as follows:
+
+- The search looks in each of the directories in the search path
+  (see above) until a match is found.
+- If a package named ``foo`` is found (i.e. a directory
+  ``foo`` containing an ``__init__.py`` or ``__init__.pyi`` file)
+  that's a match.
+- If a stub file named ``foo.pyi`` is found, that's a match.
+- If a Python module named ``foo.py`` is found, that's a match.
+
+These matches are tried in order, so that if multiple matches are found
+in the same directory on the search path
+(e.g. a package and a Python file, or a stub file and a Python file)
+the first one in the above list wins.
+
+In particular, if a Python file and a stub file are both present in the
+same directory on the search path, only the stub file is used.
+(However, if the files are in different directories, the one found
+in the earlier directory is used.)
+
+NOTE: These rules are relevant to the following section too:
+the ``--follow-imports`` flag described below is applied *after* the
+above algorithm has determined which package, stub or module to use.
+
+.. _follow-imports:
+
+Following imports or not?
+*************************
+
+When you're first attacking a large existing codebase with mypy, you
+may only want to check selected files.  For example, you may only want
+to check those files to which you have already added annotations.
+This is easily accomplished using a shell pipeline like this::
+
+  mypy $(find . -name \*.py | xargs grep -l '# type:')
+
+(While there are many improvements possible to make this example more
+robust, this is not the place for a tutorial in shell programming.)
+
+However, by default mypy doggedly tries to :ref:`follow imports
+<finding-imports>`.  This may cause several types of problems that you
+may want to silence during your initial conquest:
+
+- Your code may import library modules for which no stub files exist
+  yet.  This can cause a lot of errors like the following::
+
+    main.py:1: error: No library stub file for standard library module 'antigravity'
+    main.py:2: error: No library stub file for module 'flask'
+    main.py:3: error: Cannot find module named 'sir_not_appearing_in_this_film'
+
+  If you see only a few of these you may be able to silence them by
+  putting ``# type: ignore`` on the respective ``import`` statements,
+  but it's usually easier to silence all such errors by using
+  :ref:`--ignore-missing-imports <ignore-missing-imports>`.
+
+- Your project's directory structure may hinder mypy in finding
+  certain modules that are part of your project, e.g. modules hidden
+  away in a subdirectory that's not a package.  You can usually deal
+  with this by setting the ``MYPYPATH`` variable (see
+  :ref:`finding-imports`).
+
+- When following imports, mypy may find a module that's part of your
+  project but which you haven't annotated yet; it may then report
+  errors for the top level code in that module (where the top level
+  includes class bodies and function/method default values).  Here
+  the ``--follow-imports`` flag comes in handy.
+
+The ``--follow-imports`` flag takes a mandatory string value that can
+take one of four values.  It only applies to modules for which a
+``.py`` file is found (but no corresponding ``.pyi`` stub file) and
+that are not given on the command line.  Passing a package or
+directory on the command line implies all modules in that package or
+directory.  The four possible values are:
+
+- ``normal`` (the default) follow imports normally and type check all
+  top level code (as well as the bodies of all functions and methods
+  with at least one type annotation in the signature).
+
+- ``silent`` follow imports normally and even "type check" them
+  normally, but *suppress any error messages*. This is typically the
+  best option for a new codebase.
+
+- ``skip`` *don't* follow imports, silently replacing the module (and
+  everything imported *from* it) with an object of type ``Any``.
+  (This option used to be known as ``--silent-imports`` and while it
+  is very powerful it can also cause hard-to-debug errors, hence the
+  recommendation of using ``silent`` instead.)
+
+- ``error`` the same behavior as ``skip`` but not quite as silent --
+  it flags the import as an error, like this::
+
+    main.py:1: note: Import of 'submodule' ignored
+    main.py:1: note: (Using --follow-imports=error, module not passed on command line)
+
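+As an illustration (a suggested starting invocation, not something the
+flags above require), a new codebase might first be checked with::
+
+  mypy --follow-imports=silent --ignore-missing-imports myproject/
+
+where ``myproject/`` is a placeholder for your package or source
+directory.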
+
+Additional command line flags
+*****************************
+
+Here are some more useful flags:
+
+.. _ignore-missing-imports:
+
+- ``--ignore-missing-imports`` suppresses error messages about imports
+  that cannot be resolved (see :ref:`follow-imports` for some examples).
+
+- ``--strict-optional`` enables experimental strict checking of ``Optional[...]``
+  types and ``None`` values. Without this option, mypy doesn't generally check the
+  use of ``None`` values -- they are valid everywhere. See :ref:`strict_optional` for
+  more about this feature.
+
+- ``--strict-optional-whitelist`` attempts to suppress strict Optional-related
+  errors in non-whitelisted files.  Takes an arbitrary number of globs as the
+  whitelist.  This option is intended to be used to incrementally roll out
+  ``--strict-optional`` to a large codebase that already has mypy annotations.
+  However, this flag comes with some significant caveats.  It does not suppress
+  all errors caused by turning on ``--strict-optional``, only most of them, so
+  there may still be a bit of upfront work to be done before it can be used in
+  CI.  It will also suppress some errors that would be caught in a
+  non-strict-Optional run.  Therefore, when using this flag, you should also
+  re-check your code without ``--strict-optional`` to ensure new type errors
+  are not introduced.
+
+.. _disallow-any:
+
+- ``--disallow-any`` disallows various types of ``Any`` in a module.
+  The option takes a comma-separated list of the following values:
+  ``unimported``, ``unannotated``, ``expr``, ``decorated``, ``explicit``,
+  ``generics``.
+
+  ``unimported`` disallows usage of types that come from unfollowed imports
+  (such types become aliases for ``Any``). Unfollowed imports occur either
+  when the imported module does not exist or when ``--follow-imports=skip``
+  is set.
+
+  ``unannotated`` disallows function definitions that are not fully
+  typed (i.e. that are missing an explicit type annotation for any
+  of the parameters or the return type). The ``unannotated`` option
+  is interchangeable with ``--disallow-untyped-defs``.
+
+  ``expr`` disallows all expressions in the module that have type ``Any``.
+  If an expression of type ``Any`` appears anywhere in the module
+  mypy will output an error unless the expression is immediately
+  used as an argument to ``cast`` or assigned to a variable with an
+  explicit type annotation. In addition, declaring a variable of type ``Any``
+  or casting to type ``Any`` is not allowed. Note that calling functions
+  that take parameters of type ``Any`` is still allowed.
+
+  ``decorated`` disallows functions that have ``Any`` in their signature
+  after decorator transformation.
+
+  ``explicit`` disallows explicit ``Any`` in type positions such as type
+  annotations and generic type parameters.
+
+  ``generics`` disallows usage of generic types that do not specify explicit
+  type parameters. Moreover, built-in collections (such as ``list`` and
+  ``dict``) become disallowed, since you should use their aliases from the
+  ``typing`` module (such as ``List[int]`` and ``Dict[str, str]``).
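+
+  For example, to forbid ``Any`` types coming from unfollowed imports
+  and bare generic types, one could use an invocation like this (an
+  illustrative example, with ``program.py`` as a placeholder)::
+
+    mypy --disallow-any=unimported,generics program.py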
+
+- ``--disallow-untyped-defs`` reports an error whenever it encounters
+  a function definition without type annotations.
+
+- ``--check-untyped-defs`` is less severe than the previous option --
+  it type checks the body of every function, regardless of whether it
+  has type annotations.  (By default the bodies of functions without
+  annotations are not type checked.)  It will assume all arguments
+  have type ``Any`` and always infer ``Any`` as the return type.
+
+- ``--disallow-untyped-calls`` reports an error whenever a function
+  with type annotations calls a function defined without annotations.
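+
+  For example (an illustrative pair of functions), calling the
+  unannotated helper from the annotated function is flagged:
+
+  .. code-block:: python
+
+     def helper(x):           # no annotations
+         return x * 2
+
+     def compute(n: int) -> int:
+         return helper(n)     # error with --disallow-untyped-calls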
+
+.. _disallow-subclassing-any:
+
+- ``--disallow-subclassing-any`` reports an error whenever a class
+  subclasses a value of type ``Any``.  This may occur when the base
+  class is imported from a module that doesn't exist (when using
+  :ref:`--ignore-missing-imports <ignore-missing-imports>`) or is
+  ignored due to :ref:`--follow-imports=skip <follow-imports>` or a
+  ``# type: ignore`` comment on the ``import`` statement.  Since the
+  module is silenced, the imported class is given a type of ``Any``.
+  By default mypy will assume that the subclass correctly inherited
+  the base class even though that may not actually be the case.  This
+  flag makes mypy raise an error instead.
+
+.. _incremental:
+
+- ``--incremental`` is an experimental option that enables a module
+  cache. When enabled, mypy caches results from previous runs
+  to speed up type checking. Incremental mode can help when most parts
+  of your program haven't changed since the previous mypy run.  A
+  companion flag is ``--cache-dir DIR``, which specifies where the
+  cache files are written.  By default this is ``.mypy_cache`` in the
+  current directory.  While the cache is only read in incremental
+  mode, it is written even in non-incremental mode, in order to "warm"
+  the cache.  To disable writing the cache, use
+  ``--cache-dir=/dev/null`` (UNIX) or ``--cache-dir=nul`` (Windows).
+  Cache files belonging to a different mypy version are ignored.
+
+.. _quick-mode:
+
+- ``--quick-and-dirty`` is an experimental, unsafe variant of
+  :ref:`incremental mode <incremental>`.  Quick mode is faster than
+  regular incremental mode, because it only re-checks modules that
+  were modified since their cache file was last written (regular
+  incremental mode also re-checks all modules that depend on one or
+  more modules that were re-checked).  Quick mode is unsafe because it
+  may miss problems caused by a change in a dependency.  Quick mode
+  updates the cache, but regular incremental mode ignores cache files
+  written by quick mode.
+
+- ``--python-version X.Y`` will make mypy typecheck your code as if it were
+  run under Python version X.Y. Without this option, mypy will default to using
+  whatever version of Python is running mypy. Note that the ``-2`` and
+  ``--py2`` flags are aliases for ``--python-version 2.7``. See
+  :ref:`version_and_platform_checks` for more about this feature.
+
+- ``--platform PLATFORM`` will make mypy typecheck your code as if it were
+  run under the given operating system. Without this option, mypy will
+  default to using whatever operating system you are currently using. See
+  :ref:`version_and_platform_checks` for more about this feature.
+
+- ``--show-column-numbers`` will add column offsets to error messages,
+  for example, the following indicates an error in line 12, column 9
+  (note that column offsets are 0-based):
+
+  .. code-block:: text
+
+     main.py:12:9: error: Unsupported operand types for / ("int" and "str")
+
+- ``--scripts-are-modules`` will give command line arguments that
+  appear to be scripts (i.e. files whose name does not end in ``.py``)
+  a module name derived from the script name rather than the fixed
+  name ``__main__``.  This allows checking more than one script in a
+  single mypy invocation.  (The default ``__main__`` is technically
+  more correct, but if you have many scripts that import a large
+  package, the behavior enabled by this flag is often more
+  convenient.)
+
+- ``--custom-typeshed-dir DIR`` specifies the directory where mypy looks for
+  typeshed stubs, instead of the typeshed that ships with mypy.  This is
+  primarily intended to make it easier to test typeshed changes before
+  submitting them upstream, but also allows you to use a forked version of
+  typeshed.
+
+.. _config-file-flag:
+
+- ``--config-file CONFIG_FILE`` causes configuration settings to be
+  read from the given file.  By default settings are read from ``mypy.ini``
+  or ``setup.cfg`` in the current directory.  Settings override mypy's
+  built-in defaults and command line flags can override settings.
+  See :ref:`config-file` for the syntax of configuration files.
+
+- ``--junit-xml JUNIT_XML`` will make mypy generate a JUnit XML test
+  result document with type checking results. This can make it easier
+  to integrate mypy with continuous integration (CI) tools.
+
+- ``--find-occurrences CLASS.MEMBER`` will make mypy print out all
+  usages of a class member based on static type information. This
+  feature is experimental.
+
+- ``--cobertura-xml-report DIR`` causes mypy to generate a Cobertura
+  XML type checking coverage report.
+
+- ``--warn-no-return`` causes mypy to generate errors for missing return
+  statements on some execution paths. Mypy doesn't generate these errors
+  for functions with ``None`` or ``Any`` return types. Mypy
+  also currently ignores functions with an empty body or a body that is
+  just ellipsis (``...``), since these can be valid as abstract methods.
+  This option is on by default.
+
+- ``--warn-return-any`` causes mypy to generate a warning when returning a value
+  with type ``Any`` from a function declared with a non- ``Any`` return type.
+
+- ``--strict`` mode enables all optional error checking flags.  You can see the
+  list of flags enabled by strict mode in the full ``mypy -h`` output.
+
+.. _shadow-file:
+
+- ``--shadow-file SOURCE_FILE SHADOW_FILE`` makes mypy typecheck SHADOW_FILE in
+  place of SOURCE_FILE.  Primarily intended for tooling.  Allows tooling to
+  make transformations to a file before type checking without having to change
+  the file in-place.  (For example, tooling could use this to display the type
+  of an expression by wrapping it with a call to reveal_type in the shadow
+  file and then parsing the output.)
+
+.. _no-implicit-optional:
+
+- ``--no-implicit-optional`` causes mypy to stop treating arguments
+  with a ``None`` default value as having an implicit ``Optional[...]``
+  type.
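+
+  For example, with this flag the following definition (an
+  illustrative one) is rejected, because the ``None`` default no
+  longer implies an ``Optional[int]`` argument type:
+
+  .. code-block:: python
+
+     def wait(timeout: int = None) -> None:  # error with --no-implicit-optional
+         ...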
+
+For the remaining flags you can read the full ``mypy -h`` output.
+
+.. note::
+
+   Command line flags are liable to change between releases.
+
+.. _integrating-mypy:
+
+Integrating mypy into another Python application
+************************************************
+
+It is possible to integrate mypy into another Python 3 application by
+importing ``mypy.api`` and calling the ``run`` function with a
+parameter of type ``List[str]``, containing what normally would have
+been the command line arguments to mypy.
+
+Function ``run`` returns a ``Tuple[str, str, int]``, namely
+``(<normal_report>, <error_report>, <exit_status>)``, in which
+``<normal_report>`` is what mypy normally writes to ``sys.stdout``,
+``<error_report>`` is what mypy normally writes to ``sys.stderr``, and
+``<exit_status>`` is the exit status mypy normally returns to the
+operating system.
+
+A trivial example of using the API is the following::
+
+    import sys
+    from mypy import api
+
+    result = api.run(sys.argv[1:])
+
+    if result[0]:
+        print('\nType checking report:\n')
+        print(result[0])  # stdout
+
+    if result[1]:
+        print('\nError report:\n')
+        print(result[1])  # stderr
+
+    print('\nExit status:', result[2])
diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst
new file mode 100644
index 0000000..0c8b500
--- /dev/null
+++ b/docs/source/common_issues.rst
@@ -0,0 +1,432 @@
+.. _common_issues:
+
+Common issues
+=============
+
+This section has examples of cases when you need to update your code
+to use static typing, and ideas for working around issues if mypy
+doesn't work as expected. Statically typed code is often identical to
+normal Python code, but sometimes you need to do things slightly
+differently.
+
+Can't install mypy using pip
+----------------------------
+
+If installation fails, you've probably hit one of these issues:
+
+* Mypy needs Python 3.3 or later to run.
+* You may have to run pip like this:
+  ``python3 -m pip install mypy``.
+
+.. _annotations_needed:
+
+No errors reported for obviously wrong code
+-------------------------------------------
+
+There are several common reasons why obviously wrong code is not
+flagged as an error.
+
+- **The function containing the error is not annotated.** Functions that
+  do not have any annotations (neither for any argument nor for the
+  return type) are not type-checked, and even the most blatant type
+  errors (e.g. ``2 + 'a'``) pass silently.  The solution is to add
+  annotations.
+
+  Example:
+
+  .. code-block:: python
+
+      def foo(a):
+          return '(' + a.split() + ')'  # No error!
+
+  This gives no error even though ``a.split()`` is "obviously" a list
+  (the author probably meant ``a.strip()``).  The error is reported
+  once you add annotations:
+
+  .. code-block:: python
+
+      def foo(a: str) -> str:
+          return '(' + a.split() + ')'
+      # error: Unsupported operand types for + ("str" and List[str])
+
+  If you don't know what types to add, you can use ``Any``, but beware:
+
+- **One of the values involved has type 'Any'.** Extending the above
+  example, if we were to leave out the annotation for ``a``, we'd get
+  no error:
+
+  .. code-block:: python
+
+      def foo(a) -> str:
+          return '(' + a.split() + ')'  # No error!
+
+  The reason is that if the type of ``a`` is unknown, the type of
+  ``a.split()`` is also unknown, so it is inferred as having type
+  ``Any``, and it is no error to add a string to an ``Any``.
+
+  If you're having trouble debugging such situations,
+  :ref:`reveal_type() <reveal-type>` might come in handy.
+
+  Note that sometimes library stubs have imprecise type information,
+  e.g. the ``pow()`` builtin returns ``Any`` (see `typeshed issue 285
+  <https://github.com/python/typeshed/issues/285>`_ for the reason).
+
+- **Some imports may be silently ignored**.  Other sources of
+  unexpected ``Any`` values are the :ref:`"--ignore-missing-imports"
+  <ignore-missing-imports>` and :ref:`"--follow-imports=skip"
+  <follow-imports>` flags.  When you use ``--ignore-missing-imports``,
+  any imported module that cannot be found is silently replaced with
+  ``Any``.  When using ``--follow-imports=skip`` the same is true for
+  modules for which a ``.py`` file is found but that are not specified
+  on the command line.  (If a ``.pyi`` stub is found it is always
+  processed normally, regardless of the value of
+  ``--follow-imports``.)  To help debug the former situation (no
+  module found at all) leave out ``--ignore-missing-imports``; to get
+  clarity about the latter use ``--follow-imports=error``.  You can
+  read up about these and other useful flags in :ref:`command-line`.
+
+.. _silencing_checker:
+
+Spurious errors and locally silencing the checker
+-------------------------------------------------
+
+You can use a ``# type: ignore`` comment to silence the type checker
+on a particular line. For example, let's say our code is using
+the C extension module ``frobnicate``, and there's no stub available.
+Mypy will complain about this, as it has no information about the
+module:
+
+.. code-block:: python
+
+    import frobnicate  # Error: No module "frobnicate"
+    frobnicate.start()
+
+You can add a ``# type: ignore`` comment to tell mypy to ignore this
+error:
+
+.. code-block:: python
+
+    import frobnicate  # type: ignore
+    frobnicate.start()  # Okay!
+
+The second line is now fine, since the ignore comment causes the name
+``frobnicate`` to get an implicit ``Any`` type.
+
+.. note::
+
+    The ``# type: ignore`` comment will only assign the implicit ``Any``
+    type if mypy cannot find information about that particular module. So,
+    if we did have a stub available for ``frobnicate`` then mypy would
+    ignore the ``# type: ignore`` comment and typecheck the stub as usual.
+
+Types of empty collections
+--------------------------
+
+You often need to specify the type when you assign an empty list or
+dict to a new variable, as mentioned earlier:
+
+.. code-block:: python
+
+   a = []  # type: List[int]
+
+Without the annotation mypy can't always figure out the
+precise type of ``a``.
+
+You can use a simple empty list literal in a dynamically typed function (as the
+type of ``a`` would be implicitly ``Any`` and need not be inferred), if the type
+of the variable has been declared or inferred before, or if you perform a simple
+modification operation in the same scope (such as ``append`` for a list):
+
+.. code-block:: python
+
+   a = []  # Okay because followed by append, inferred type List[int]
+   for i in range(n):
+       a.append(i * i)
+
+However, in more complex cases an explicit type annotation can be
+required (mypy will tell you this). Often the annotation can
+make your code easier to understand, so it helps not only mypy but
+also everybody who reads the code!
+
+Redefinitions with incompatible types
+-------------------------------------
+
+Each name within a function only has a single 'declared' type. You can
+reuse for loop indices etc., but if you want to use a variable with
+multiple types within a single function, you may need to declare it
+with the ``Any`` type.
+
+.. code-block:: python
+
+   def f() -> None:
+       n = 1
+       ...
+       n = 'x'        # Type error: n has type int
+
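+If you really need a single name to hold values of multiple types,
+declaring it as ``Any`` (a minimal sketch of the workaround mentioned
+above) silences the error:
+
+.. code-block:: python
+
+   from typing import Any
+
+   def g() -> None:
+       n = 1  # type: Any
+       ...
+       n = 'x'        # OK, because n was declared as Any
+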
+.. note::
+
+   This limitation could be lifted in a future mypy
+   release.
+
+Note that you can redefine a variable with a more *precise* or a more
+concrete type. For example, you can redefine a sequence (which does
+not support ``sort()``) as a list and sort it in-place:
+
+.. code-block:: python
+
+    def f(x: Sequence[int]) -> None:
+        # Type of x is Sequence[int] here; we don't know the concrete type.
+        x = list(x)
+        # Type of x is List[int] here.
+        x.sort()  # Okay!
+
+.. _variance:
+
+Invariance vs covariance
+------------------------
+
+Most mutable generic collections are invariant, and mypy considers all
+user-defined generic classes invariant by default
+(see :ref:`variance-of-generics` for motivation). This could lead to some
+unexpected errors when combined with type inference. For example:
+
+.. code-block:: python
+
+   class A: ...
+   class B(A): ...
+
+   lst = [A(), A()]  # Inferred type is List[A]
+   new_lst = [B(), B()]  # inferred type is List[B]
+   lst = new_lst  # mypy will complain about this, because List is invariant
+
+Possible strategies in such situations are:
+
+* Use an explicit type annotation:
+
+  .. code-block:: python
+
+     new_lst: List[A] = [B(), B()]
+     lst = new_lst  # OK
+
+* Make a copy of the right hand side:
+
+  .. code-block:: python
+
+     lst = list(new_lst) # Also OK
+
+* Use immutable collections as annotations whenever possible:
+
+  .. code-block:: python
+
+     def f_bad(x: List[A]) -> A:
+         return x[0]
+     f_bad(new_lst) # Fails
+
+     def f_good(x: Sequence[A]) -> A:
+         return x[0]
+     f_good(new_lst) # OK
+
+Declaring a supertype as variable type
+--------------------------------------
+
+Sometimes the inferred type is a subtype (subclass) of the desired
+type. The type inference uses the first assignment to infer the type
+of a name (assume here that ``Shape`` is the base class of both
+``Circle`` and ``Triangle``):
+
+.. code-block:: python
+
+   shape = Circle()    # Infer shape to be Circle
+   ...
+   shape = Triangle()  # Type error: Triangle is not a Circle
+
+You can just give an explicit type for the variable in cases such as the
+above example:
+
+.. code-block:: python
+
+   shape = Circle() # type: Shape   # The variable shape can be any Shape,
+                                    # not just Circle
+   ...
+   shape = Triangle()               # OK
+
+Complex type tests
+------------------
+
+Mypy can usually infer the types correctly when using ``isinstance()``
+type tests, but for other kinds of checks you may need to add an
+explicit type cast:
+
+.. code-block:: python
+
+   from typing import cast
+
+   def f(o: object) -> None:
+       if type(o) is int:
+           o = cast(int, o)
+           g(o + 1)    # This would be an error without the cast
+           ...
+       else:
+           ...
+
+.. note::
+
+    Note that the ``object`` type used in the above example is similar
+    to ``Object`` in Java: it only supports operations defined for *all*
+    objects, such as equality and ``isinstance()``. The type ``Any``,
+    in contrast, supports all operations, even if they may fail at
+    runtime. The cast above would have been unnecessary if the type of
+    ``o`` was ``Any``.
+
+Mypy can't infer the type of ``o`` after the ``type()`` check
+because it only knows about ``isinstance()`` (and the latter is better
+style anyway).  We can write the above code without a cast by using
+``isinstance()``:
+
+.. code-block:: python
+
+   def f(o: object) -> None:
+       if isinstance(o, int):  # Mypy understands isinstance checks
+           g(o + 1)        # Okay; type of o is inferred as int here
+           ...
+
+Type inference in mypy is designed to work well in common cases, to be
+predictable and to let the type checker give useful error
+messages. More powerful type inference strategies often have complex
+and difficult-to-predict failure modes and could result in very
+confusing error messages. The tradeoff is that you as a programmer
+sometimes have to give the type checker a little help.
+
+.. _version_and_platform_checks:
+
+Python version and system platform checks
+-----------------------------------------
+
+Mypy supports the ability to perform Python version checks and platform
+checks (e.g. Windows vs Posix), ignoring code paths that won't be run on
+the targeted Python version or platform. This allows you to more effectively
+typecheck code that supports multiple versions of Python or multiple operating
+systems.
+
+More specifically, mypy will understand the use of ``sys.version_info`` and
+``sys.platform`` checks within ``if/elif/else`` statements. For example:
+
+.. code-block:: python
+
+   import sys
+
+   # Distinguishing between different versions of Python:
+   if sys.version_info >= (3, 5):
+       # Python 3.5+ specific definitions and imports
+   elif sys.version_info[0] >= 3:
+       # Python 3 specific definitions and imports
+   else:
+       # Python 2 specific definitions and imports
+
+   # Distinguishing between different operating systems:
+   if sys.platform.startswith("linux"):
+       # Linux-specific code
+   elif sys.platform == "darwin":
+       # Mac-specific code
+   elif sys.platform == "win32":
+       # Windows-specific code
+   else:
+       # Other systems
+
+.. note::
+
+   Mypy currently does not support more complex checks, and does not assign
+   any special meaning when assigning a ``sys.version_info`` or ``sys.platform``
+   check to a variable. This may change in future versions of mypy.
+
+By default, mypy will use your current version of Python and your current
+operating system as default values for ``sys.version_info`` and
+``sys.platform``.
+
+To target a different Python version, use the ``--python-version X.Y`` flag.
+For example, to verify your code typechecks if it were run using Python 2,
+pass in ``--python-version 2.7`` from the command line. Note that you do not
+need to have Python 2.7 installed to perform this check.
+
+To target a different operating system, use the ``--platform PLATFORM`` flag.
+For example, to verify your code typechecks if it were run in Windows, pass
+in ``--platform win32``. See the documentation for
+`sys.platform <https://docs.python.org/3/library/sys.html#sys.platform>`_
+for examples of valid platform parameters.
+
+.. _reveal-type:
+
+Displaying the type of an expression
+------------------------------------
+
+You can use ``reveal_type(expr)`` to ask mypy to display the inferred
+static type of an expression. This can be useful when you don't quite
+understand how mypy handles a particular piece of code. Example:
+
+.. code-block:: python
+
+   reveal_type((1, 'hello'))  # Revealed type is 'Tuple[builtins.int, builtins.str]'
+
+.. note::
+
+   ``reveal_type`` is only understood by mypy and doesn't exist in
+   Python; you'll have to remove any ``reveal_type`` calls before you
+   can run your code.  ``reveal_type`` is always available when type
+   checking and you don't need to import it.
+
+.. _import-cycles:
+
+Import cycles
+-------------
+
+An import cycle occurs when module A imports module B and module B
+imports module A (perhaps indirectly, e.g. ``A -> B -> C -> A``).
+Sometimes in order to add type annotations you have to add extra
+imports to a module and those imports cause cycles that didn't exist
+before.  If those cycles become a problem when running your program,
+there's a trick: if the import is only needed for type annotations in
+forward references (string literals) or comments, you can write the
+imports inside ``if TYPE_CHECKING:`` so that they are not executed at runtime.
+Example:
+
+File ``foo.py``:
+
+.. code-block:: python
+
+   from typing import List, TYPE_CHECKING
+
+   if TYPE_CHECKING:
+       import bar
+
+   def listify(arg: 'bar.BarClass') -> 'List[bar.BarClass]':
+       return [arg]
+
+File ``bar.py``:
+
+.. code-block:: python
+
+   from typing import List
+   from foo import listify
+
+   class BarClass:
+       def listifyme(self) -> 'List[BarClass]':
+           return listify(self)
+
+.. note::
+
+   The ``TYPE_CHECKING`` constant defined by the ``typing`` module
+   is ``False`` at runtime but ``True`` while type checking.
+
+Python 3.5.1 doesn't have ``typing.TYPE_CHECKING``. An alternative is
+to define a constant named ``MYPY`` that has the value ``False``
+at runtime. Mypy considers it to be ``True`` when type checking.
+Here's the above example modified to use ``MYPY``:
+
+.. code-block:: python
+
+   from typing import List
+
+   MYPY = False
+   if MYPY:
+       import bar
+
+   def listify(arg: 'bar.BarClass') -> 'List[bar.BarClass]':
+       return [arg]
diff --git a/docs/source/conf.py b/docs/source/conf.py
new file mode 100644
index 0000000..cf64842
--- /dev/null
+++ b/docs/source/conf.py
@@ -0,0 +1,268 @@
+# -*- coding: utf-8 -*-
+#
+# Mypy documentation build configuration file, created by
+# sphinx-quickstart on Sun Sep 14 19:50:35 2014.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('../..'))
+
+from mypy.version import __version__ as mypy_version
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = []
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Mypy'
+copyright = u'2016, Jukka Lehtosalo'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = mypy_version.split('-')[0]
+# The full version, including alpha/beta/rc tags.
+release = mypy_version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+try:
+    import sphinx_rtd_theme
+except ImportError:
+    html_theme = 'default'
+else:
+    html_theme = 'sphinx_rtd_theme'
+    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+#html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Mypydoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+  ('index', 'Mypy.tex', u'Mypy Documentation',
+   u'Jukka', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    ('index', 'mypy', u'Mypy Documentation',
+     [u'Jukka Lehtosalo'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+  ('index', 'Mypy', u'Mypy Documentation',
+   u'Jukka', 'Mypy', 'One line description of project.',
+   'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
+
+rst_prolog = '.. |...| unicode:: U+2026   .. ellipsis\n'
diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst
new file mode 100644
index 0000000..6a32414
--- /dev/null
+++ b/docs/source/config_file.rst
@@ -0,0 +1,210 @@
+.. _config-file:
+
+The mypy configuration file
+===========================
+
+Mypy supports reading configuration settings from a file.  By default
+it uses the file ``mypy.ini`` (with fallback to ``setup.cfg``) in the
+current directory; the ``--config-file`` command-line flag can be used to
+read a different file instead (see :ref:`--config-file <config-file-flag>`).
+
+It is important to understand that there is no merging of configuration
+files, as it would lead to ambiguity.  The ``--config-file`` flag
+has the highest precedence and must be correct; otherwise mypy will report
+an error and exit.  Without the command line option, mypy will look for
+the default files, but will use only one of them: first ``mypy.ini``,
+and then ``setup.cfg``.
+
+Most flags correspond closely to :ref:`command-line flags
+<command-line>` but there are some differences in flag names and some
+flags may take a different value based on the module being processed.
+
+The configuration file format is the usual
+`ini file <https://docs.python.org/3.6/library/configparser.html>`_
+format.  It should contain section names in square brackets and flag
+settings of the form ``NAME = VALUE``.  Comments start with ``#``
+characters.
+
+- A section named ``[mypy]`` must be present.  This specifies
+  the global flags. The ``setup.cfg`` file is an exception to this.
+
+- Additional sections named ``[mypy-PATTERN1,PATTERN2,...]`` may be
+  present, where ``PATTERN1``, ``PATTERN2`` etc. are `fnmatch patterns
+  <https://docs.python.org/3.6/library/fnmatch.html>`_
+  separated by commas.  These sections specify additional flags that
+  only apply to *modules* whose name matches at least one of the patterns.
+
+Global flags
+************
+
+The following global flags may only be set in the global section
+(``[mypy]``).
+
+- ``python_version`` (string) specifies the Python version used to
+  parse and check the target program.  The format is ``DIGIT.DIGIT``,
+  for example ``2.7``.  The default is the version of the Python
+  interpreter used to run mypy.
+
+- ``platform`` (string) specifies the OS platform for the target
+  program, for example ``darwin`` or ``win32`` (meaning OS X or
+  Windows, respectively).  The default is the current platform as
+  revealed by Python's ``sys.platform`` variable.
+
+- ``custom_typing_module`` (string) specifies the name of an
+  alternative module which is to be considered equivalent to the
+  ``typing`` module.
+
+- ``custom_typeshed_dir`` (string) specifies the name of an
+  alternative directory which is used to look for stubs instead of the
+  default ``typeshed`` directory.
+
+- ``mypy_path`` (string) specifies the paths to use, after trying the paths
+  from the ``MYPYPATH`` environment variable.  Useful if you'd like to keep
+  stubs in your repo, along with the config file.
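+
+  For example (the directory name below is only an illustration)::
+
+      [mypy]
+      mypy_path = stubs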
+
+- ``warn_incomplete_stub`` (Boolean, default False) warns for missing
+  type annotations in typeshed.  This is only relevant in combination
+  with ``check_untyped_defs``.
+
+- ``warn_redundant_casts`` (Boolean, default False) warns about
+  casting an expression to its inferred type.
+
+- ``warn_unused_ignores`` (Boolean, default False) warns about
+  unneeded ``# type: ignore`` comments.
+
+- ``strict_optional`` (Boolean, default False) enables experimental
+  strict Optional checks.
+
+- ``scripts_are_modules`` (Boolean, default False) makes script ``x``
+  become module ``x`` instead of ``__main__``.  This is useful when
+  checking multiple scripts in a single run.
+
+- ``verbosity`` (integer, default 0) controls how much debug output
+  will be generated.  Higher numbers are more verbose.
+
+- ``pdb`` (Boolean, default False) invokes pdb on fatal error.
+
+- ``show_traceback`` (Boolean, default False) shows traceback on fatal
+  error.
+
+- ``dump_type_stats`` (Boolean, default False) dumps stats about type
+  definitions.
+
+- ``dump_inference_stats`` (Boolean, default False) dumps stats about
+  type inference.
+
+- ``incremental`` (Boolean, default False) enables :ref:`incremental
+  mode <incremental>`.
+
+- ``cache_dir`` (string, default ``.mypy_cache``) stores module cache
+  info in the given folder in :ref:`incremental mode <incremental>`.
+  The cache is only read in incremental mode, but it is always written
+  unless the value is set to ``/dev/null`` (UNIX) or ``nul``
+  (Windows).
+
+- ``quick_and_dirty`` (Boolean, default False) enables :ref:`quick
+  mode <quick-mode>`.
+
+- ``show_error_context`` (Boolean, default False) shows
+  context notes before errors.
+
+- ``show_column_numbers`` (Boolean, default False) shows column numbers in
+  error messages.
+
+
+.. _per-module-flags:
+
+Per-module flags
+****************
+
+The following flags may vary per module.  They may also be specified in
+the global section; the global section provides defaults which are
+overridden by the pattern sections matching the module name.
+
+.. note::
+
+   If multiple pattern sections match a module they are processed in
+   unspecified order.
+
+- ``follow_imports`` (string, default ``normal``) directs what to do
+  with imports when the imported module is found as a ``.py`` file and
+  not part of the files, modules and packages on the command line.
+  The four possible values are ``normal``, ``silent``, ``skip`` and
+  ``error``.  For explanations see the discussion for the
+  :ref:`--follow-imports <follow-imports>` command line flag.  Note
+  that if pattern matching is used, the pattern should match the name
+  of the *imported* module, not the module containing the import
+  statement.
+
+- ``ignore_missing_imports`` (Boolean, default False) suppress error
+  messages about imports that cannot be resolved.  Note that if
+  pattern matching is used, the pattern should match the name of the
+  *imported* module, not the module containing the import statement.
+
+- ``silent_imports`` (Boolean, deprecated) equivalent to
+  ``follow_imports=skip`` plus ``ignore_missing_imports=True``.
+
+- ``almost_silent`` (Boolean, deprecated) equivalent to
+  ``follow_imports=skip``.
+
+- ``disallow_any`` (Comma-separated list, default empty) is an option to
+  disallow various types of ``Any`` in a module. The flag takes a
+  comma-separated list of the following arguments: ``unimported``,
+  ``unannotated``, ``expr``, ``decorated``, ``explicit``, ``generics``.
+  For explanations see the discussion for the :ref:`--disallow-any <disallow-any>` option.
+
+- ``disallow_untyped_calls`` (Boolean, default False) disallows
+  calling functions without type annotations from functions with type
+  annotations.
+
+- ``disallow_untyped_defs`` (Boolean, default False) disallows
+  defining functions without type annotations or with incomplete type
+  annotations.
+
+- ``check_untyped_defs`` (Boolean, default False) type-checks the
+  interior of functions without type annotations.
+
+- ``debug_cache`` (Boolean, default False) writes the incremental
+  cache JSON files using a more readable, but slower format.
+
+- ``show_none_errors`` (Boolean, default True) shows errors related
+  to strict ``None`` checking, if the global ``strict_optional`` flag
+  is enabled.
+
+- ``ignore_errors`` (Boolean, default False) ignores all non-fatal
+  errors.
+
+- ``warn_no_return`` (Boolean, default True) shows errors for
+  missing return statements on some execution paths.
+
+- ``warn_return_any`` (Boolean, default False) shows a warning when
+  returning a value with type ``Any`` from a function declared with a
+  non- ``Any`` return type.
+
+- ``strict_boolean`` (Boolean, default False) makes using non-boolean
+  expressions in conditions an error.
+
+- ``no_implicit_optional`` (Boolean, default False) changes the treatment of
+  arguments with a default value of ``None`` by not implicitly making their
+  type ``Optional``.
+
+Example
+*******
+
+You might put this in your ``mypy.ini`` file at the root of your repo:
+
+.. code-block:: text
+
+    [mypy]
+    python_version = 2.7
+    [mypy-foo.*]
+    disallow_untyped_defs = True
+
+This automatically sets ``--python-version 2.7`` (a.k.a. ``--py2``)
+for all mypy runs in this tree, and also selectively turns on the
+``--disallow-untyped-defs`` flag for all modules in the ``foo``
+package.  This issues an error for function definitions without
+type annotations in that subdirectory only.
+
+.. note::
+
+   Configuration flags are liable to change between releases.
diff --git a/docs/source/duck_type_compatibility.rst b/docs/source/duck_type_compatibility.rst
new file mode 100644
index 0000000..a128b69
--- /dev/null
+++ b/docs/source/duck_type_compatibility.rst
@@ -0,0 +1,40 @@
+Duck type compatibility
+-----------------------
+
+In Python, certain types are compatible even though they aren't subclasses of
+each other. For example, ``int`` objects are valid whenever ``float`` objects
+are expected. Mypy supports this idiom via *duck type compatibility*. As of
+now, this is only supported for a small set of built-in types:
+
+* ``int`` is duck type compatible with ``float`` and ``complex``.
+* ``float`` is duck type compatible with ``complex``.
+* In Python 2, ``str`` is duck type compatible with ``unicode``.
+
+.. note::
+
+   Mypy support for Python 2 is still work in progress.
+
+For example, mypy considers an ``int`` object to be valid whenever a
+``float`` object is expected.  Thus code like this is nice and clean
+and also behaves as expected:
+
+.. code-block:: python
+
+   import math
+
+   def degrees_to_radians(degrees: float) -> float:
+       return math.pi * degrees / 180
+
+   n = 90  # Inferred type 'int'
+   print(degrees_to_radians(n))   # Okay!
+
+.. note::
+
+   Note that in Python 2 a ``str`` object with non-ASCII characters is
+   often *not valid* when a unicode string is expected. The mypy type
+   system does not consider a string with non-ASCII values as a
+   separate type so some programs with this kind of error will
+   silently pass type checking. In Python 3 ``str`` and ``bytes`` are
+   separate, unrelated types and this kind of error is easy to
+   detect. This is a good reason for preferring Python 3 over Python 2!
+
+   See :ref:`text-and-anystr` for details on how to enforce that a
+   value must be a unicode string in a cross-compatible way.
diff --git a/docs/source/dynamic_typing.rst b/docs/source/dynamic_typing.rst
new file mode 100644
index 0000000..ba76442
--- /dev/null
+++ b/docs/source/dynamic_typing.rst
@@ -0,0 +1,86 @@
+.. _dynamic_typing:
+
+
+Dynamically typed code
+======================
+
+As mentioned earlier, bodies of functions that don't have any
+explicit types in their function annotations are dynamically typed
+(operations are checked at runtime). Code outside functions is
+statically typed by default, and types of variables are inferred. This
+usually does the right thing, but you can also make any variable
+dynamically typed by defining it explicitly with the type ``Any``:
+
+.. code-block:: python
+
+   from typing import Any
+
+   s = 1                 # Statically typed (type int)
+   d = 1  # type: Any    # Dynamically typed (type Any)
+   s = 'x'               # Type check error
+   d = 'x'               # OK
+
+Operations on Any values
+------------------------
+
+You can do anything using a value with type ``Any``, and the type
+checker does not complain:
+
+.. code-block:: python
+
+    def f(x: Any) -> int:
+        # All of these are valid!
+        x.foobar(1, y=2)
+        print(x[3] + 'f')
+        if x:
+            x.z = x(2)
+        open(x).read()
+        return x
+
+Values derived from an ``Any`` value also often have the type ``Any``
+implicitly, as mypy can't infer a more precise result type. For
+example, if you access an attribute of an ``Any`` value or call an
+``Any`` value, the result is ``Any``:
+
+.. code-block:: python
+
+    def f(x: Any) -> None:
+        y = x.foo()  # y has type Any
+        y.bar()      # Okay as well!
+
+``Any`` types may propagate through your program, making type checking
+less effective, unless you are careful.
+
+Any vs. object
+--------------
+
+The type ``object`` is another type that can have an instance of arbitrary
+type as a value. Unlike ``Any``, ``object`` is an ordinary static type (it
+is similar to ``Object`` in Java), and only operations valid for *all*
+types are accepted for ``object`` values. These are all valid:
+
+.. code-block:: python
+
+    def f(o: object) -> None:
+        if o:
+            print(o)
+        print(isinstance(o, int))
+        o = 2
+        o = 'foo'
+
+These are, however, flagged as errors, since not all objects support these
+operations:
+
+.. code-block:: python
+
+    def f(o: object) -> None:
+        o.foo()       # Error!
+        o + 2         # Error!
+        open(o)       # Error!
+        n = 1  # type: int
+        n = o         # Error!
+
+You can use ``cast()`` (see chapter :ref:`casts`) or ``isinstance`` to
+go from a general type such as ``object`` to a more specific
+type (subtype) such as ``int``.  ``cast()`` is not needed with
+dynamically typed values (values with type ``Any``).
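+
+For example (a minimal sketch; the function name is made up):
+
+.. code-block:: python
+
+    from typing import cast
+
+    def add_one(o: object) -> int:
+        if isinstance(o, int):     # mypy narrows o to int in this branch
+            return o + 1
+        n = cast(int, o)           # assert the type; no runtime check happens
+        return n + 1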
diff --git a/docs/source/faq.rst b/docs/source/faq.rst
new file mode 100644
index 0000000..9fd73b4
--- /dev/null
+++ b/docs/source/faq.rst
@@ -0,0 +1,270 @@
+Frequently Asked Questions
+==========================
+
+Why have both dynamic and static typing?
+****************************************
+
+Dynamic typing can be flexible, powerful, convenient and easy. But
+it's not always the best approach; there are good reasons why many
+developers choose to use statically typed languages.
+
+Here are some potential benefits of mypy-style static typing:
+
+- Static typing can make programs easier to understand and
+  maintain. Type declarations can serve as machine-checked
+  documentation. This is important as code is typically read much more
+  often than modified, and this is especially important for large and
+  complex programs.
+
+- Static typing can help you find bugs earlier and with less testing
+  and debugging. Especially in large and complex projects this can be
+  a major time-saver.
+
+- Static typing can help you find difficult-to-find bugs before your
+  code goes into production. This can improve reliability and reduce
+  the number of security issues.
+
+- Static typing makes it practical to build very useful development
+  tools that can improve programming productivity or software quality,
+  including IDEs with precise and reliable code completion, static
+  analysis tools, etc.
+
+- You can get the benefits of both dynamic and static typing in a
+  single language. Dynamic typing can be perfect for a small project
+  or for writing the UI of your program, for example. As your program
+  grows, you can adapt tricky application logic to static typing to
+  help maintenance.
+
+See also the `front page <http://www.mypy-lang.org>`_ of the mypy web
+site.
+
+Would my project benefit from static typing?
+********************************************
+
+For many projects dynamic typing is perfectly fine (we think that
+Python is a great language). But sometimes your projects demand bigger
+guns, and that's when mypy may come in handy.
+
+If some of these ring true for your projects, mypy (and static typing)
+may be useful:
+
+- Your project is large or complex.
+
+- Your codebase must be maintained for a long time.
+
+- Multiple developers are working on the same code.
+
+- Running tests takes a lot of time or work (type checking may help
+  you find errors early in development, reducing the number of testing
+  iterations).
+
+- Some project members (devs or management) don't like dynamic typing,
+  but others prefer dynamic typing and Python syntax. Mypy could be a
+  solution that everybody finds easy to accept.
+
+- You want to future-proof your project even if currently none of the
+  above really apply.
+
+Can I use mypy to type check my existing Python code?
+*****************************************************
+
+It depends. Compatibility is pretty good, but some Python features are
+not yet implemented or fully supported. The ultimate goal is to make
+using mypy practical for most Python code. Code that uses complex
+introspection or metaprogramming may be impractical to type check, but
+it should still be possible to use static typing in other parts of a
+program.
+
+Will static typing make my programs run faster?
+***********************************************
+
+Mypy only does static type checking and it does not improve
+performance. It has a minimal performance impact. In the future, there
+could be other tools that can compile statically typed mypy code to C
+modules or to efficient JVM bytecode, for example, but this is outside
+the scope of the mypy project. It may also be possible to modify
+existing Python VMs to take advantage of static type information, but
+whether this is feasible is still unknown. This is nontrivial since
+the runtime types do not necessarily correspond to the static types.
+
+How do I type check my Python 2 code?
+*************************************
+
+You can use a `comment-based function annotation syntax
+<https://www.python.org/dev/peps/pep-0484/#suggested-syntax-for-python-2-7-and-straddling-code>`_
+and use the ``--py2`` command-line option to type check your Python 2 code.
+You'll also need to install ``typing`` for Python 2 via ``pip install typing``.
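+
+For example, here is a minimal sketch of the comment-based syntax (the
+function is just for illustration):
+
+.. code-block:: python
+
+    def gcd(a, b):
+        # type: (int, int) -> int
+        while b:
+            a, b = b, a % b
+        return a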
+
+Is mypy free?
+*************
+
+Yes. Mypy is free software, and it can also be used for commercial and
+proprietary projects. Mypy is available under the MIT license.
+
+Why not use structural subtyping?
+*********************************
+
+Mypy primarily uses `nominal subtyping
+<https://en.wikipedia.org/wiki/Nominative_type_system>`_ instead of
+`structural subtyping
+<https://en.wikipedia.org/wiki/Structural_type_system>`_. Some argue
+that structural subtyping is better suited for languages with duck
+typing such as Python.
+
+Here are some reasons why mypy uses nominal subtyping:
+
+1. It is easy to generate short and informative error messages when
+   using a nominal type system. This is especially important when
+   using type inference.
+
+2. Python's ``isinstance`` tests are basically nominal, and they are
+   widely used in programs. It is not clear how to support ``isinstance``
+   checks in a purely structural type system while remaining compatible
+   with Python idioms.
+
+3. Many programmers are already familiar with nominal subtyping and it
+   has been successfully used in languages such as Java, C++ and
+   C#. Only a few languages use structural subtyping.
+
+However, structural subtyping can also be useful. Structural subtyping
+is likely to be added to mypy in the future, even though we
+expect that most mypy programs will still primarily use nominal
+subtyping.
+
+I like Python and I have no need for static typing
+**************************************************
+
+That wasn't really a question, was it? Mypy is not aimed at replacing
+Python. The goal is to give more options for Python programmers, to
+make Python a more competitive alternative to other statically typed
+languages in large projects, to improve programmer productivity and to
+improve software quality.
+
+How are mypy programs different from normal Python?
+***************************************************
+
+Since you use a vanilla Python implementation to run mypy programs,
+mypy programs are also Python programs. The type checker may give
+warnings for some valid Python code, but the code is still always
+runnable. Also, some Python features and syntax are still not
+supported by mypy, but this is gradually improving.
+
+The obvious difference is the availability of static type
+checking. The section :ref:`common_issues` mentions some
+modifications to Python code that may be required to make code type
+check without errors. Also, your code must make attributes explicit and
+use an explicit protocol representation. For example, you may want to
+subclass an Abstract Base Class such as ``typing.Iterable``.
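+
+For example, here is a minimal sketch of making a class explicitly iterable
+by subclassing ``typing.Iterable`` (the class name is just for illustration):
+
+.. code-block:: python
+
+    from typing import Iterable, Iterator
+
+    class Countdown(Iterable[int]):
+        def __init__(self, start: int) -> None:
+            self.start = start
+
+        def __iter__(self) -> Iterator[int]:
+            return iter(range(self.start, 0, -1))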
+
+Mypy will support modular, efficient type checking, and this seems to
+rule out type checking some language features, such as arbitrary
+runtime addition of methods. However, it is likely that many of these
+features will be supported in a restricted form (for example, runtime
+modification is only supported for classes or methods registered as
+dynamic or 'patchable').
+
+How is mypy different from PyPy?
+********************************
+
+*This answer relates to PyPy as a Python implementation. See also the answer related to RPython below.*
+
+Mypy and PyPy are orthogonal. Mypy does static type checking, i.e. it
+is basically a linter, but static typing has no runtime effect,
+whereas PyPy is a Python implementation. You can use PyPy to run
+mypy programs.
+
+How is mypy different from Cython?
+**********************************
+
+`Cython <http://cython.org/>`_ is a variant of Python that supports
+compilation to CPython C modules. It can give major speedups to
+certain classes of programs compared to CPython, and it provides
+static typing (though this is different from mypy). Mypy differs in
+the following aspects, among others:
+
+- Cython is much more focused on performance than mypy. Mypy is only
+  about static type checking, and increasing performance is not a
+  direct goal.
+
+- The mypy syntax is arguably simpler and more "Pythonic" (no cdef/cpdef, etc.) for statically typed code.
+
+- The mypy syntax is compatible with Python. Mypy programs are normal
+  Python programs that can be run using any Python
+  implementation. Cython has many incompatible extensions to Python
+  syntax, and Cython programs generally cannot be run without first
+  compiling them to CPython extension modules via C. Cython also has a
+  pure Python mode, but it seems to support only a subset of Cython
+  functionality, and the syntax is quite verbose.
+
+- Mypy has a different set of type system features. For example, mypy
+  has genericity (parametric polymorphism), function types and
+  bidirectional type inference, which are not supported by
+  Cython. (Cython has fused types that are different but related to
+  mypy generics. Mypy also has a similar feature as an extension of
+  generics.)
+
+- The mypy type checker knows about the static types of many Python
+  stdlib modules and can effectively type check code that uses them.
+
+- Cython supports accessing C functions directly and many features are
+  defined in terms of translating them to C or C++. Mypy just uses
+  Python semantics, and mypy does not deal with accessing C library
+  functionality.
+
+How is mypy different from Nuitka?
+**********************************
+
+`Nuitka <http://nuitka.net/>`_ is a static compiler that can translate
+Python programs to C++. Nuitka integrates with the CPython
+runtime. Nuitka has additional future goals, such as using type
+inference and whole-program analysis to further speed up code. Here
+are some differences:
+
+- Nuitka is primarily focused on speeding up Python code. Mypy focuses
+  on static type checking and facilitating better tools.
+
+- Whole-program analysis tends to be slow and scale poorly to large or
+  complex programs. It is still unclear if Nuitka can solve these
+  issues. Mypy does not use whole-program analysis and will support
+  modular type checking (though this has not been implemented yet).
+
+How is mypy different from RPython or Shed Skin?
+************************************************
+
+`RPython <http://doc.pypy.org/en/latest/coding-guide.html>`_ and `Shed
+Skin <http://shed-skin.blogspot.co.uk/>`_ are basically statically
+typed subsets of Python. Mypy does the following important things
+differently:
+
+- RPython is primarily designed for implementing virtual machines;
+  mypy is a general-purpose tool.
+
+- Mypy supports both static and dynamic typing. Dynamically typed and
+  statically typed code can be freely mixed and can interact
+  seamlessly.
+
+- Mypy aims to support (in the future) fast and modular type
+  checking. Both RPython and Shed Skin use whole-program type
+  inference which is very slow, does not scale well to large programs
+  and often produces confusing error messages. Mypy can support
+  modularity since it only uses local type inference; static type
+  checking depends on having type annotations for function
+  signatures.
+
+- Mypy will support introspection, dynamic loading of code and many
+  other dynamic language features (though using these may make static
+  typing less effective). RPython and Shed Skin only support a
+  restricted Python subset without several of these features.
+
+- Mypy supports user-defined generic types.
+
+Mypy is a cool project. Can I help?
+***********************************
+
+Any help is much appreciated! `Contact
+<http://www.mypy-lang.org/contact.html>`_ the developers if you would
+like to contribute. Any help related to development, design,
+publicity, documentation, testing, web site maintenance, financing,
+etc. can be helpful. You can learn a lot by contributing, and anybody
+can help, even beginners! However, some knowledge of compilers and/or
+type systems is essential if you want to work on mypy internals.
diff --git a/docs/source/function_overloading.rst b/docs/source/function_overloading.rst
new file mode 100644
index 0000000..43f365b
--- /dev/null
+++ b/docs/source/function_overloading.rst
@@ -0,0 +1,92 @@
+.. _function-overloading:
+
+Function Overloading
+====================
+
+Sometimes the types in a function depend on each other in ways that
+can't be captured with a ``Union``.  For example, the ``__getitem__``
+(``[]`` bracket indexing) method can take an integer and return a
+single item, or take a ``slice`` and return a ``Sequence`` of items.
+You might be tempted to annotate it like so:
+
+.. code-block:: python
+
+    from typing import Sequence, TypeVar, Union
+    T = TypeVar('T')
+
+    class MyList(Sequence[T]):
+        def __getitem__(self, index: Union[int, slice]) -> Union[T, Sequence[T]]:
+            if isinstance(index, int):
+                ...  # Return a T here
+            elif isinstance(index, slice):
+                ...  # Return a sequence of Ts here
+            else:
+                raise TypeError(...)
+
+But this is too loose, as it implies that when you pass in an ``int``
+you might sometimes get out a single item and sometimes a sequence.
+The return type depends on the parameter type in a way that can't be
+expressed using a type variable.  Instead, we can use `overloading
+<https://www.python.org/dev/peps/pep-0484/#function-method-overloading>`_
+to give the same function multiple type annotations (signatures) and
+accurately describe the function's behavior.
+
+.. code-block:: python
+
+    from typing import overload, Sequence, TypeVar, Union
+    T = TypeVar('T')
+
+    class MyList(Sequence[T]):
+
+        # The @overload definitions are just for the type checker,
+        # and are overwritten by the real implementation below.
+        @overload
+        def __getitem__(self, index: int) -> T:
+            pass  # Don't put code here
+
+        # All overloads and the implementation must be adjacent
+        # in the source file, and overload order may matter:
+        # when two overloads may overlap, the more specific one
+        # should come first.
+        @overload
+        def __getitem__(self, index: slice) -> Sequence[T]:
+            pass  # Don't put code here
+
+        # The implementation goes last, without @overload.
+        # It may or may not have type hints; if it does,
+        # these are checked against the overload definitions
+        # as well as against the implementation body.
+        def __getitem__(self, index):
+            # This is exactly the same as before.
+            if isinstance(index, int):
+                ...  # Return a T here
+            elif isinstance(index, slice):
+                ...  # Return a sequence of Ts here
+            else:
+                raise TypeError(...)
+
+Overloaded function variants are still ordinary Python functions and
+they still define a single runtime object. There is no automatic
+dispatch happening, and you must manually handle the different types
+in the implementation (usually with :func:`isinstance` checks, as
+shown in the example).
+
+The overload variants must be adjacent in the code. This makes code
+clearer, as you don't have to hunt for overload variants across the
+file.
+
+Overloads in stub files are exactly the same, except there is no
+implementation.
+
+.. note::
+
+   As generic type variables are erased at runtime when constructing
+   instances of generic types, an overloaded function cannot have
+   variants that only differ in a generic type argument,
+   e.g. ``List[int]`` and ``List[str]``.
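+
+   For example, here is a sketch of the kind of variants this rules out;
+   they differ only in the list item type (the function name is just for
+   illustration):
+
+   .. code-block:: python
+
+      from typing import List, overload
+
+      @overload
+      def first(xs: List[int]) -> int: ...
+      @overload
+      def first(xs: List[str]) -> str: ...
+      def first(xs):
+          return xs[0]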
+
+.. note::
+
+   If you just need to constrain a type variable to certain types or
+   subtypes, you can use a :ref:`value restriction
+   <type-variable-value-restriction>`.
diff --git a/docs/source/generics.rst b/docs/source/generics.rst
new file mode 100644
index 0000000..bd0e054
--- /dev/null
+++ b/docs/source/generics.rst
@@ -0,0 +1,541 @@
+Generics
+========
+
+Defining generic classes
+************************
+
+The built-in collection classes are generic classes. Generic types
+have one or more type parameters, which can be arbitrary types. For
+example, ``Dict[int, str]`` has the type parameters ``int`` and
+``str``, and ``List[int]`` has a type parameter ``int``.
+
+Programs can also define new generic classes. Here is a very simple
+generic class that represents a stack:
+
+.. code-block:: python
+
+   from typing import TypeVar, Generic, List
+
+   T = TypeVar('T')
+
+   class Stack(Generic[T]):
+       def __init__(self) -> None:
+           # Create an empty list with items of type T
+           self.items = []  # type: List[T]
+
+       def push(self, item: T) -> None:
+           self.items.append(item)
+
+       def pop(self) -> T:
+           return self.items.pop()
+
+       def empty(self) -> bool:
+           return not self.items
+
+The ``Stack`` class can be used to represent a stack of any type:
+``Stack[int]``, ``Stack[Tuple[int, str]]``, etc.
+
+Using ``Stack`` is similar to built-in container types:
+
+.. code-block:: python
+
+   # Construct an empty Stack[int] instance
+   stack = Stack[int]()
+   stack.push(2)
+   stack.pop()
+   stack.push('x')        # Type error
+
+Type inference works for user-defined generic types as well:
+
+.. code-block:: python
+
+   def process(stack: Stack[int]) -> None: ...
+
+   process(Stack())   # Argument has inferred type Stack[int]
+
+Construction of instances of generic types is also type checked:
+
+.. code-block:: python
+
+   class Box(Generic[T]):
+       def __init__(self, content: T) -> None:
+           self.content = content
+
+   Box(1)  # OK, inferred type is Box[int]
+   Box[int](1)  # Also OK
+   s = 'some string'
+   Box[int](s)  # Type error
+
+Generic class internals
+***********************
+
+You may wonder what happens at runtime when you index
+``Stack``. Actually, indexing ``Stack`` returns essentially a copy
+of ``Stack`` that returns instances of the original class on
+instantiation:
+
+>>> print(Stack)
+__main__.Stack
+>>> print(Stack[int])
+__main__.Stack[int]
+>>> print(Stack[int]().__class__)
+__main__.Stack
+
+Note that built-in types ``list``, ``dict`` and so on do not support
+indexing in Python. This is why we have the aliases ``List``, ``Dict``
+and so on in the ``typing`` module. Indexing these aliases gives
+you a class that directly inherits from the target class in Python:
+
+>>> from typing import List
+>>> List[int]
+typing.List[int]
+>>> List[int].__bases__
+(<class 'list'>, typing.MutableSequence)
+
+Generic types can be instantiated or subclassed like usual classes,
+but the above examples illustrate that type variables are erased at
+runtime. Generic ``Stack`` instances are just ordinary
+Python objects, and they have no extra runtime overhead or magic due
+to being generic, other than a metaclass that overloads the indexing
+operator.
+
+.. _generic-subclasses:
+
+Defining sub-classes of generic classes
+***************************************
+
+User-defined generic classes and generic classes defined in ``typing``
+can be used as base classes for other classes, both generic and
+non-generic. For example:
+
+.. code-block:: python
+
+   from typing import Generic, TypeVar, Iterable, Iterator
+
+   T = TypeVar('T')
+
+   class Stream(Iterable[T]):  # This is a generic subclass of Iterable
+       def __iter__(self) -> Iterator[T]:
+           ...
+
+   input: Stream[int]  # Okay
+
+   class Codes(Iterable[int]):  # This is a non-generic subclass of Iterable
+       def __iter__(self) -> Iterator[int]:
+           ...
+
+   output: Codes[int]  # Error! Codes is not generic
+
+   class Receiver(Generic[T]):
+       def accept(self, value: T) -> None:
+           ...
+
+   class AdvancedReceiver(Receiver[T]):
+       ...
+
+.. note::
+
+    You have to add an explicit ``Iterable`` (or ``Iterator``) base class
+    if you want mypy to consider a user-defined class as iterable (and
+    ``Sequence`` for sequences, etc.). This is because mypy doesn't support
+    *structural subtyping* and just having an ``__iter__`` method defined is
+    not sufficient to make mypy treat a class as iterable.
+
+``Generic[...]`` can be omitted from bases if there are
+other base classes that include type variables, such as ``Iterable[T]`` in
+the above example. If you include ``Generic[...]`` in bases, then
+it should list all type variables present in other bases (or more,
+if needed). The order of type variables is defined by the following
+rules:
+
+* If ``Generic[...]`` is present, then the order of variables is
+  always determined by their order in ``Generic[...]``.
+* If there are no ``Generic[...]`` in bases, then all type variables
+  are collected in the order of their first appearance (left to right).
+
+For example:
+
+.. code-block:: python
+
+   from typing import Generic, TypeVar, Any
+
+   T = TypeVar('T')
+   S = TypeVar('S')
+   U = TypeVar('U')
+
+   class One(Generic[T]): ...
+   class Another(Generic[T]): ...
+
+   class First(One[T], Another[S]): ...
+   class Second(One[T], Another[S], Generic[S, U, T]): ...
+
+   x: First[int, str]        # Here T is bound to int, S is bound to str
+   y: Second[int, str, Any]  # Here T is Any, S is int, and U is str
+
+.. _generic-functions:
+
+Generic functions
+*****************
+
+Generic type variables can also be used to define generic functions:
+
+.. code-block:: python
+
+   from typing import TypeVar, Sequence
+
+   T = TypeVar('T')      # Declare type variable
+
+   def first(seq: Sequence[T]) -> T:   # Generic function
+       return seq[0]
+
+As with generic classes, the type variable can be replaced with any
+type. That means ``first`` can be used with any sequence type, and the
+return type is derived from the sequence item type. For example:
+
+.. code-block:: python
+
+   # Assume first defined as above.
+
+   s = first('foo')      # s has type str.
+   n = first([1, 2, 3])  # n has type int.
+
+Note also that a single definition of a type variable (such as ``T``
+above) can be used in multiple generic functions or classes. In this
+example we use the same type variable in two generic functions:
+
+.. code-block:: python
+
+   from typing import TypeVar, Sequence
+
+   T = TypeVar('T')      # Declare type variable
+
+   def first(seq: Sequence[T]) -> T:
+       return seq[0]
+
+   def last(seq: Sequence[T]) -> T:
+       return seq[-1]
+
+.. _generic-methods-and-generic-self:
+
+Generic methods and generic self
+********************************
+
+You can also define generic methods — just use a type variable in the
+method signature that is different from class type variables. In particular,
+``self`` may also be generic, allowing a method to return the most precise
+type known at the point of access.
+
+.. note::
+
+   This feature is experimental. Checking code with type annotations for self
+   arguments is still not fully implemented. Mypy may disallow valid code or
+   allow unsafe code.
+
+In this way, for example, you can typecheck chaining of setter methods:
+
+.. code-block:: python
+
+   from typing import TypeVar
+
+   T = TypeVar('T', bound='Shape')
+
+   class Shape:
+       def set_scale(self: T, scale: float) -> T:
+           self.scale = scale
+           return self
+
+   class Circle(Shape):
+       def set_radius(self, r: float) -> 'Circle':
+           self.radius = r
+           return self
+
+   class Square(Shape):
+       def set_width(self, w: float) -> 'Square':
+           self.width = w
+           return self
+
+   circle = Circle().set_scale(0.5).set_radius(2.7)  # type: Circle
+   square = Square().set_scale(0.5).set_width(3.2)  # type: Square
+
+Without using generic ``self``, the last two lines could not be type-checked properly.
+
+Other uses are factory methods, such as copy and deserialization.
+For class methods, you can also define generic ``cls``, using ``Type[T]``:
+
+.. code-block:: python
+
+   from typing import TypeVar, Tuple, Type
+
+   T = TypeVar('T', bound='Friend')
+
+   class Friend:
+       other = None  # type: Friend
+
+       @classmethod
+       def make_pair(cls: Type[T]) -> Tuple[T, T]:
+           a, b = cls(), cls()
+           a.other = b
+           b.other = a
+           return a, b
+
+   class SuperFriend(Friend):
+       pass
+
+   a, b = SuperFriend.make_pair()
+
+Note that when overriding a method with generic ``self``, you must either
+return a generic ``self`` too, or return an instance of the current class.
+In the latter case, you must implement this method in all future subclasses.
+
+Note also that mypy cannot always verify that the implementation of a copy
+or a deserialization method returns the actual type of self. Therefore
+you may need to silence mypy inside these methods (but not at the call site),
+possibly by making use of the ``Any`` type.
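+
+For example, here is a minimal sketch of a copy-style method that uses
+``cast()`` inside the method body (the class and method names are just for
+illustration):
+
+.. code-block:: python
+
+   import copy
+   from typing import TypeVar, cast
+
+   T = TypeVar('T', bound='Prototype')
+
+   class Prototype:
+       def clone(self: T) -> T:
+           new = copy.copy(self)
+           # If mypy cannot verify that the copy has type T, a cast
+           # inside the method silences the check without affecting
+           # call sites, which still get the precise type.
+           return cast(T, new)
+
+   class Door(Prototype):
+       pass
+
+   d = Door().clone()  # Inferred type is Door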
+
+.. _variance-of-generics:
+
+Variance of generic types
+*************************
+
+There are three main kinds of generic types with respect to subtype
+relations between them: invariant, covariant, and contravariant.
+Assuming that we have a pair of types ``A`` and ``B``, and ``B`` is
+a subtype of ``A``, these are defined as follows:
+
+* A generic class ``MyCovGen[T, ...]`` is called covariant in type variable
+  ``T`` if ``MyCovGen[B, ...]`` is always a subtype of ``MyCovGen[A, ...]``.
+* A generic class ``MyContraGen[T, ...]`` is called contravariant in type
+  variable ``T`` if ``MyContraGen[A, ...]`` is always a subtype of
+  ``MyContraGen[B, ...]``.
+* A generic class ``MyInvGen[T, ...]`` is called invariant in ``T`` if neither
+  of the above is true.
+
+Let us illustrate this with a few simple examples:
+
+* ``Union`` is covariant in all variables: ``Union[Cat, int]`` is a subtype
+  of ``Union[Animal, int]``,
+  ``Union[Dog, int]`` is also a subtype of ``Union[Animal, int]``, etc.
+  Most immutable containers such as ``Sequence`` and ``FrozenSet`` are also
+  covariant.
+* ``Callable`` is an example of a type that is contravariant in its
+  argument types: ``Callable[[Employee], int]`` is a subtype of
+  ``Callable[[Manager], int]``. To understand this, consider a function:
+
+  .. code-block:: python
+
+     def salaries(staff: List[Manager],
+                  accountant: Callable[[Manager], int]) -> List[int]: ...
+
+  this function needs a callable that can calculate a salary for managers, and
+  if we give it a callable that can calculate a salary for an arbitrary
+  employee, then it is still safe.
+* ``List`` is an invariant generic type. Naively, one would think
+  that it is covariant, but let us consider this code:
+
+  .. code-block:: python
+
+     class Shape:
+         pass
+     class Circle(Shape):
+         def rotate(self):
+             ...
+
+     def add_one(things: List[Shape]) -> None:
+         things.append(Shape())
+
+     my_things: List[Circle] = []
+     add_one(my_things)     # This may appear safe, but...
+     my_things[0].rotate()  # ...this will fail
+
+  Another example of an invariant type is ``Dict``; most mutable containers
+  are invariant.
+
+By default, mypy assumes that all user-defined generics are invariant.
+To declare a given generic class as covariant or contravariant use
+type variables defined with special keyword arguments ``covariant`` or
+``contravariant``. For example:
+
+.. code-block:: python
+
+   from typing import Generic, TypeVar
+   T_co = TypeVar('T_co', covariant=True)
+
+   class Box(Generic[T_co]):  # this type is declared covariant
+       def __init__(self, content: T_co) -> None:
+           self._content = content
+       def get_content(self) -> T_co:
+           return self._content
+
+   def look_into(box: Box[Animal]): ...
+   my_box = Box(Cat())
+   look_into(my_box)  # OK, but mypy would complain here for an invariant type
+
+.. _type-variable-value-restriction:
+
+Type variables with value restriction
+*************************************
+
+By default, a type variable can be replaced with any type. However, sometimes
+it's useful to have a type variable that can only have some specific types
+as its value. A typical example is a type variable that can only have values
+``str`` and ``bytes``:
+
+.. code-block:: python
+
+   from typing import TypeVar
+
+   AnyStr = TypeVar('AnyStr', str, bytes)
+
+This is actually such a common type variable that ``AnyStr`` is
+defined in ``typing`` and we don't need to define it ourselves.
+
+We can use ``AnyStr`` to define a function that can concatenate
+two strings or bytes objects, but it can't be called with other
+argument types:
+
+.. code-block:: python
+
+   from typing import AnyStr
+
+   def concat(x: AnyStr, y: AnyStr) -> AnyStr:
+       return x + y
+
+   concat('a', 'b')    # Okay
+   concat(b'a', b'b')  # Okay
+   concat(1, 2)        # Error!
+
+Note that this is different from a union type, since combinations
+of ``str`` and ``bytes`` are not accepted:
+
+.. code-block:: python
+
+   concat('string', b'bytes')   # Error!
+
+In this case, this is exactly what we want, since it's not possible
+to concatenate a string and a bytes object! The type checker
+will reject this function:
+
+.. code-block:: python
+
+   def union_concat(x: Union[str, bytes], y: Union[str, bytes]) -> Union[str, bytes]:
+       return x + y  # Error: can't concatenate str and bytes
+
+Another interesting special case is calling ``concat()`` with a
+subtype of ``str``:
+
+.. code-block:: python
+
+    class S(str): pass
+
+    ss = concat(S('foo'), S('bar'))
+
+You may expect that the type of ``ss`` is ``S``, but the type is
+actually ``str``: a subtype gets promoted to one of the valid values
+for the type variable, which in this case is ``str``. This is thus
+subtly different from *bounded quantification* in languages such as
+Java, where the return type would be ``S``. The way mypy implements
+this is correct for ``concat``, since ``concat`` actually returns a
+``str`` instance in the above example:
+
+.. code-block:: python
+
+    >>> print(type(ss))
+    <class 'str'>
+
+You can also use a ``TypeVar`` with a restricted set of possible
+values when defining a generic class. For example, mypy uses the type
+``typing.Pattern[AnyStr]`` for the return value of ``re.compile``,
+since regular expressions can be based on a string or a bytes pattern.
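+
+For example, here is a minimal sketch of a generic class parameterized by a
+value-restricted type variable (the class name is just for illustration):
+
+.. code-block:: python
+
+   from typing import AnyStr, Generic, List
+
+   class Chunks(Generic[AnyStr]):
+       def __init__(self) -> None:
+           self.parts = []  # type: List[AnyStr]
+
+       def add(self, part: AnyStr) -> None:
+           self.parts.append(part)
+
+   c = Chunks[str]()
+   c.add('spam')    # OK
+   c.add(b'eggs')   # Error: expected str, not bytes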
+
+.. _type-variable-upper-bound:
+
+Type variables with upper bounds
+********************************
+
+A type variable can also be restricted to having values that are
+subtypes of a specific type. This type is called the upper bound of
+the type variable, and is specified with the ``bound=...`` keyword
+argument to ``TypeVar``.
+
+.. code-block:: python
+
+   from typing import TypeVar, SupportsAbs
+
+   T = TypeVar('T', bound=SupportsAbs[float])
+
+In the definition of a generic function that uses such a type variable
+``T``, the type represented by ``T`` is assumed to be a subtype of
+its upper bound, so the function can use methods of the upper bound on
+values of type ``T``.
+
+.. code-block:: python
+
+   def largest_in_absolute_value(*xs: T) -> T:
+       return max(xs, key=abs)  # Okay, because T is a subtype of SupportsAbs[float].
+
+In a call to such a function, the type ``T`` must be replaced by a
+type that is a subtype of its upper bound. Continuing the example
+above,
+
+.. code-block:: python
+
+   largest_in_absolute_value(-3.5, 2)   # Okay, has type float.
+   largest_in_absolute_value(5+6j, 7)   # Okay, has type complex.
+   largest_in_absolute_value('a', 'b')  # Error: 'str' is not a subtype of SupportsAbs[float].
+
+Type parameters of generic classes may also have upper bounds, which
+restrict the valid values for the type parameter in the same way.
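+
+For example, here is a minimal sketch of a generic class whose type
+parameter has an upper bound (the class name is just for illustration):
+
+.. code-block:: python
+
+   from typing import Generic, List, SupportsAbs, TypeVar
+
+   T = TypeVar('T', bound=SupportsAbs[float])
+
+   class Magnitudes(Generic[T]):
+       def __init__(self, items: List[T]) -> None:
+           self.items = items
+
+       def largest(self) -> float:
+           return max(abs(x) for x in self.items)
+
+   Magnitudes([3.5, -2.0]).largest()  # OK
+   Magnitudes(['a', 'b'])             # Error: str is not a subtype of SupportsAbs[float]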
+
+A type variable may not have both a value restriction (see
+:ref:`type-variable-value-restriction`) and an upper bound.
+
+.. _declaring-decorators:
+
+Declaring decorators
+********************
+
+One common application of type variable upper bounds is in declaring a
+decorator that preserves the signature of the function it decorates,
+regardless of that signature. Here's a complete example:
+
+.. code-block:: python
+
+   from typing import Any, Callable, TypeVar, Tuple, cast
+
+   FuncType = Callable[..., Any]
+   F = TypeVar('F', bound=FuncType)
+
+   # A decorator that preserves the signature.
+   def my_decorator(func: F) -> F:
+       def wrapper(*args, **kwds):
+           print("Calling", func)
+           return func(*args, **kwds)
+       return cast(F, wrapper)
+
+   # A decorated function.
+   @my_decorator
+   def foo(a: int) -> str:
+       return str(a)
+
+   # Another.
+   @my_decorator
+   def bar(x: float, y: float) -> Tuple[float, float, bool]:
+       return (x, y, x > y)
+
+   a = foo(12)
+   reveal_type(a)  # str
+   b = bar(3.14, 0)
+   reveal_type(b)  # Tuple[float, float, bool]
+   foo('x')    # Type check error: incompatible type "str"; expected "int"
+
+From the final block we see that the signatures of the decorated
+functions ``foo()`` and ``bar()`` are the same as those of the original
+functions (before the decorator is applied).
+
+The bound on ``F`` is used so that calling the decorator on a
+non-function (e.g. ``my_decorator(1)``) will be rejected.
+
+Also note that the ``wrapper()`` function is not type-checked. Wrapper
+functions are typically small enough that this is not a big
+problem. This is also the reason for the ``cast()`` call in the
+``return`` statement in ``my_decorator()``. See :ref:`casts`.
diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst
new file mode 100644
index 0000000..a41c125
--- /dev/null
+++ b/docs/source/getting_started.rst
@@ -0,0 +1,24 @@
+.. _getting-started:
+
+Getting started
+===============
+
+Installation
+************
+
+Mypy requires Python 3.3 or later.  Once you've `installed Python 3 <https://www.python.org/downloads/>`_, you can install mypy with:
+
+.. code-block:: text
+
+    $ python3 -m pip install mypy
+
+Installing from source
+**********************
+
+To install mypy from source, clone the github repository and then run pip install locally:
+
+.. code-block:: text
+
+    $ git clone https://github.com/python/mypy.git
+    $ cd mypy
+    $ sudo python3 -m pip install --upgrade .
diff --git a/docs/source/index.rst b/docs/source/index.rst
new file mode 100644
index 0000000..90cc749
--- /dev/null
+++ b/docs/source/index.rst
@@ -0,0 +1,42 @@
+.. Mypy documentation master file, created by
+   sphinx-quickstart on Sun Sep 14 19:50:35 2014.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to Mypy documentation!
+==============================
+
+Mypy is a static type checker for Python.
+
+.. toctree::
+   :maxdepth: 2
+
+   introduction
+   basics
+   getting_started
+   builtin_types
+   python2
+   type_inference_and_annotations
+   kinds_of_types
+   class_basics
+   dynamic_typing
+   function_overloading
+   casts
+   duck_type_compatibility
+   common_issues
+   generics
+   supported_python_features
+   additional_features
+   command_line
+   config_file
+   python36
+   faq
+   cheat_sheet
+   cheat_sheet_py3
+   revision_history
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`search`
diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst
new file mode 100644
index 0000000..3bcd0ad
--- /dev/null
+++ b/docs/source/introduction.rst
@@ -0,0 +1,30 @@
+Introduction
+============
+
+Mypy is a static type checker for Python. If you sprinkle your code
+with type annotations, mypy can type check your code and find common bugs.
+As mypy is a static analyzer, or a lint-like tool, your code's type
+annotations are just hints and don't interfere with running your program.
+You run your program with a standard Python interpreter, and the annotations
+are treated primarily as comments.
+
+Using the Python 3 function annotation syntax (using the PEP 484 notation) or
+a comment-based annotation syntax for Python 2 code, you will be able to
+efficiently annotate your code and use mypy to check the code for common
+errors. Mypy has a powerful, easy-to-use, type system with modern features
+such as type inference, generics, function types, tuple types and
+union types.
+
+As a developer, you decide how to use mypy in your workflow. You can always
+escape to dynamic typing as mypy's approach to static typing doesn't restrict
+what you can do in your programs. Using mypy will make your programs easier to
+debug, maintain, and understand.
+
+This documentation provides a short introduction to mypy. It will help you
+get started writing statically typed code. Knowledge of Python and a
+statically typed object-oriented language, such as Java, is assumed.
+
+.. note::
+
+   Mypy is still experimental. There will be changes
+   that break backward compatibility.
diff --git a/docs/source/kinds_of_types.rst b/docs/source/kinds_of_types.rst
new file mode 100644
index 0000000..0c3432f
--- /dev/null
+++ b/docs/source/kinds_of_types.rst
@@ -0,0 +1,1359 @@
+Kinds of types
+==============
+
+User-defined types
+******************
+
+Each class is also a type. Any instance of a subclass is also
+compatible with all superclasses. All values are compatible with the
+``object`` type (and also the ``Any`` type).
+
+.. code-block:: python
+
+   class A:
+       def f(self) -> int:        # Type of self inferred (A)
+           return 2
+
+   class B(A):
+       def f(self) -> int:
+            return 3
+       def g(self) -> int:
+           return 4
+
+   a = B() # type: A  # OK (explicit type for a; override type inference)
+   print(a.f())       # 3
+   a.g()              # Type check error: A has no method g
+
+The Any type
+************
+
+A value with the ``Any`` type is dynamically typed. Mypy doesn't know
+anything about the possible runtime types of such a value. Any
+operations are permitted on the value, and the operations are checked
+at runtime, similar to normal Python code without type annotations.
+
+``Any`` is compatible with every other type, and vice versa. No
+implicit type check is inserted when assigning a value of type ``Any``
+to a variable with a more precise type:
+
+.. code-block:: python
+
+   a = None  # type: Any
+   s = ''    # type: str
+   a = 2     # OK
+   s = a     # OK
+
+Declared (and inferred) types are *erased* at runtime. They are
+basically treated as comments, and thus the above code does not
+generate a runtime error, even though ``s`` gets an ``int`` value when
+the program is run. Note that the declared type of ``s`` is actually
+``str``!
+
+If you do not annotate a function's return value or argument types, these
+default to ``Any``:
+
+.. code-block:: python
+
+   def show_heading(s) -> None:
+       print('=== ' + s + ' ===')  # No static type checking, as s has type Any
+
+   show_heading(1)  # OK (runtime error only; mypy won't generate an error)
+
+You should give a statically typed function an explicit ``None``
+return type even if it doesn't return a value, as this lets mypy catch
+additional type errors:
+
+.. code-block:: python
+
+   def wait(t: float):  # Implicit Any return value
+       print('Waiting...')
+       time.sleep(t)
+
+   if wait(2) > 1:   # Mypy doesn't catch this error!
+       ...
+
+If we had used an explicit ``None`` return type, mypy would have caught
+the error:
+
+.. code-block:: python
+
+   def wait(t: float) -> None:
+       print('Waiting...')
+       time.sleep(t)
+
+   if wait(2) > 1:   # Error: can't compare None and int
+       ...
+
+The ``Any`` type is discussed in more detail in section :ref:`dynamic_typing`.
+
+.. note::
+
+  A function without any types in the signature is dynamically
+  typed. The body of a dynamically typed function is not checked
+  statically, and local variables have implicit ``Any`` types.
+  This makes it easier to migrate legacy Python code to mypy, as
+  mypy won't complain about dynamically typed functions.
+
+.. _tuple-types:
+
+Tuple types
+***********
+
+The type ``Tuple[T1, ..., Tn]`` represents a tuple with the item types ``T1``, ..., ``Tn``:
+
+.. code-block:: python
+
+   def f(t: Tuple[int, str]) -> None:
+       t = 1, 'foo'    # OK
+       t = 'foo', 1    # Type check error
+
+A tuple type of this kind has a fixed number of items (2 in
+the above example). Tuples can also be used as immutable,
+varying-length sequences. You can use the type ``Tuple[T, ...]`` (with
+a literal ``...`` -- it's part of the syntax) for this
+purpose. Example:
+
+.. code-block:: python
+
+    def print_squared(t: Tuple[int, ...]) -> None:
+        for n in t:
+            print(n, n ** 2)
+
+    print_squared(())           # OK
+    print_squared((1, 3, 5))    # OK
+    print_squared([1, 2])       # Error: only a tuple is valid
+
+.. note::
+
+   Usually it's a better idea to use ``Sequence[T]`` instead of ``Tuple[T, ...]``, as
+   ``Sequence`` is also compatible with lists and other non-tuple sequences.
+
+.. note::
+
+   ``Tuple[...]`` is not valid as a base class outside stub files. This is a
+   limitation of the ``typing`` module. One way to work around
+   this is to use a named tuple as a base class (see section :ref:`named-tuples`).
+
+.. _callable-types:
+
+Callable types (and lambdas)
+****************************
+
+You can pass around function objects and bound methods in statically
+typed code. The type of a function that accepts arguments ``A1``, ..., ``An``
+and returns ``Rt`` is ``Callable[[A1, ..., An], Rt]``. Example:
+
+.. code-block:: python
+
+   from typing import Callable
+
+   def twice(i: int, next: Callable[[int], int]) -> int:
+       return next(next(i))
+
+   def add(i: int) -> int:
+       return i + 1
+
+   print(twice(3, add))   # 5
+
+You can only have positional arguments, and only ones without default
+values, in callable types. These cover the vast majority of uses of
+callable types, but sometimes this isn't quite enough. Mypy recognizes
+a special form ``Callable[..., T]`` (with a literal ``...``) which can
+be used in less typical cases. It is compatible with arbitrary
+callable objects that return a type compatible with ``T``, independent
+of the number, types or kinds of arguments. Mypy lets you call such
+callable values with arbitrary arguments, without any checking -- in
+this respect they are treated similarly to a ``(*args: Any, **kwargs:
+Any)`` function signature. Example:
+
+.. code-block:: python
+
+   from typing import Callable
+
+   def arbitrary_call(f: Callable[..., int]) -> int:
+       return f('x') + f(y=2)  # OK
+
+   arbitrary_call(ord)   # No static error, but fails at runtime
+   arbitrary_call(open)  # Error: does not return an int
+   arbitrary_call(1)     # Error: 'int' is not callable
+
+Lambdas are also supported. The lambda argument and return value types
+cannot be given explicitly; they are always inferred based on context
+using bidirectional type inference:
+
+.. code-block:: python
+
+   l = map(lambda x: x + 1, [1, 2, 3])   # Infer x as int and l as List[int]
+
+If you want to give the argument or return value types explicitly, use
+an ordinary, perhaps nested function definition.
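+
+For example, here is a minimal sketch using a nested function where a
+lambda would not allow explicit types:
+
+.. code-block:: python
+
+   def increment_all(items):
+       def add_one(x: int) -> int:  # Explicitly typed, unlike a lambda
+           return x + 1
+       return list(map(add_one, items))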
+
+.. _extended_callable:
+
+Extended Callable types
+***********************
+
+As an experimental mypy extension, you can specify ``Callable`` types
+that support keyword arguments, optional arguments, and more.  Where
+you specify the arguments of a Callable, you can choose to supply just
+the type of a nameless positional argument, or an "argument specifier"
+representing a more complicated form of argument.  This allows one to
+more closely emulate the full range of possibilities given by the
+``def`` statement in Python.
+
+As an example, here's a complicated function definition and the
+corresponding ``Callable``:
+
+.. code-block:: python
+
+   from typing import Callable
+   from mypy_extensions import (Arg, DefaultArg, NamedArg,
+                                DefaultNamedArg, VarArg, KwArg)
+
+   def func(__a: int,  # This convention is for nameless arguments
+            b: int,
+            c: int = 0,
+            *args: int,
+            d: int,
+            e: int = 0,
+            **kwargs: int) -> int:
+       ...
+
+   F = Callable[[int,  # Or Arg(int)
+                 Arg(int, 'b'),
+                 DefaultArg(int, 'c'),
+                 VarArg(int),
+                 NamedArg(int, 'd'),
+                 DefaultNamedArg(int, 'e'),
+                 KwArg(int)],
+                int]
+
+   f: F = func
+
+Argument specifiers are special function calls that can specify the
+following aspects of an argument:
+
+- its type (the only thing that the basic format supports)
+
+- its name (if it has one)
+
+- whether it may be omitted
+
+- whether it may or must be passed using a keyword
+
+- whether it is a ``*args`` argument (representing the remaining
+  positional arguments)
+
+- whether it is a ``**kwargs`` argument (representing the remaining
+  keyword arguments)
+
+The following functions are available in ``mypy_extensions`` for this
+purpose:
+
+.. code-block:: python
+
+   def Arg(type=Any, name=None):
+       # A normal, mandatory, positional argument.
+       # If the name is specified it may be passed as a keyword.
+
+   def DefaultArg(type=Any, name=None):
+       # An optional positional argument (i.e. with a default value).
+       # If the name is specified it may be passed as a keyword.
+
+   def NamedArg(type=Any, name=None):
+       # A mandatory keyword-only argument.
+
+   def DefaultNamedArg(type=Any, name=None):
+       # An optional keyword-only argument (i.e. with a default value).
+
+   def VarArg(type=Any):
+       # A *args-style variadic positional argument.
+       # A single VarArg() specifier represents all remaining
+       # positional arguments.
+
+   def KwArg(type=Any):
+       # A **kwargs-style variadic keyword argument.
+       # A single KwArg() specifier represents all remaining
+       # keyword arguments.
+
+In all cases, the ``type`` argument defaults to ``Any``, and if the
+``name`` argument is omitted the argument has no name (the name is
+required for ``NamedArg`` and ``DefaultNamedArg``).  A basic
+``Callable`` such as
+
+.. code-block:: python
+
+   MyFunc = Callable[[int, str, int], float]
+
+is equivalent to the following:
+
+.. code-block:: python
+
+   MyFunc = Callable[[Arg(int), Arg(str), Arg(int)], float]
+
+A ``Callable`` with unspecified argument types, such as
+
+.. code-block:: python
+
+   MyOtherFunc = Callable[..., int]
+
+is (roughly) equivalent to
+
+.. code-block:: python
+
+   MyOtherFunc = Callable[[VarArg(), KwArg()], int]
+
+.. note::
+
+   This feature is experimental.  Details of the implementation may
+   change and there may be unknown limitations. **IMPORTANT:**
+   Each of the functions above currently just returns its ``type``
+   argument, so the information contained in the argument specifiers
+   is not available at runtime.  This limitation is necessary for
+   backwards compatibility with the existing ``typing.py`` module as
+   present in the Python 3.5+ standard library and distributed via
+   PyPI.
+
+.. _union-types:
+
+Union types
+***********
+
+Python functions often accept values of two or more different
+types. You can use overloading to model this in statically typed code,
+but union types can make code like this easier to write.
+
+Use the ``Union[T1, ..., Tn]`` type constructor to construct a union
+type. For example, the type ``Union[int, str]`` is compatible with
+both integers and strings. You can use an ``isinstance()`` check to
+narrow down the type to a specific type:
+
+.. code-block:: python
+
+   from typing import Union
+
+   def f(x: Union[int, str]) -> None:
+       x + 1     # Error: str + int is not valid
+       if isinstance(x, int):
+           # Here type of x is int.
+           x + 1      # OK
+       else:
+           # Here type of x is str.
+           x + 'a'    # OK
+
+   f(1)    # OK
+   f('x')  # OK
+   f(1.1)  # Error
+
+.. _optional:
+
+The type of None and optional types
+***********************************
+
+Mypy treats the type of ``None`` as special. ``None`` is a valid value
+for every type, which resembles ``null`` in Java. Unlike Java, mypy
+doesn't treat primitive types
+specially: ``None`` is also valid for primitive types such as ``int``
+and ``float``.
+
+.. note::
+
+   See :ref:`strict_optional` for an experimental mode which allows
+   mypy to check ``None`` values precisely.
+
+When initializing a variable as ``None``, ``None`` is usually an
+empty place-holder value, and the actual value has a different type.
+This is why you need to annotate an attribute in a case like this:
+
+.. code-block:: python
+
+    class A:
+        def __init__(self) -> None:
+            self.count = None  # type: int
+
+Mypy will complain if you omit the type annotation, as it wouldn't be
+able to infer a non-trivial type for the ``count`` attribute
+otherwise.
+
+Mypy generally uses the first assignment to a variable to
+infer the type of the variable. However, if you assign both a ``None``
+value and a non-``None`` value in the same scope, mypy can often do
+the right thing:
+
+.. code-block:: python
+
+   def f(i: int) -> None:
+       n = None  # Inferred type int because of the assignment below
+       if i > 0:
+            n = i
+       ...
+
+Often it's useful to know whether a variable can be
+``None``. For example, this function accepts a ``None`` argument,
+but it's not obvious from its signature:
+
+.. code-block:: python
+
+    def greeting(name: str) -> str:
+        if name:
+            return 'Hello, {}'.format(name)
+        else:
+            return 'Hello, stranger'
+
+    print(greeting('Python'))  # Okay!
+    print(greeting(None))      # Also okay!
+
+Mypy lets you use ``Optional[t]`` to document that ``None`` is a
+valid argument type:
+
+.. code-block:: python
+
+    from typing import Optional
+
+    def greeting(name: Optional[str]) -> str:
+        if name:
+            return 'Hello, {}'.format(name)
+        else:
+            return 'Hello, stranger'
+
+Mypy treats this as semantically equivalent to the previous example,
+since ``None`` is implicitly valid for any type, but it's much more
+useful for a programmer who is reading the code. You can equivalently
+use ``Union[str, None]``, but ``Optional`` is shorter and more
+idiomatic.
+
+.. note::
+
+    ``None`` is also used as the return type for functions that don't
+    return a value, i.e. that implicitly return ``None``. Mypy doesn't
+    use ``NoneType`` for this, since it would
+    look awkward, even though that is the real name of the type of ``None``
+    (try ``type(None)`` in the interactive interpreter to see for yourself).
+
+.. _strict_optional:
+
+Experimental strict optional type and None checking
+***************************************************
+
+Currently, ``None`` is a valid value for each type, similar to
+``null`` or ``NULL`` in many languages. However, you can use the
+experimental ``--strict-optional`` command line option to tell mypy
+that types should not include ``None``
+by default. The ``Optional`` type modifier is then used to define
+a type variant that includes ``None``, such as ``Optional[int]``:
+
+.. code-block:: python
+
+   from typing import Optional
+
+   def f() -> Optional[int]:
+       return None  # OK
+
+   def g() -> int:
+       ...
+       return None  # Error: None not compatible with int
+
+Also, most operations will not be allowed on unguarded ``None``
+or ``Optional`` values:
+
+.. code-block:: python
+
+   def f(x: Optional[int]) -> int:
+       return x + 1  # Error: Cannot add None and int
+
+Instead, an explicit ``None`` check is required. Mypy has
+powerful type inference that lets you use regular Python
+idioms to guard against ``None`` values. For example, mypy
+recognizes ``is None`` checks:
+
+.. code-block:: python
+
+   def f(x: Optional[int]) -> int:
+       if x is None:
+           return 0
+       else:
+           # The inferred type of x is just int here.
+           return x + 1
+
+Mypy will infer the type of ``x`` to be ``int`` in the else block due to the
+check against ``None`` in the if condition.
+
+.. note::
+
+    ``--strict-optional`` is experimental and still has known issues.
+
+.. _noreturn:
+
+The NoReturn type
+*****************
+
+Mypy provides support for functions that never return. For
+example, a function that unconditionally raises an exception:
+
+.. code-block:: python
+
+   from mypy_extensions import NoReturn
+
+   def stop() -> NoReturn:
+       raise Exception('no way')
+
+Mypy will ensure that functions annotated as returning ``NoReturn``
+truly never return, either implicitly or explicitly. Mypy will also
+recognize that the code after calls to such functions is unreachable
+and will behave accordingly:
+
+.. code-block:: python
+
+   def f(x: int) -> int:
+       if x == 0:
+           return x
+       stop()
+       return 'whatever works'  # No error in an unreachable block
+
+Install ``mypy_extensions`` using pip to use ``NoReturn`` in your code.
+Python 3 command line:
+
+.. code-block:: text
+
+    python3 -m pip install --upgrade mypy-extensions
+
+This works for Python 2:
+
+.. code-block:: text
+
+    pip install --upgrade mypy-extensions
+
+
+Class name forward references
+*****************************
+
+Python does not allow references to a class object before the class is
+defined. Thus this code does not work as expected:
+
+.. code-block:: python
+
+   def f(x: A) -> None:  # Error: Name A not defined
+       ...
+
+   class A:
+       ...
+
+In cases like these you can enter the type as a string literal — this
+is a *forward reference*:
+
+.. code-block:: python
+
+   def f(x: 'A') -> None:  # OK
+       ...
+
+   class A:
+       ...
+
+Of course, instead of using a string literal type, you could move the
+function definition after the class definition. This is not always
+desirable or even possible, though.
+
+Any type can be entered as a string literal, and you can combine
+string-literal types with non-string-literal types freely:
+
+.. code-block:: python
+
+   def f(a: List['A']) -> None: ...  # OK
+   def g(n: 'int') -> None: ...      # OK, though not useful
+
+   class A: pass
+
+String literal types are never needed in ``# type:`` comments.
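+
+For example, a forward reference inside a type comment works without quotes:
+
+.. code-block:: python
+
+   def f(x):
+       # type: (A) -> None
+       ...
+
+   class A: ...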
+
+String literal types must be defined (or imported) later *in the same
+module*.  They cannot be used to leave cross-module references
+unresolved.  (For dealing with import cycles, see
+:ref:`import-cycles`.)
+
+.. _type-aliases:
+
+Type aliases
+************
+
+In certain situations, type names may end up being long and painful to type:
+
+.. code-block:: python
+
+   def f() -> Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]:
+       ...
+
+When cases like this arise, you can define a type alias by simply
+assigning the type to a variable:
+
+.. code-block:: python
+
+   AliasType = Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]
+
+   # Now we can use AliasType in place of the full name:
+
+   def f() -> AliasType:
+       ...
+
+Type aliases can be generic. In this case they can be used in two ways:
+subscripted aliases are equivalent to the original types with the type
+variables substituted (the number of type arguments must match the number
+of free type variables in the generic type alias), while unsubscripted
+aliases are treated as the original types with the free variables replaced
+with ``Any``. Examples (following `PEP 484
+<https://www.python.org/dev/peps/pep-0484/#type-aliases>`_):
+
+.. code-block:: python
+
+    from typing import TypeVar, Iterable, Tuple, Union, Callable
+    S = TypeVar('S')
+    TInt = Tuple[int, S]
+    UInt = Union[S, int]
+    CBack = Callable[..., S]
+
+    def response(query: str) -> UInt[str]:  # Same as Union[str, int]
+        ...
+    def activate(cb: CBack[S]) -> S:        # Same as Callable[..., S]
+        ...
+    table_entry: TInt  # Same as Tuple[int, Any]
+
+    T = TypeVar('T', int, float, complex)
+    Vec = Iterable[Tuple[T, T]]
+
+    def inproduct(v: Vec[T]) -> T:
+        return sum(x*y for x, y in v)
+
+    def dilate(v: Vec[T], scale: T) -> Vec[T]:
+        return ((x * scale, y * scale) for x, y in v)
+
+    v1: Vec[int] = []      # Same as Iterable[Tuple[int, int]]
+    v2: Vec = []           # Same as Iterable[Tuple[Any, Any]]
+    v3: Vec[int, int] = [] # Error: Invalid alias, too many type arguments!
+
+Type aliases can be imported from modules like any other names. Aliases can
+target other aliases (although building complex chains of aliases is not
+recommended, as this impedes code readability and defeats the purpose of
+using aliases).
+Following previous examples:
+
+.. code-block:: python
+
+    from typing import TypeVar, Generic, Optional
+    from first_example import AliasType
+    from second_example import Vec
+
+    def fun() -> AliasType:
+        ...
+
+    T = TypeVar('T')
+    class NewVec(Generic[T], Vec[T]):
+        ...
+    for i, j in NewVec[int]():
+        ...
+
+    OIntVec = Optional[Vec[int]]
+
+.. note::
+
+    A type alias does not create a new type. It's just a shorthand notation for
+    another type -- it's equivalent to the target type. For generic type aliases
+    this means that variance of type variables used for alias definition does not
+    apply to aliases. A parameterized generic alias is treated simply as an original
+    type with the corresponding type variables substituted.
+
+.. _newtypes:
+
+NewTypes
+********
+
+(Freely after `PEP 484
+<https://www.python.org/dev/peps/pep-0484/#newtype-helper-function>`_.)
+
+There are also situations where a programmer might want to avoid logical errors by
+creating simple classes. For example:
+
+.. code-block:: python
+
+    class UserId(int):
+        pass
+
+    def get_by_user_id(user_id: UserId):
+        ...
+
+However, this approach introduces some runtime overhead. To avoid this, the typing
+module provides a helper function ``NewType`` that creates simple unique types with
+almost zero runtime overhead. Mypy will treat the statement
+``Derived = NewType('Derived', Base)`` as being roughly equivalent to the following
+definition:
+
+.. code-block:: python
+
+    class Derived(Base):
+        def __init__(self, _x: Base) -> None:
+            ...
+
+However, at runtime, ``NewType('Derived', Base)`` will return a dummy function that
+simply returns its argument:
+
+.. code-block:: python
+
+    def Derived(_x):
+        return _x
+
+Mypy will require explicit casts from ``int`` where ``UserId`` is expected, while
+implicitly casting from ``UserId`` where ``int`` is expected. Examples:
+
+.. code-block:: python
+
+    from typing import NewType
+
+    UserId = NewType('UserId', int)
+
+    def name_by_id(user_id: UserId) -> str:
+        ...
+
+    UserId('user')          # Fails type check
+
+    name_by_id(42)          # Fails type check
+    name_by_id(UserId(42))  # OK
+
+    num = UserId(5) + 1     # type: int
+
+``NewType`` accepts exactly two arguments. The first argument must be a string literal
+containing the name of the new type and must equal the name of the variable to which the new
+type is assigned. The second argument must be a properly subclassable class, i.e.,
+not a type construct like ``Union``, etc.
+
+The function returned by ``NewType`` accepts only one argument; this is equivalent to
+supporting only one constructor accepting an instance of the base class (see above).
+Example:
+
+.. code-block:: python
+
+    from typing import NewType
+
+    class PacketId:
+        def __init__(self, major: int, minor: int) -> None:
+            self._major = major
+            self._minor = minor
+
+    TcpPacketId = NewType('TcpPacketId', PacketId)
+
+    packet = PacketId(100, 100)
+    tcp_packet = TcpPacketId(packet)  # OK
+
+    tcp_packet = TcpPacketId(127, 0)  # Fails in type checker and at runtime
+
+Both ``isinstance`` and ``issubclass``, as well as subclassing, will fail for
+``NewType('Derived', Base)`` since function objects don't support these operations.
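+
+For example (continuing the ``UserId`` definition above):
+
+.. code-block:: python
+
+    isinstance(UserId(5), UserId)    # Fails at runtime: UserId is a plain function
+    class PowerUserId(UserId): ...   # Also fails at runtime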
+
+.. note::
+
+    Note that unlike type aliases, ``NewType`` will create an entirely new and
+    unique type when used. The intended purpose of ``NewType`` is to help you
+    detect cases where you accidentally mixed together the old base type and the
+    new derived type.
+
+    For example, the following will successfully typecheck when using type
+    aliases:
+
+    .. code-block:: python
+
+        UserId = int
+
+        def name_by_id(user_id: UserId) -> str:
+            ...
+
+        name_by_id(3)  # ints and UserId are synonymous
+
+    But a similar example using ``NewType`` will not typecheck:
+
+    .. code-block:: python
+
+        from typing import NewType
+
+        UserId = NewType('UserId', int)
+
+        def name_by_id(user_id: UserId) -> str:
+            ...
+
+        name_by_id(3)  # int is not the same as UserId
+
+.. _named-tuples:
+
+Named tuples
+************
+
+Mypy recognizes named tuples and can type check code that defines or
+uses them.  In this example, we can detect code trying to access a
+missing attribute:
+
+.. code-block:: python
+
+    from collections import namedtuple
+
+    Point = namedtuple('Point', ['x', 'y'])
+    p = Point(x=1, y=2)
+    print(p.z)  # Error: Point has no attribute 'z'
+
+If you use ``namedtuple`` to define your named tuple, all the items
+are assumed to have ``Any`` types. That is, mypy doesn't know anything
+about item types. You can use ``typing.NamedTuple`` to also define
+item types:
+
+.. code-block:: python
+
+    from typing import NamedTuple
+
+    Point = NamedTuple('Point', [('x', int),
+                                 ('y', int)])
+    p = Point(x=1, y='x')  # Argument has incompatible type "str"; expected "int"
+
+Python 3.6 introduced an alternative, class-based syntax for named tuples with types.
+Mypy already supports it:
+
+.. code-block:: python
+
+    from typing import NamedTuple
+
+    class Point(NamedTuple):
+        x: int
+        y: int
+
+    p = Point(x=1, y='x')  # Argument has incompatible type "str"; expected "int"
+
+.. _type-of-class:
+
+The type of class objects
+*************************
+
+(Freely after `PEP 484
+<https://www.python.org/dev/peps/pep-0484/#the-type-of-class-objects>`_.)
+
+Sometimes you want to talk about class objects that inherit from a
+given class.  This can be spelled as ``Type[C]`` where ``C`` is a
+class.  In other words, when ``C`` is the name of a class, using ``C``
+to annotate an argument declares that the argument is an instance of
+``C`` (or of a subclass of ``C``), but using ``Type[C]`` as an
+argument annotation declares that the argument is a class object
+deriving from ``C`` (or ``C`` itself).
+
+For example, assume the following classes:
+
+.. code-block:: python
+
+   class User:
+       # Defines fields like name, email
+       ...
+
+   class BasicUser(User):
+       def upgrade(self):
+           """Upgrade to Pro"""
+
+   class ProUser(User):
+       def pay(self):
+           """Pay bill"""
+
+Note that ``ProUser`` doesn't inherit from ``BasicUser``.
+
+Here's a function that creates an instance of one of these classes if
+you pass it the right class object:
+
+.. code-block:: python
+
+   def new_user(user_class):
+       user = user_class()
+       # (Here we could write the user object to a database)
+       return user
+
+How would we annotate this function?  Without ``Type[]`` the best we
+could do would be:
+
+.. code-block:: python
+
+   def new_user(user_class: type) -> User:
+       # Same implementation as before
+       ...
+
+This seems reasonable, except that in the following example, mypy
+doesn't see that the ``buyer`` variable has type ``ProUser``:
+
+.. code-block:: python
+
+   buyer = new_user(ProUser)
+   buyer.pay()  # Rejected, not a method on User
+
+However, using ``Type[]`` and a type variable with an upper bound (see
+:ref:`type-variable-upper-bound`) we can do better:
+
+.. code-block:: python
+
+   from typing import Type, TypeVar
+
+   U = TypeVar('U', bound=User)
+
+   def new_user(user_class: Type[U]) -> U:
+       # Same implementation as before
+       ...
+
+Now mypy will infer the correct type of the result when we call
+``new_user()`` with a specific subclass of ``User``:
+
+.. code-block:: python
+
+   beginner = new_user(BasicUser)  # Inferred type is BasicUser
+   beginner.upgrade()  # OK
+
+.. note::
+
+   The value corresponding to ``Type[C]`` must be an actual class
+   object that's a subtype of ``C``.  Its constructor must be
+   compatible with the constructor of ``C``.  If ``C`` is a type
+   variable, its upper bound must be a class object.
+
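+For instance, continuing the ``new_user()`` example, only class objects deriving
+from ``User`` are accepted (the error wording below is illustrative):
+
+.. code-block:: python
+
+   new_user(BasicUser)  # OK
+   new_user(int)        # Error: "int" is not a subclass of "User"
+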
+For more details about ``Type[]`` see `PEP 484
+<https://www.python.org/dev/peps/pep-0484/#the-type-of-class-objects>`_.
+
+.. _text-and-anystr:
+
+Text and AnyStr
+***************
+
+Sometimes you may want to write a function which will accept only unicode
+strings. This can be challenging to do in a codebase intended to run in
+both Python 2 and Python 3 since ``str`` means something different in both
+versions and ``unicode`` is not a keyword in Python 3.
+
+To help solve this issue, use ``typing.Text`` which is aliased to
+``unicode`` in Python 2 and to ``str`` in Python 3. This allows you to
+indicate that a function should accept only unicode strings in a
+cross-compatible way:
+
+.. code-block:: python
+
+   from typing import Text
+
+   def unicode_only(s: Text) -> Text:
+       return s + u'\u2713'
+
+In other cases, you may want to write a function that will work with any
+kind of string but will not let you mix two different string types. To do
+so use ``typing.AnyStr``:
+
+.. code-block:: python
+
+   from typing import AnyStr
+
+   def concat(x: AnyStr, y: AnyStr) -> AnyStr:
+       return x + y
+
+   concat('a', 'b')     # Okay
+   concat(b'a', b'b')   # Okay
+   concat('a', b'b')    # Error: cannot mix bytes and unicode
+
+For more details, see :ref:`type-variable-value-restriction`.
+
+.. note::
+
+   How ``bytes``, ``str``, and ``unicode`` are handled between Python 2 and
+   Python 3 may change in future versions of mypy.
+
+.. _generators:
+
+Generators
+**********
+
+A basic generator that only yields values can be annotated as having a return
+type of either ``Iterator[YieldType]`` or ``Iterable[YieldType]``. For example:
+
+.. code-block:: python
+
+   from typing import Iterator
+
+   def squares(n: int) -> Iterator[int]:
+       for i in range(n):
+           yield i * i
+
+If you want your generator to accept values via the ``send`` method or return
+a value, you should use the
+``Generator[YieldType, SendType, ReturnType]`` generic type instead. For example:
+
+.. code-block:: python
+
+   from typing import Generator
+
+   def echo_round() -> Generator[int, float, str]:
+       sent = yield 0
+       while sent >= 0:
+           sent = yield round(sent)
+       return 'Done'
+
+Note that unlike many other generics in the typing module, the ``SendType`` of
+``Generator`` behaves contravariantly, not covariantly or invariantly.
+
+If you do not plan on receiving or returning values, then set the ``SendType``
+or ``ReturnType`` to ``None``, as appropriate. For example, we could have
+annotated the first example as the following:
+
+.. code-block:: python
+
+   def squares(n: int) -> Generator[int, None, None]:
+       for i in range(n):
+           yield i * i
+
+.. _async-and-await:
+
+Typing async/await
+******************
+
+Mypy supports the ability to type coroutines that use the ``async/await``
+syntax introduced in Python 3.5. For more information regarding coroutines and
+this new syntax, see `PEP 492 <https://www.python.org/dev/peps/pep-0492/>`_.
+
+Functions defined using ``async def`` are typed just like normal functions.
+The return type annotation should be the same as the type of the value you
+expect to get back when ``await``-ing the coroutine.
+
+.. code-block:: python
+
+   import asyncio
+
+   async def format_string(tag: str, count: int) -> str:
+       return 'T-minus {} ({})'.format(count, tag)
+
+   async def countdown_1(tag: str, count: int) -> str:
+       while count > 0:
+           my_str = await format_string(tag, count)  # has type 'str'
+           print(my_str)
+           await asyncio.sleep(0.1)
+           count -= 1
+       return "Blastoff!"
+
+   loop = asyncio.get_event_loop()
+   loop.run_until_complete(countdown_1("Millennium Falcon", 5))
+   loop.close()
+
+The result of calling an ``async def`` function *without awaiting* will be a
+value of type ``Awaitable[T]``:
+
+.. code-block:: python
+
+   my_coroutine = countdown_1("Millennium Falcon", 5)
+   reveal_type(my_coroutine)  # has type 'Awaitable[str]'
+
+.. note::
+
+    :ref:`reveal_type() <reveal-type>` displays the inferred static type of
+    an expression.
+
+If you want to use coroutines in older versions of Python that do not support
+the ``async def`` syntax, you can instead use the ``@asyncio.coroutine``
+decorator to convert a generator into a coroutine.
+
+Note that we set the ``YieldType`` of the generator to be ``Any`` in the
+following example. This is because the exact yield type is an implementation
+detail of the coroutine runner (e.g. the ``asyncio`` event loop) and your
+coroutine shouldn't have to know or care about what precisely that type is.
+
+.. code-block:: python
+
+   from typing import Any, Generator
+   import asyncio
+
+   @asyncio.coroutine
+   def countdown_2(tag: str, count: int) -> Generator[Any, None, str]:
+       while count > 0:
+           print('T-minus {} ({})'.format(count, tag))
+           yield from asyncio.sleep(0.1)
+           count -= 1
+       return "Blastoff!"
+
+   loop = asyncio.get_event_loop()
+   loop.run_until_complete(countdown_2("USS Enterprise", 5))
+   loop.close()
+
+As before, the result of calling a generator decorated with ``@asyncio.coroutine``
+will be a value of type ``Awaitable[T]``.
+
+.. note::
+
+   At runtime, you are allowed to add the ``@asyncio.coroutine`` decorator to
+   both functions and generators. This is useful when you want to mark a
+   work-in-progress function as a coroutine, but have not yet added ``yield`` or
+   ``yield from`` statements:
+
+   .. code-block:: python
+
+      import asyncio
+
+      @asyncio.coroutine
+      def serialize(obj: object) -> str:
+          # todo: add yield/yield from to turn this into a generator
+          return "placeholder"
+
+   However, mypy currently does not support converting functions into
+   coroutines. Support for this feature will be added in a future version, but
+   for now, you can manually force the function to be a generator by doing
+   something like this:
+
+   .. code-block:: python
+
+      from typing import Generator
+      import asyncio
+
+      @asyncio.coroutine
+      def serialize(obj: object) -> Generator[None, None, str]:
+          # todo: add yield/yield from to turn this into a generator
+          if False:
+              yield
+          return "placeholder"
+
+You may also choose to create a subclass of ``Awaitable`` instead:
+
+.. code-block:: python
+
+   from typing import Any, Awaitable, Generator
+   import asyncio
+
+   class MyAwaitable(Awaitable[str]):
+       def __init__(self, tag: str, count: int) -> None:
+           self.tag = tag
+           self.count = count
+
+       def __await__(self) -> Generator[Any, None, str]:
+           for i in range(self.count, 0, -1):
+               print('T-minus {} ({})'.format(i, self.tag))
+               yield from asyncio.sleep(0.1)
+           return "Blastoff!"
+
+   def countdown_3(tag: str, count: int) -> Awaitable[str]:
+       return MyAwaitable(tag, count)
+
+   loop = asyncio.get_event_loop()
+   loop.run_until_complete(countdown_3("Heart of Gold", 5))
+   loop.close()
+
+To create an iterable coroutine, subclass ``AsyncIterator``:
+
+.. code-block:: python
+
+   from typing import Optional, AsyncIterator
+   import asyncio
+
+   class arange(AsyncIterator[int]):
+       def __init__(self, start: int, stop: int, step: int) -> None:
+           self.start = start
+           self.stop = stop
+           self.step = step
+           self.count = start - step
+
+       def __aiter__(self) -> AsyncIterator[int]:
+           return self
+
+       async def __anext__(self) -> int:
+           self.count += self.step
+           if self.count == self.stop:
+               raise StopAsyncIteration
+           else:
+               return self.count
+
+   async def countdown_4(tag: str, n: int) -> str:
+       async for i in arange(n, 0, -1):
+           print('T-minus {} ({})'.format(i, tag))
+           await asyncio.sleep(0.1)
+       return "Blastoff!"
+
+   loop = asyncio.get_event_loop()
+   loop.run_until_complete(countdown_4("Serenity", 5))
+   loop.close()
+
+For a more concrete example, the mypy repo has a toy webcrawler that
+demonstrates how to work with coroutines. One version
+`uses async/await <https://github.com/python/mypy/blob/master/test-data/samples/crawl2.py>`_
+and one
+`uses yield from <https://github.com/python/mypy/blob/master/test-data/samples/crawl.py>`_.
+
+.. _typeddict:
+
+TypedDict
+*********
+
+.. note::
+
+   TypedDict is not yet an officially supported feature.  It may not work reliably,
+   and details of TypedDict may change in future mypy releases.
+
+Python programs often use dictionaries with string keys to represent objects.
+Here is a typical example:
+
+.. code-block:: python
+
+   movie = {'name': 'Blade Runner', 'year': 1982}
+
+Only a fixed set of string keys is expected (``'name'`` and
+``'year'`` above), and each key has an independent value type (``str``
+for ``'name'`` and ``int`` for ``'year'`` above). We've previously
+seen the ``Dict[K, V]`` type, which lets you declare uniform
+dictionary types, where every value has the same type, and arbitrary keys
+are supported. This is clearly not a good fit for
+``movie`` above. Instead, you can use a ``TypedDict`` to give a precise
+type for objects like ``movie``, where the type of each
+dictionary value depends on the key:
+
+.. code-block:: python
+
+   from mypy_extensions import TypedDict
+
+   Movie = TypedDict('Movie', {'name': str, 'year': int})
+
+   movie = {'name': 'Blade Runner', 'year': 1982}  # type: Movie
+
+``Movie`` is a TypedDict type with two items: ``'name'`` (with type ``str``)
+and ``'year'`` (with type ``int``). Note that we used an explicit type
+annotation for the ``movie`` variable. This type annotation is
+important -- without it, mypy will try to infer a regular, uniform
+``Dict`` type for ``movie``, which is not what we want here.
+
+.. note::
+
+   If you pass a TypedDict object as an argument to a function, no
+   type annotation is usually necessary since mypy can infer the
+   desired type based on the declared argument type. Also, if an
+   assignment target has been previously defined, and it has a
+   TypedDict type, mypy will treat the assigned value as a TypedDict,
+   not ``Dict``.
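+
+   For example, a minimal sketch (``print_movie`` is a hypothetical helper):
+
+   .. code-block:: python
+
+      def print_movie(m: Movie) -> None:
+          print(m['name'], m['year'])
+
+      # No annotation needed; the declared argument type tells mypy this is a Movie
+      print_movie({'name': 'Toy Story', 'year': 1995})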
+
+Now mypy will recognize these as valid:
+
+.. code-block:: python
+
+   name = movie['name']  # Okay; type of name is str
+   year = movie['year']  # Okay; type of year is int
+
+Mypy will detect an invalid key as an error:
+
+.. code-block:: python
+
+   director = movie['director']  # Error: 'director' is not a valid key
+
+Mypy will also reject a runtime-computed expression as a key, as
+it can't verify that it's a valid key. You can only use string
+literals as TypedDict keys.
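+
+For example (the exact error wording is illustrative):
+
+.. code-block:: python
+
+   key = 'name'
+   movie[key]  # Error: TypedDict key must be a string literal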
+
+The ``TypedDict`` type object can also act as a constructor. It
+returns a normal ``dict`` object at runtime -- a ``TypedDict`` does
+not define a new runtime type:
+
+.. code-block:: python
+
+   toy_story = Movie(name='Toy Story', year=1995)
+
+This is equivalent to just constructing a dictionary directly using
+``{ ... }`` or ``dict(key=value, ...)``. The constructor form is
+sometimes convenient, since it can be used without a type annotation,
+and it also makes the type of the object explicit.
+
+Like all types, TypedDicts can be used as components to build
+arbitrarily complex types. For example, you can define nested
+TypedDicts and containers with TypedDict items.
+Unlike most other types, mypy uses structural compatibility checking
+(or structural subtyping) with TypedDicts. A TypedDict object with
+extra items is compatible with a narrower TypedDict, assuming item
+types are compatible (*totality* also affects
+subtyping, as discussed below).
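+
+For instance, here is a minimal sketch of structural compatibility
+(``NamedThing`` is an illustrative TypedDict with only a subset of the
+``Movie`` items):
+
+.. code-block:: python
+
+   NamedThing = TypedDict('NamedThing', {'name': str})
+
+   def get_name(thing: NamedThing) -> str:
+       return thing['name']
+
+   # 'movie' has an extra 'year' item, but is still compatible with NamedThing
+   get_name(movie)  # OK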
+
+.. note::
+
+   You need to install ``mypy_extensions`` using pip to use ``TypedDict``:
+
+   .. code-block:: text
+
+       python3 -m pip install --upgrade mypy-extensions
+
+   Or, if you are using Python 2:
+
+   .. code-block:: text
+
+       pip install --upgrade mypy-extensions
+
+Totality
+--------
+
+By default mypy ensures that a TypedDict object has all the specified
+keys. This will be flagged as an error:
+
+.. code-block:: python
+
+   # Error: 'year' missing
+   toy_story = {'name': 'Toy Story'}  # type: Movie
+
+Sometimes you want to allow keys to be left out when creating a
+TypedDict object. You can provide the ``total=False`` argument to
+``TypedDict(...)`` to achieve this:
+
+.. code-block:: python
+
+   GuiOptions = TypedDict(
+       'GuiOptions', {'language': str, 'color': str}, total=False)
+   options = {}  # type: GuiOptions  # Okay
+   options['language'] = 'en'
+
+You may need to use ``get()`` to access items of a partial (non-total)
+TypedDict, since indexing using ``[]`` could fail at runtime.
+However, mypy still lets you use ``[]`` with a partial TypedDict -- you
+just need to be careful with it, as it could result in a ``KeyError``.
+Requiring ``get()`` everywhere would be too cumbersome. (Note that you
+are free to use ``get()`` with total TypedDicts as well.)
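+
+Continuing the ``GuiOptions`` example above, a small sketch:
+
+.. code-block:: python
+
+   language = options.get('language', 'en')  # Safe even if the key is missing
+   color = options['color']                  # Accepted by mypy, but may raise KeyError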
+
+Keys that aren't required are shown with a ``?`` in error messages:
+
+.. code-block:: python
+
+   # Revealed type is 'TypedDict('GuiOptions', {'language'?: builtins.str,
+   #                                            'color'?: builtins.str})'
+   reveal_type(options)
+
+Totality also affects structural compatibility. You can't use a partial
+TypedDict when a total one is expected, and a total TypedDict is not
+valid when a partial one is expected.
+
+Class-based syntax
+------------------
+
+Python 3.6 supports an alternative, class-based syntax to define a
+TypedDict:
+
+.. code-block:: python
+
+   from mypy_extensions import TypedDict
+
+   class Movie(TypedDict):
+       name: str
+       year: int
+
+The above definition is equivalent to the original ``Movie``
+definition. It doesn't actually define a real class. This syntax also
+supports a form of inheritance -- subclasses can define additional
+items. However, this is primarily a notational shortcut. Since mypy
+uses structural compatibility with TypedDicts, inheritance is not
+required for compatibility. Here is an example of inheritance:
+
+.. code-block:: python
+
+   class Movie(TypedDict):
+       name: str
+       year: int
+
+   class BookBasedMovie(Movie):
+       based_on: str
+
+Now ``BookBasedMovie`` has keys ``name``, ``year`` and ``based_on``.
+
+Mixing required and non-required items
+--------------------------------------
+
+In addition to allowing reuse across TypedDict types, inheritance also allows
+you to mix required and non-required (using ``total=False``) items
+in a single TypedDict. Example:
+
+.. code-block:: python
+
+   class MovieBase(TypedDict):
+       name: str
+       year: int
+
+   class Movie(MovieBase, total=False):
+       based_on: str
+
+Now ``Movie`` has required keys ``name`` and ``year``, while ``based_on``
+can be left out when constructing an object. A TypedDict with a mix of required
+and non-required keys, such as ``Movie`` above, will only be compatible with
+another TypedDict if all required keys in the other TypedDict are required keys in the
+first TypedDict, and all non-required keys of the other TypedDict are also non-required keys
+in the first TypedDict.
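+
+For example, a minimal sketch of constructing and passing such a mixed
+TypedDict (``process_movie`` is a hypothetical function; the error wording is
+illustrative):
+
+.. code-block:: python
+
+   def process_movie(m: Movie) -> None:
+       ...
+
+   process_movie({'name': 'Blade Runner', 'year': 1982})  # OK: 'based_on' may be left out
+   process_movie({'name': 'Blade Runner'})  # Error: required key 'year' is missing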
diff --git a/docs/source/python2.rst b/docs/source/python2.rst
new file mode 100644
index 0000000..1267396
--- /dev/null
+++ b/docs/source/python2.rst
@@ -0,0 +1,130 @@
+.. _python2:
+
+Type checking Python 2 code
+===========================
+
+For code that needs to be Python 2.7 compatible, function type
+annotations are given in comments, since the function annotation
+syntax was introduced in Python 3. The comment-based syntax is
+specified in `PEP 484 <https://www.python.org/dev/peps/pep-0484>`_.
+
+Run mypy in Python 2 mode by using the ``--py2`` option::
+
+    $ mypy --py2 program.py
+
+To run your program, you must have the ``typing`` module in your
+Python 2 module search path. Use ``pip install typing`` to install the
+module. This also works for Python 3 versions prior to 3.5 that don't
+include ``typing`` in the standard library.
+
+The example below illustrates the Python 2 function type annotation
+syntax. This syntax is also valid in Python 3 mode:
+
+.. code-block:: python
+
+    from typing import List
+
+    def hello(): # type: () -> None
+        print 'hello'
+
+    class Example:
+        def method(self, lst, opt=0, *args, **kwargs):
+            # type: (List[str], int, *str, **bool) -> int
+            """Docstring comes after type comment."""
+            ...
+
+It's worth going through these details carefully to avoid surprises:
+
+- You don't provide an annotation for the ``self`` / ``cls`` variable of
+  methods.
+
+- The docstring always comes *after* the type comment.
+
+- For ``*args`` and ``**kwargs`` the type should be prefixed with
+  ``*`` or ``**``, respectively (except when using the multi-line
+  annotation syntax described below). Again, the above example
+  illustrates this.
+
+- Things like ``Any`` must be imported from ``typing``, even if they
+  are only used in comments.
+
+- In Python 2 mode ``str`` is implicitly promoted to ``unicode``, similar
+  to how ``int`` is compatible with ``float``. This is unlike ``bytes`` and
+  ``str`` in Python 3, which are incompatible. ``bytes`` in Python 2 is
+  equivalent to ``str``. (This might change in the future.)
+
+.. _multi_line_annotation:
+
+Multi-line Python 2 function annotations
+----------------------------------------
+
+Mypy also supports a multi-line comment annotation syntax. You
+can provide a separate annotation for each argument using the variable
+annotation syntax. When using the single-line annotation syntax
+described above, functions with long argument lists tend to result in
+overly long type comments and it's often tricky to see which argument
+type corresponds to which argument. The alternative, multi-line
+annotation syntax makes long annotations easier to read and write.
+
+Here is an example (from PEP 484):
+
+.. code-block:: python
+
+    def send_email(address,     # type: Union[str, List[str]]
+                   sender,      # type: str
+                   cc,          # type: Optional[List[str]]
+                   bcc,         # type: Optional[List[str]]
+                   subject='',
+                   body=None    # type: List[str]
+                   ):
+        # type: (...) -> bool
+        """Send an email message.  Return True if successful."""
+        <code>
+
+You write a separate annotation for each function argument on the same
+line as the argument. Each annotation must be on a separate line. If
+you leave out an annotation for an argument, it defaults to
+``Any``. You provide a return type annotation in the body of the
+function using the form ``# type: (...) -> rt``, where ``rt`` is the
+return type. Note that the return type annotation contains three
+literal dots.
+
+Note that when using multi-line comments, you do not need to prefix the
+types of your ``*args`` and ``**kwargs`` parameters with ``*`` or ``**``.
+For example, here is how you would annotate the first example using
+multi-line comments:
+
+.. code-block:: python
+
+    from typing import List
+
+    class Example:
+        def method(self,
+                   lst,      # type: List[str]
+                   opt=0,    # type: int
+                   *args,    # type: str
+                   **kwargs  # type: bool
+                   ):
+            # type: (...) -> int
+            """Docstring comes after type comment."""
+            ...
+
+
+Additional notes
+----------------
+
+- You should include types for arguments with default values in the
+  annotation. The ``opt`` argument of ``method`` in the example at the
+  beginning of this section is an example of this.
+
+- The annotation can be on the same line as the function header or on
+  the following line.
+
+- The type syntax for variables is the same as for Python 3.
+
+- You don't need to use string literal escapes for forward references
+  within comments (see the sketch after this list).
+
+- Mypy uses a separate set of library stub files in `typeshed
+  <https://github.com/python/typeshed>`_ for Python 2. Library support
+  may vary between Python 2 and Python 3.
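+
+For instance, a minimal sketch of a forward reference inside a type comment
+(no string literal escapes are needed, since the annotation is already inside
+a comment):
+
+.. code-block:: python
+
+    def create_node():
+        # type: () -> Node
+        return Node()
+
+    class Node:
+        pass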
diff --git a/docs/source/python36.rst b/docs/source/python36.rst
new file mode 100644
index 0000000..f1f7322
--- /dev/null
+++ b/docs/source/python36.rst
@@ -0,0 +1,96 @@
+.. _python-36:
+
+New features in Python 3.6
+==========================
+
+Python 3.6 was `released
+<https://www.python.org/downloads/release/python-360/>`_ in
+December 2016.  As of mypy 0.510 all language features new in Python
+3.6 are supported.
+
+Syntax for variable annotations (`PEP 526 <https://www.python.org/dev/peps/pep-0526>`_)
+---------------------------------------------------------------------------------------
+
+Python 3.6 feature: variables (in global, class or local scope) can
+now have type annotations using either of the two forms:
+
+.. code-block:: python
+
+   from typing import List, Optional
+   foo: Optional[int]
+   bar: List[str] = []
+
+Mypy fully supports this syntax, interpreting these annotations as equivalent to:
+
+.. code-block:: python
+
+   foo = None  # type: Optional[int]
+   bar = []  # type: List[str]
+
+.. _class-var:
+
+An additional feature defined in PEP 526 is also supported: you can
+mark names intended to be used as class variables with ``ClassVar``.
+In a pinch you can also use ``ClassVar`` in ``# type`` comments.
+Example:
+
+.. code-block:: python
+
+   from typing import ClassVar
+
+   class C:
+       x: int  # instance variable
+       y: ClassVar[int]  # class variable
+       z = None  # type: ClassVar[int]
+
+       def foo(self) -> None:
+           self.x = 0  # OK
+           self.y = 0  # Error: Cannot assign to class variable "y" via instance
+
+   C.y = 0  # This is OK
+
+
+Literal string formatting (`PEP 498 <https://www.python.org/dev/peps/pep-0498>`_)
+---------------------------------------------------------------------------------
+
+Python 3.6 feature: string literals of the form
+``f"text {expression} text"`` evaluate ``expression`` using the
+current evaluation context (locals and globals).
+
+Mypy fully supports this syntax and type-checks the ``expression``.
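+
+For example (the error wording is illustrative):
+
+.. code-block:: python
+
+   names = ['Jukka', 'Guido']
+   greeting = f"Hello, {names[0]}!"  # OK; the inferred type is 'str'
+   oops = f"Sum: {names + 1}"        # Error: unsupported operand types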
+
+Underscores in numeric literals (`PEP 515 <https://www.python.org/dev/peps/pep-0515>`_)
+---------------------------------------------------------------------------------------
+
+Python 3.6 feature: numeric literals can contain underscores,
+e.g. ``1_000_000``.
+
+Mypy fully supports this syntax:
+
+.. code-block:: python
+
+   from typing import List
+
+   precise_val = 1_000_000.000_000_1
+   hexes: List[int] = []
+   hexes.append(0x_FF_FF_FF_FF)
+
+.. _async_generators_and_comprehensions:
+
+Asynchronous generators (`PEP 525 <https://www.python.org/dev/peps/pep-0525>`_) and comprehensions (`PEP 530 <https://www.python.org/dev/peps/pep-0530>`_)
+----------------------------------------------------------------------------------------------------------------------------------------------------------
+
+Python 3.6 allows coroutines defined with ``async def`` (PEP 492) to be
+generators, i.e. contain ``yield`` expressions, and introduces a syntax for
+asynchronous comprehensions. Mypy fully supports these features, for example:
+
+.. code-block:: python
+
+   from typing import AsyncIterator
+
+   async def gen() -> AsyncIterator[bytes]:
+       lst = [b async for b in gen()]  # Inferred type is "List[bytes]"
+       yield 'no way'  # Error: Incompatible types (got "str", expected "bytes")
+
+New named tuple syntax
+----------------------
+
+Python 3.6 supports an alternative syntax for named tuples. See :ref:`named-tuples`.
diff --git a/docs/source/revision_history.rst b/docs/source/revision_history.rst
new file mode 100644
index 0000000..dadc795
--- /dev/null
+++ b/docs/source/revision_history.rst
@@ -0,0 +1,245 @@
+Revision history
+================
+
+List of major changes:
+
+- July 2017
+    * Publish ``mypy`` version 0.521 on PyPI.
+
+    * Publish ``mypy`` version 0.520 on PyPI.
+
+    * Add :ref:`fine-grained control of Any types <disallow-any>`.
+
+    * Add :ref:`typeddict`.
+
+    * Other updates to :ref:`command-line`:
+
+      * Add ``--no-implicit-optional``.
+
+      * Add ``--shadow-file``.
+
+      * Add ``--no-incremental``.
+
+- May 2017
+    * Publish ``mypy`` version 0.510 on PyPI.
+
+    * Remove option ``--no-fast-parser``.
+
+    * Deprecate option ``--strict-boolean``.
+
+    * Drop support for Python 3.2 as type checking target.
+
+    * Add support for :ref:`overloaded functions with implementations <function-overloading>`.
+
+    * Add :ref:`extended_callable`.
+
+    * Add :ref:`async_generators_and_comprehensions`.
+
+    * Add :ref:`ClassVar <class-var>`.
+
+    * Add :ref:`quick mode <quick-mode>`.
+
+- March 2017
+    * Publish ``mypy`` version 0.500 on PyPI.
+
+    * Add :ref:`noreturn`.
+
+    * Add :ref:`generic-subclasses`.
+
+    * Add :ref:`variance-of-generics`.
+
+    * Add :ref:`variance`.
+
+    * Updates to :ref:`python-36`.
+
+    * Updates to :ref:`integrating-mypy`.
+
+    * Updates to :ref:`command-line`:
+
+      * Add option ``--warn-return-any``.
+
+      * Add option ``--strict-boolean``.
+
+      * Add option ``--strict``.
+
+    * Updates to :ref:`config-file`:
+
+      * ``warn_no_return`` is on by default.
+
+      * Read settings from ``setup.cfg`` if ``mypy.ini`` does not exist.
+
+      * Add option ``warn_return_any``.
+
+      * Add option ``strict_boolean``.
+
+- January 2017
+    * Publish ``mypy`` version 0.470 on PyPI.
+
+    * Change package name from ``mypy-lang`` to ``mypy``.
+
+    * Add :ref:`integrating-mypy`.
+
+    * Add :ref:`cheat-sheet-py3`.
+
+    * Major update to :ref:`finding-imports`.
+
+    * Add :ref:`--ignore-missing-imports <ignore-missing-imports>`.
+
+    * Updates to :ref:`config-file`.
+
+    * Document underscore support in numeric literals.
+
+    * Document that arguments prefixed with ``__`` are positional-only.
+
+    * Document that ``--hide-error-context`` is now on by default,
+      and there is a new flag ``--show-error-context``.
+
+    * Add ``ignore_errors`` to :ref:`per-module-flags`.
+
+- November 2016
+    * Publish ``mypy-lang`` version 0.4.6 on PyPI.
+
+    * Add :ref:`getting-started`.
+
+    * Add :ref:`generic-methods-and-generic-self` (experimental).
+
+    * Add :ref:`declaring-decorators`.
+
+    * Discuss generic type aliases in :ref:`type-aliases`.
+
+    * Discuss Python 3.6 named tuple syntax in :ref:`named-tuples`.
+
+    * Updates to :ref:`common_issues`.
+
+    * Updates to :ref:`python-36`.
+
+    * Updates to :ref:`command-line`:
+
+      * ``--custom-typeshed-dir``
+
+      * ``--junit-xml``
+
+      * ``--find-occurrences``
+
+      * ``--cobertura-xml-report``
+
+      * ``--warn-no-return``
+
+    * Updates to :ref:`config-file`:
+
+      * Sections with fnmatch patterns now use
+        module name patterns (previously they were path patterns).
+      * Added ``custom_typeshed_dir``, ``mypy_path`` and ``show_column_numbers``.
+
+    * Mention the magic ``MYPY`` constant in :ref:`import-cycles`.
+
+- October 2016
+    * Publish ``mypy-lang`` version 0.4.5 on PyPI.
+
+    * Add :ref:`python-36`.
+
+    * Add :ref:`config-file`.
+
+    * Updates to :ref:`command-line`: ``--strict-optional-white-list``,
+      ``--disallow-subclassing-any``, ``--config-file``, ``@flagfile``,
+      ``--hide-error-context`` (replaces ``--suppress-error-context``),
+      ``--show-column-numbers`` and ``--scripts-are-modules``.
+
+    * Mention ``typing.TYPE_CHECKING`` in :ref:`import-cycles`.
+
+- August 2016
+    * Publish ``mypy-lang`` version 0.4.4 on PyPI.
+
+    * Add :ref:`newtypes`.
+
+    * Add :ref:`async-and-await`.
+
+    * Add :ref:`text-and-anystr`.
+
+    * Add :ref:`version_and_platform_checks`.
+
+- July 2016
+    * Publish ``mypy-lang`` version 0.4.3 on PyPI.
+
+    * Add :ref:`strict_optional`.
+
+    * Add :ref:`multi_line_annotation`.
+
+- June 2016
+    * Publish ``mypy-lang`` version 0.4.2 on PyPI.
+
+    * Add :ref:`type-of-class`.
+
+    * Add :ref:`cheat-sheet-py2`.
+
+    * Add :ref:`reveal-type`.
+
+- May 2016
+    * Publish ``mypy-lang`` version 0.4 on PyPI.
+
+    * Add :ref:`type-variable-upper-bound`.
+
+    * Document :ref:`command-line`.
+
+- Feb 2016
+    * Publish ``mypy-lang`` version 0.3.1 on PyPI.
+
+    * Document Python 2 support.
+
+- Nov 2015
+    Add :ref:`library-stubs`.
+
+- Jun 2015
+    Remove ``Undefined`` and ``Dynamic``, as they are not in PEP 484.
+
+- Apr 2015
+    Publish ``mypy-lang`` version 0.2.0 on PyPI.
+
+- Mar 2015
+    Update documentation to reflect PEP 484:
+
+    * Add :ref:`named-tuples` and :ref:`optional`.
+
+    * Do not mention type application syntax (for
+      example, ``List[int]()``), as it's no longer supported,
+      due to PEP 484 compatibility.
+
+    * Rename ``typevar`` to ``TypeVar``.
+
+    * Document ``# type: ignore`` which allows
+      locally ignoring spurious errors (:ref:`silencing_checker`).
+
+    * No longer mention
+      ``Any(x)`` as a valid cast, as it will be phased out soon.
+
+    * Mention the new ``.pyi`` stub file extension. Stubs can live
+      in the same directory as the rest of the program.
+
+- Jan 2015
+    Mypy moves closer to PEP 484:
+
+    * Add :ref:`type-aliases`.
+
+    * Update discussion of overloading -- it's now only supported in stubs.
+
+    * Rename ``Function[...]`` to ``Callable[...]``.
+
+- Dec 2014
+    Publish mypy version 0.1.0 on PyPI.
+
+- Oct 2014
+    Major restructuring.
+    Split the HTML documentation into
+    multiple pages.
+
+- Sep 2014
+    Migrated docs to Sphinx.
+
+- Aug 2014
+    Don't discuss native semantics. There is only Python
+    semantics.
+
+- Jul 2013
+    Rewrite to use new syntax. Shift focus to discussing
+    Python semantics. Add more content, including short discussions of
+    :ref:`generic-functions` and :ref:`union-types`.
diff --git a/docs/source/supported_python_features.rst b/docs/source/supported_python_features.rst
new file mode 100644
index 0000000..ca68d4c
--- /dev/null
+++ b/docs/source/supported_python_features.rst
@@ -0,0 +1,20 @@
+Supported Python features and modules
+=====================================
+
+A list of unsupported Python features is maintained in the mypy wiki:
+
+- `Unsupported Python features <https://github.com/python/mypy/wiki/Unsupported-Python-Features>`_
+
+Runtime definition of methods and functions
+*******************************************
+
+By default, mypy will complain if you add a function to a class
+or module outside its definition -- but only if this is visible to the
+type checker. This only affects static checking, as mypy performs no
+additional type checking at runtime. You can easily work around
+this. For example, you can use dynamically typed code or values with
+``Any`` types, or you can use ``setattr`` or other introspection
+features. However, you need to be careful if you decide to do this. If
+used indiscriminately, you may have difficulty using static typing
+effectively, since the type checker cannot see functions defined at
+runtime.
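+
+For instance, a minimal sketch (the error wording is illustrative):
+
+.. code-block:: python
+
+   class A:
+       pass
+
+   def f(self: A) -> None:
+       print('hello')
+
+   A.f = f              # Error: "Type[A]" has no attribute "f"
+   setattr(A, 'f', f)   # No error: mypy cannot check the dynamic assignment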
diff --git a/docs/source/type_inference_and_annotations.rst b/docs/source/type_inference_and_annotations.rst
new file mode 100644
index 0000000..76e9cf9
--- /dev/null
+++ b/docs/source/type_inference_and_annotations.rst
@@ -0,0 +1,172 @@
+Type inference and type annotations
+===================================
+
+Type inference
+**************
+
+The initial assignment defines a variable. If you do not explicitly
+specify the type of the variable, mypy infers the type based on the
+static type of the value expression:
+
+.. code-block:: python
+
+   i = 1           # Infer type int for i
+   l = [1, 2]      # Infer type List[int] for l
+
+Type inference is bidirectional and takes context into account. For
+example, the following is valid:
+
+.. code-block:: python
+
+   def f(l: List[object]) -> None:
+       l = [1, 2]  # Infer type List[object] for [1, 2]
+
+In an assignment, the type context is determined by the assignment
+target. In this case this is ``l``, which has the type
+``List[object]``. The value expression ``[1, 2]`` is type checked in
+this context and given the type ``List[object]``. In the previous
+example we introduced a new variable ``l``, and here the type context
+was empty.
+
+Note that the following is not valid, since ``List[int]`` is not
+compatible with ``List[object]``:
+
+.. code-block:: python
+
+   def f(l: List[object], k: List[int]) -> None:
+       l = k       # Type check error: incompatible types in assignment
+
+The reason why the above assignment is disallowed is that allowing the
+assignment could result in non-int values stored in a list of ``int``:
+
+.. code-block:: python
+
+   def f(l: List[object], k: List[int]) -> None:
+       l = k
+       l.append('x')
+       print(k[-1])  # Ouch; a string in List[int]
+
+You can still run the above program; it prints ``x``. This illustrates
+the fact that static types are used during type checking, but they do
+not affect the runtime behavior of programs. You can run programs with
+type check failures, which is often very handy when performing a large
+refactoring. Thus you can always 'work around' the type system, and it
+doesn't really limit what you can do in your program.
+
+Type inference is not used in dynamically typed functions (those
+without an explicit return type) — every local variable type defaults
+to ``Any``, which is discussed later.
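+
+For example, a minimal sketch:
+
+.. code-block:: python
+
+   def untyped(x):
+       y = x + 1     # Body is not type checked; y implicitly has type 'Any'
+       y.anything()  # Also not reported
+       return y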
+
+Explicit types for variables
+****************************
+
+You can override the inferred type of a variable by using a
+special type comment after an assignment statement:
+
+.. code-block:: python
+
+   x = 1  # type: Union[int, str]
+
+Without the type comment, the type of ``x`` would be just ``int``. We
+use an annotation to give it a more general type ``Union[int, str]``.
+Mypy checks that the type of the initializer is compatible with the
+declared type. The following example is not valid, since the initializer is
+a floating point number, and this is incompatible with the declared
+type:
+
+.. code-block:: python
+
+   x = 1.1  # type: Union[int, str]  # Error!
+
+.. note::
+
+   The best way to think about this is that the type comment sets the
+   type of the variable, not the type of the expression. To force the
+   type of an expression you can use ``cast(<type>, <expression>)``.
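+
+   For example, a minimal sketch of ``cast``:
+
+   .. code-block:: python
+
+      from typing import List, cast
+
+      o = [1]  # type: object
+      x = cast(List[int], o)  # Type of the expression is forced to List[int]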
+
+Explicit types for collections
+******************************
+
+The type checker cannot always infer the type of a list or a
+dictionary. This often arises when creating an empty list or
+dictionary and assigning it to a new variable that doesn't have an explicit
+variable type. In these cases you can give the type explicitly using
+a type annotation comment:
+
+.. code-block:: python
+
+   l = []  # type: List[int]       # Create empty list with type List[int]
+   d = {}  # type: Dict[str, int]  # Create empty dictionary (str -> int)
+
+Similarly, you can also give an explicit type when creating an empty set:
+
+.. code-block:: python
+
+   s = set()  # type: Set[int]
+
+Declaring multiple variable types at a time
+*******************************************
+
+You can declare more than a single variable at a time. In order to
+nicely work with multiple assignment, you must give each variable a
+type separately:
+
+.. code-block:: python
+
+   i, found = 0, False # type: int, bool
+
+You can optionally use parentheses around the types, assignment targets
+and assigned expression:
+
+.. code-block:: python
+
+   i, found = 0, False # type: (int, bool)      # OK
+   (i, found) = 0, False # type: int, bool      # OK
+   i, found = (0, False) # type: int, bool      # OK
+   (i, found) = (0, False) # type: (int, bool)  # OK
+
+Starred expressions
+*******************
+
+In most cases, mypy can infer the type of starred expressions from the
+right-hand side of an assignment, but not always:
+
+.. code-block:: python
+
+    a, *bs = 1, 2, 3   # OK
+    p, q, *rs = 1, 2   # Error: Type of rs cannot be inferred
+
+On the first line, the type of ``bs`` is inferred to be
+``List[int]``. However, on the second line, mypy cannot infer the type
+of ``rs``, because there is no right-hand side value for ``rs`` to
+infer the type from. In cases like these, the starred expression needs
+to be annotated with a starred type:
+
+.. code-block:: python
+
+    p, q, *rs = 1, 2  # type: int, int, *List[int]
+
+Here, the type of ``rs`` is set to ``List[int]``.
+
+Types in stub files
+*******************
+
+:ref:`Stub files <library-stubs>` are written in normal Python 3
+syntax, but they generally leave out runtime logic such as variable
+initializers, function bodies, and default arguments, replacing them
+with ellipses.
+
+In this example, each ellipsis ``...`` is literally written in the
+stub file as three dots:
+
+.. code-block:: python
+
+    x = ...  # type: int
+    def afunc(code: str) -> int: ...
+    def bfunc(a: int, b: int = ...) -> int: ...
+
+.. note::
+
+    The ellipsis ``...`` is also used with a different meaning in
+    :ref:`callable types <callable-types>` and :ref:`tuple types
+    <tuple-types>`.
diff --git a/extensions/README.md b/extensions/README.md
new file mode 100644
index 0000000..73b786b
--- /dev/null
+++ b/extensions/README.md
@@ -0,0 +1,6 @@
+Mypy Extensions
+===============
+
+The "mypy_extensions" module defines experimental extensions to the
+standard "typing" module that are supported by the mypy typechecker.
+
diff --git a/extensions/mypy_extensions.py b/extensions/mypy_extensions.py
new file mode 100644
index 0000000..c711e00
--- /dev/null
+++ b/extensions/mypy_extensions.py
@@ -0,0 +1,137 @@
+"""Defines experimental extensions to the standard "typing" module that are
+supported by the mypy typechecker.
+
+Example usage:
+    from mypy_extensions import TypedDict
+"""
+
+from typing import Any
+
+# NOTE: This module must support Python 2.7 in addition to Python 3.x
+
+import sys
+# _type_check is NOT a part of public typing API, it is used here only to mimic
+# the (convenient) behavior of types provided by typing module.
+from typing import _type_check  # type: ignore
+
+
+def _check_fails(cls, other):
+    try:
+        if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools', 'typing']:
+            # Typed dicts are only for static structural subtyping.
+            raise TypeError('TypedDict does not support instance and class checks')
+    except (AttributeError, ValueError):
+        pass
+    return False
+
+
+def _dict_new(cls, *args, **kwargs):
+    return dict(*args, **kwargs)
+
+
+def _typeddict_new(cls, _typename, _fields=None, **kwargs):
+    total = kwargs.pop('total', True)
+    if _fields is None:
+        _fields = kwargs
+    elif kwargs:
+        raise TypeError("TypedDict takes either a dict or keyword arguments,"
+                        " but not both")
+    return _TypedDictMeta(_typename, (), {'__annotations__': dict(_fields),
+                                          '__total__': total})
+
+
+class _TypedDictMeta(type):
+    def __new__(cls, name, bases, ns, total=True):
+        # Create new typed dict class object.
+        # This method is called directly when TypedDict is subclassed,
+        # or via _typeddict_new when TypedDict is instantiated. This way
+        # TypedDict supports all three syntaxes described in its docstring.
+        # Subclasses and instances of TypedDict return actual dictionaries
+        # via _dict_new.
+        ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
+        tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
+        try:
+            # Setting correct module is necessary to make typed dict classes pickleable.
+            tp_dict.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
+        except (AttributeError, ValueError):
+            pass
+        anns = ns.get('__annotations__', {})
+        msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+        anns = {n: _type_check(tp, msg) for n, tp in anns.items()}
+        for base in bases:
+            anns.update(base.__dict__.get('__annotations__', {}))
+        tp_dict.__annotations__ = anns
+        if not hasattr(tp_dict, '__total__'):
+            tp_dict.__total__ = total
+        return tp_dict
+
+    __instancecheck__ = __subclasscheck__ = _check_fails
+
+
+TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
+TypedDict.__module__ = __name__
+TypedDict.__doc__ = \
+    """A simple typed name space. At runtime it is equivalent to a plain dict.
+
+    TypedDict creates a dictionary type that expects all of its
+    instances to have a certain set of keys, with each key
+    associated with a value of a consistent type. This expectation
+    is not checked at runtime but is only enforced by typecheckers.
+    Usage::
+
+        Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+        a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
+        b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
+        assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+    The type info could be accessed via Point2D.__annotations__. TypedDict
+    supports two additional equivalent forms::
+
+        Point2D = TypedDict('Point2D', x=int, y=int, label=str)
+
+        class Point2D(TypedDict):
+            x: int
+            y: int
+            label: str
+
+    The latter syntax is only supported in Python 3.6+, while the other two
+    syntax forms work for Python 2.7 and 3.2+.
+    """
+
+# Argument constructors for making more-detailed Callables. These all just
+# return their type argument, to make them complete noops in terms of the
+# `typing` module.
+
+
+def Arg(type=Any, name=None):
+    """A normal positional argument"""
+    return type
+
+
+def DefaultArg(type=Any, name=None):
+    """A positional argument with a default value"""
+    return type
+
+
+def NamedArg(type=Any, name=None):
+    """A keyword-only argument"""
+    return type
+
+
+def DefaultNamedArg(type=Any, name=None):
+    """A keyword-only argument with a default value"""
+    return type
+
+
+def VarArg(type=Any):
+    """A *args-style variadic positional argument"""
+    return type
+
+
+def KwArg(type=Any):
+    """A **kwargs-style variadic keyword argument"""
+    return type
+
+
+# Return type that indicates a function does not return
+class NoReturn: pass
diff --git a/extensions/setup.py b/extensions/setup.py
new file mode 100644
index 0000000..b0ffbc5
--- /dev/null
+++ b/extensions/setup.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+# NOTE: This package must support Python 2.7 in addition to Python 3.x
+
+from distutils.core import setup
+
+version = '0.3.0'
+description = 'Experimental type system extensions for programs checked with the mypy typechecker.'
+long_description = '''
+Mypy Extensions
+===============
+
+The "mypy_extensions" module defines experimental extensions to the
+standard "typing" module that are supported by the mypy typechecker.
+'''.lstrip()
+
+classifiers = [
+    'Development Status :: 2 - Pre-Alpha',
+    'Environment :: Console',
+    'Intended Audience :: Developers',
+    'License :: OSI Approved :: MIT License',
+    'Operating System :: POSIX',
+    'Programming Language :: Python :: 2',
+    'Programming Language :: Python :: 2.7',
+    'Programming Language :: Python :: 3',
+    'Programming Language :: Python :: 3.3',
+    'Programming Language :: Python :: 3.4',
+    'Programming Language :: Python :: 3.5',
+    'Programming Language :: Python :: 3.6',
+    'Topic :: Software Development',
+]
+
+setup(
+    name='mypy_extensions',
+    version=version,
+    description=description,
+    long_description=long_description,
+    author='The mypy developers',
+    author_email='jukka.lehtosalo@iki.fi',
+    url='http://www.mypy-lang.org/',
+    license='MIT License',
+    platforms=['POSIX'],
+    py_modules=['mypy_extensions'],
+    classifiers=classifiers,
+)
diff --git a/misc/actions_stubs.py b/misc/actions_stubs.py
new file mode 100644
index 0000000..978af71
--- /dev/null
+++ b/misc/actions_stubs.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python3
+import os
+import shutil
+from typing import Tuple, Any
+try:
+    import click
+except ImportError:
+    print("You need the module \'click\'")
+    exit(1)
+
+base_path = os.getcwd()
+
+# I don't know how to set callables with different args
+def apply_all(func: Any, directory: str, extension: str,
+            to_extension: str='', exclude: Tuple[str]=('',),
+            recursive: bool=True, debug: bool=False) -> None:
+    excluded = [x+extension for x in exclude] if exclude else []
+    for p, d, files in os.walk(os.path.join(base_path,directory)):
+        for f in files:
+            if "{}".format(f) in excluded:
+                continue
+            inner_path = os.path.join(p,f)
+            if not inner_path.endswith(extension):
+                continue
+            if to_extension:
+                new_path = "{}{}".format(inner_path[:-len(extension)],to_extension)
+                func(inner_path,new_path)
+            else:
+                func(inner_path)
+        if not recursive:
+            break
+
+def confirm(resp: bool=False, **kargs) -> bool:
+    kargs['rest'] = "to this {f2}/*{e2}".format(**kargs) if kargs.get('f2') else ''
+    prompt = "{act} all files {rec}matching this expression {f1}/*{e1} {rest}".format(**kargs)
+    prompt.format(**kargs)
+    prompt = "{} [{}]|{}: ".format(prompt, 'Y' if resp else 'N', 'n' if resp else 'y')
+    while True:
+        ans = input(prompt).lower()
+        if not ans:
+            return resp
+        if ans not in ['y','n']:
+            print('Please enter (y) or (n).')
+            continue
+        if ans == 'y':
+            return True
+        else:
+            return False
+
+actions = ['cp', 'mv', 'rm']
+@click.command(context_settings=dict(help_option_names=['-h', '--help']))
+@click.option('--action', '-a', type=click.Choice(actions), required=True, help="What do I have to do :-)")
+@click.option('--dir', '-d', 'directory', default='stubs', help="Directory in which to start the search. Default: stubs")
+@click.option('--ext', '-e', 'extension', default='.py', help="Extension of the files the action is applied to. Default: .py")
+@click.option('--to', '-t', 'to_extension', default='.pyi', help="Target extension for copy/move actions. Default: .pyi")
+@click.option('--exclude', '-x', multiple=True, default=('__init__',), help="Ignore files with this name. (can be given multiple times)")
+@click.option('--not-recursive', '-n', default=True, is_flag=True, help="Set if you don't want to walk recursively.")
+def main(action: str, directory: str, extension: str, to_extension: str,
+    exclude: Tuple[str], not_recursive: bool) -> None:
+    """
+    This script helps to copy/move/remove files based on their extension.
+
+    The three actions will ask you for confirmation.
+
+    Examples (by default the script search in stubs directory):
+
+    - Change extension of all stubs from .py to .pyi:
+
+        python <script.py> -a mv
+
+    - Revert the previous action.
+
+        python <script.py> -a mv -e .pyi -t .py
+
+    - If you want to ignore "awesome.py" files.
+
+        python <script.py> -a [cp|mv|rm] -x awesome
+
+    - If you want to ignore "awesome.py" and "__init__.py" files.
+
+        python <script.py> -a [cp|mv|rm] -x awesome -x __init__
+
+    - If you want to remove all ".todo" files in "todo" directory, but not recursively:
+
+        python <script.py> -a rm -e .todo -d todo -n
+
+    """
+    if action not in actions:
+        print("Your action has to be one of these: {}".format(', '.join(actions)))
+        return
+
+    rec = "[Recursively] " if not_recursive else ''
+    if not extension.startswith('.'):
+        extension = ".{}".format(extension)
+    if not to_extension.startswith('.'):
+        to_extension = ".{}".format(to_extension)
+    if directory.endswith('/'):
+        directory = directory[:-1]
+    if action == 'cp':
+        if confirm(act='Copy',rec=rec, f1=directory, e1=extension, f2=directory, e2=to_extension):
+            apply_all(shutil.copy, directory, extension, to_extension, exclude, not_recursive)
+    elif action == 'rm':
+        if confirm(act='Remove',rec=rec, f1=directory, e1=extension):
+            apply_all(os.remove, directory, extension, exclude=exclude, recursive=not_recursive)
+    elif action == 'mv':
+        if confirm(act='Move',rec=rec, f1=directory, e1=extension, f2=directory, e2=to_extension):
+            apply_all(shutil.move, directory, extension, to_extension, exclude, not_recursive)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/misc/analyze_cache.py b/misc/analyze_cache.py
new file mode 100644
index 0000000..643e2bf
--- /dev/null
+++ b/misc/analyze_cache.py
@@ -0,0 +1,189 @@
+#!/usr/bin/env python
+
+from typing import Any, Dict, Generator, Iterable, List, Optional
+from collections import Counter
+
+import os
+import os.path
+import json
+
+ROOT = ".mypy_cache/3.5"
+
+JsonDict = Dict[str, Any]
+
+class CacheData:
+    def __init__(self, filename: str, data_json: JsonDict, meta_json: JsonDict,
+                 data_size: int, meta_size: int) -> None:
+        self.filename = filename
+        self.data = data_json
+        self.meta = meta_json
+        self.data_size = data_size
+        self.meta_size = meta_size
+
+    @property
+    def total_size(self) -> int:
+        return self.data_size + self.meta_size
+
+
+def extract_classes(chunks: Iterable[CacheData]) -> Iterable[JsonDict]:
+    def extract(chunks: Iterable[JsonDict]) -> Iterable[JsonDict]:
+        for chunk in chunks:
+            if isinstance(chunk, dict):
+                yield chunk
+                yield from extract(chunk.values())
+            elif isinstance(chunk, list):
+                yield from extract(chunk)
+    yield from extract([chunk.data for chunk in chunks])
+
+
+def load_json(data_path: str, meta_path: str) -> CacheData:
+    with open(data_path, 'r') as ds:
+        data_json = json.load(ds)
+
+    with open(meta_path, 'r') as ms:
+        meta_json = json.load(ms)
+
+    data_size = os.path.getsize(data_path)
+    meta_size = os.path.getsize(meta_path)
+
+    return CacheData(data_path.replace(".data.json", ".*.json"),
+                     data_json, meta_json, data_size, meta_size)
+
+
+def get_files(root: str) -> Iterable[CacheData]:
+    for (dirpath, dirnames, filenames) in os.walk(root):
+        for filename in filenames:
+            if filename.endswith(".data.json"):
+                meta_filename = filename.replace(".data.json", ".meta.json")
+                yield load_json(
+                        os.path.join(dirpath, filename),
+                        os.path.join(dirpath, meta_filename))
+
+
+def pluck(name: str, chunks: Iterable[JsonDict]) -> Iterable[JsonDict]:
+    return (chunk for chunk in chunks if chunk['.class'] == name)
+
+
+def report_counter(counter: Counter, amount: Optional[int] = None) -> None:
+    for name, count in counter.most_common(amount):
+        print('    {: <8} {}'.format(count, name))
+    print()
+
+
+def report_most_common(chunks: List[JsonDict], amount: Optional[int] = None) -> None:
+    report_counter(Counter(str(chunk) for chunk in chunks), amount)
+
+
+def compress(chunk: JsonDict) -> JsonDict:
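+    # Deduplicate repeated sub-dicts: the first occurrence of a dict is tagged
+    # with a '.cache_id', and later identical dicts are replaced by {'.id': n} stubs.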
+    cache = {}  # type: Dict[int, JsonDict]
+    counter = 0
+    def helper(chunk: Any) -> Any:
+        nonlocal counter
+        if not isinstance(chunk, dict):
+            return chunk
+
+        if len(chunk) <= 2:
+            return chunk
+        id = hash(str(chunk))
+
+        if id in cache:
+            return cache[id]
+        else:
+            cache[id] = {'.id': counter}
+            chunk['.cache_id'] = counter
+            counter += 1
+
+        for name in sorted(chunk.keys()):
+            value = chunk[name]
+            if isinstance(value, list):
+                chunk[name] = [helper(child) for child in value]
+            elif isinstance(value, dict):
+                chunk[name] = helper(value)
+
+        return chunk
+    out = helper(chunk)
+    return out
+
+def decompress(chunk: JsonDict) -> JsonDict:
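+    # Reverse of compress(): collect dicts tagged with '.cache_id' and replace
+    # {'.id': n} stubs with the corresponding original dicts.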
+    cache = {}  # type: Dict[int, JsonDict]
+    def helper(chunk: Any) -> Any:
+        if not isinstance(chunk, dict):
+            return chunk
+        if '.id' in chunk:
+            return cache[chunk['.id']]
+
+        counter = None
+        if '.cache_id' in chunk:
+            counter = chunk['.cache_id']
+            del chunk['.cache_id']
+
+        for name in sorted(chunk.keys()):
+            value = chunk[name]
+            if isinstance(value, list):
+                chunk[name] = [helper(child) for child in value]
+            elif isinstance(value, dict):
+                chunk[name] = helper(value)
+
+        if counter is not None:
+            cache[counter] = chunk
+
+        return chunk
+    return helper(chunk)
+
+
+
+
+def main() -> None:
+    json_chunks = list(get_files(ROOT))
+    class_chunks = list(extract_classes(json_chunks))
+
+    total_size = sum(chunk.total_size for chunk in json_chunks)
+    print("Total cache size: {:.3f} megabytes".format(total_size / (1024 * 1024)))
+    print()
+
+    class_name_counter = Counter(chunk[".class"] for chunk in class_chunks)
+    print("Most commonly used classes:")
+    report_counter(class_name_counter)
+
+    print("Most common literal chunks:")
+    report_most_common(class_chunks, 15)
+
+    build = None
+    for chunk in json_chunks:
+        if 'build.*.json' in chunk.filename:
+            build = chunk
+            break
+    assert build is not None, "Could not find build.*.json in the cache"
+    original = json.dumps(build.data, sort_keys=True)
+    print("Size of build.data.json, in kilobytes: {:.3f}".format(len(original) / 1024))
+
+    build.data = compress(build.data)
+    compressed = json.dumps(build.data, sort_keys=True)
+    print("Size of compressed build.data.json, in kilobytes: {:.3f}".format(len(compressed) / 1024))
+
+    build.data = decompress(build.data)
+    decompressed = json.dumps(build.data, sort_keys=True)
+    print("Size of decompressed build.data.json, in kilobytes: {:.3f}".format(len(decompressed) / 1024))
+
+    print("Lossless conversion back", original == decompressed)
+
+
+    '''var_chunks = list(pluck("Var", class_chunks))
+    report_most_common(var_chunks, 20)
+    print()
+
+    #for var in var_chunks:
+    #    if var['fullname'] == 'self' and not (isinstance(var['type'], dict) and var['type']['.class'] == 'AnyType'):
+    #        print(var)
+    #argument_chunks = list(pluck("Argument", class_chunks))
+
+    symbol_table_node_chunks = list(pluck("SymbolTableNode", class_chunks))
+    report_most_common(symbol_table_node_chunks, 20)
+
+    print()
+    print("Most common")
+    report_most_common(class_chunks, 20)
+    print()'''
+
+
+if __name__ == '__main__':
+    main()
diff --git a/misc/async_matrix.py b/misc/async_matrix.py
new file mode 100644
index 0000000..e9a758a
--- /dev/null
+++ b/misc/async_matrix.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+"""Test various combinations of generators/coroutines.
+
+This was used to cross-check the errors in the test case
+testFullCoroutineMatrix in test-data/unit/check-async-await.test.
+"""
+
+import sys
+from types import coroutine
+from typing import Any, AsyncIterator, Awaitable, Generator, Iterator
+
+# The various things you might try to use in `await` or `yield from`.
+
+def plain_generator() -> Generator[str, None, int]:
+    yield 'a'
+    return 1
+
+async def plain_coroutine() -> int:
+    return 1
+
+@coroutine
+def decorated_generator() -> Generator[str, None, int]:
+    yield 'a'
+    return 1
+
+@coroutine
+async def decorated_coroutine() -> int:
+    return 1
+
+class It(Iterator[str]):
+    stop = False
+    def __iter__(self) -> 'It':
+        return self
+    def __next__(self) -> str:
+        if self.stop:
+            raise StopIteration('end')
+        else:
+            self.stop = True
+            return 'a'
+
+def other_iterator() -> It:
+    return It()
+
+class Aw(Awaitable[int]):
+    def __await__(self) -> Generator[str, Any, int]:
+        yield 'a'
+        return 1
+
+def other_coroutine() -> Aw:
+    return Aw()
+
+# The various contexts in which `await` or `yield from` might occur.
+
+def plain_host_generator(func) -> Generator[str, None, None]:
+    yield 'a'
+    x = 0
+    f = func()
+    try:
+        x = yield from f
+    finally:
+        try:
+            f.close()
+        except AttributeError:
+            pass
+
+async def plain_host_coroutine(func) -> None:
+    x = 0
+    x = await func()
+
+@coroutine
+def decorated_host_generator(func) -> Generator[str, None, None]:
+    yield 'a'
+    x = 0
+    f = func()
+    try:
+        x = yield from f
+    finally:
+        try:
+            f.close()
+        except AttributeError:
+            pass
+
+@coroutine
+async def decorated_host_coroutine(func) -> None:
+    x = 0
+    x = await func()
+
+# Main driver.
+
+def main():
+    verbose = ('-v' in sys.argv)
+    for host in [plain_host_generator, plain_host_coroutine,
+                 decorated_host_generator, decorated_host_coroutine]:
+        print()
+        print("==== Host:", host.__name__)
+        for func in [plain_generator, plain_coroutine,
+                     decorated_generator, decorated_coroutine,
+                     other_iterator, other_coroutine]:
+            print("  ---- Func:", func.__name__)
+            try:
+                f = host(func)
+                for i in range(10):
+                    try:
+                        x = f.send(None)
+                        if verbose:
+                            print("    yield:", x)
+                    except StopIteration as e:
+                        if verbose:
+                            print("    stop:", e.value)
+                        break
+                else:
+                    if verbose:
+                        print("    ???? still going")
+            except Exception as e:
+                print("    error:", repr(e))
+
+# Run main().
+
+if __name__ == '__main__':
+    main()
diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py
new file mode 100644
index 0000000..0b552bf
--- /dev/null
+++ b/misc/fix_annotate.py
@@ -0,0 +1,219 @@
+"""Fixer for lib2to3 that inserts mypy annotations into all methods.
+
+The simplest way to run this is to copy it into lib2to3's "fixes"
+subdirectory and then run "2to3 -f annotate" over your files.
+
+The fixer transforms e.g.
+
+  def foo(self, bar, baz=12):
+      return bar + baz
+
+into
+
+  def foo(self, bar, baz=12):
+      # type: (Any, int) -> Any
+      return bar + baz
+
+It does not do type inference but it recognizes some basic default
+argument values such as numbers and strings (and assumes their type
+implies the argument type).
+
+It also uses some basic heuristics to decide whether to ignore the
+first argument:
+
+  - always if it's named 'self'
+  - if there's a @classmethod decorator
+
+Finally, it knows that __init__() is supposed to return None.
+"""
+
+from __future__ import print_function
+
+import os
+import re
+
+from lib2to3.fixer_base import BaseFix
+from lib2to3.patcomp import compile_pattern
+from lib2to3.pytree import Leaf, Node
+from lib2to3.fixer_util import token, syms, touch_import
+
+
+class FixAnnotate(BaseFix):
+
+    # This fixer is compatible with the bottom matcher.
+    BM_compatible = True
+
+    # This fixer shouldn't run by default.
+    explicit = True
+
+    # The pattern to match.
+    PATTERN = """
+              funcdef< 'def' name=any parameters< '(' [args=any] ')' > ':' suite=any+ >
+              """
+
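+    # Optional cap on how many annotations to insert, read from the MAXFIXES
+    # environment variable (None means no limit); transform() decrements it
+    # and stops adding annotations once it reaches zero.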
+    counter = None if not os.getenv('MAXFIXES') else int(os.getenv('MAXFIXES'))
+
+    def transform(self, node, results):
+        if FixAnnotate.counter is not None:
+            if FixAnnotate.counter <= 0:
+                return
+        suite = results['suite']
+        children = suite[0].children
+
+        # NOTE: I've reverse-engineered the structure of the parse tree.
+        # It's always a list of nodes, the first of which contains the
+        # entire suite.  Its children seem to be:
+        #
+        #   [0] NEWLINE
+        #   [1] INDENT
+        #   [2...n-2] statements (the first may be a docstring)
+        #   [n-1] DEDENT
+        #
+        # Comments before the suite are part of the INDENT's prefix.
+        #
+        # "Compact" functions (e.g. "def foo(x, y): return max(x, y)")
+        # have a different structure that isn't matched by PATTERN.
+
+        ## print('-'*60)
+        ## print(node)
+        ## for i, ch in enumerate(children):
+        ##     print(i, repr(ch.prefix), repr(ch))
+
+        # Check if there's already an annotation.
+        for ch in children:
+            if ch.prefix.lstrip().startswith('# type:'):
+                return  # There's already a # type: comment here; don't change anything.
+
+        # Compute the annotation
+        annot = self.make_annotation(node, results)
+
+        # Insert '# type: {annot}' comment.
+        # For reference, see lib2to3/fixes/fix_tuple_params.py in stdlib.
+        if len(children) >= 2 and children[1].type == token.INDENT:
+            children[1].prefix = '%s# type: %s\n%s' % (children[1].value, annot, children[1].prefix)
+            children[1].changed()
+            if FixAnnotate.counter is not None:
+                FixAnnotate.counter -= 1
+
+        # Also add 'from typing import Any' at the top.
+        if 'Any' in annot:
+            touch_import('typing', 'Any', node)
+
+    def make_annotation(self, node, results):
+        name = results['name']
+        assert isinstance(name, Leaf), repr(name)
+        assert name.type == token.NAME, repr(name)
+        decorators = self.get_decorators(node)
+        is_method = self.is_method(node)
+        if name.value == '__init__' or not self.has_return_exprs(node):
+            restype = 'None'
+        else:
+            restype = 'Any'
+        args = results.get('args')
+        argtypes = []
+        if isinstance(args, Node):
+            children = args.children
+        elif isinstance(args, Leaf):
+            children = [args]
+        else:
+            children = []
+        # Interpret children according to the following grammar:
+        # (('*'|'**')? NAME ['=' expr] ','?)*
+        stars = inferred_type = ''
+        in_default = False
+        at_start = True
+        for child in children:
+            if isinstance(child, Leaf):
+                if child.value in ('*', '**'):
+                    stars += child.value
+                elif child.type == token.NAME and not in_default:
+                    if not is_method or not at_start or 'staticmethod' in decorators:
+                        inferred_type = 'Any'
+                    else:
+                        # Always skip the first argument if it's named 'self'.
+                        # Always skip the first argument of a class method.
+                        if child.value == 'self' or 'classmethod' in decorators:
+                            pass
+                        else:
+                            inferred_type = 'Any'
+                elif child.value == '=':
+                    in_default = True
+                elif in_default and child.value != ',':
+                    if child.type == token.NUMBER:
+                        if re.match(r'\d+[lL]?$', child.value):
+                            inferred_type = 'int'
+                        else:
+                            inferred_type = 'float'  # TODO: complex?
+                    elif child.type == token.STRING:
+                        if child.value.startswith(('u', 'U')):
+                            inferred_type = 'unicode'
+                        else:
+                            inferred_type = 'str'
+                    elif child.type == token.NAME and child.value in ('True', 'False'):
+                        inferred_type = 'bool'
+                elif child.value == ',':
+                    if inferred_type:
+                        argtypes.append(stars + inferred_type)
+                    # Reset
+                    stars = inferred_type = ''
+                    in_default = False
+                    at_start = False
+        if inferred_type:
+            argtypes.append(stars + inferred_type)
+        return '(' + ', '.join(argtypes) + ') -> ' + restype
+
+    # The parse tree has a different shape when there is a single
+    # decorator vs. when there are multiple decorators.
+    DECORATED = "decorated< (d=decorator | decorators< dd=decorator+ >) funcdef >"
+    decorated = compile_pattern(DECORATED)
+
+    def get_decorators(self, node):
+        """Return a list of decorators found on a function definition.
+
+        This is a list of strings; only simple decorators
+        (e.g. @staticmethod) are returned.
+
+        If the function is undecorated or only non-simple decorators
+        are found, return [].
+        """
+        if node.parent is None:
+            return []
+        results = {}
+        if not self.decorated.match(node.parent, results):
+            return []
+        decorators = results.get('dd') or [results['d']]
+        decs = []
+        for d in decorators:
+            for child in d.children:
+                if isinstance(child, Leaf) and child.type == token.NAME:
+                    decs.append(child.value)
+        return decs
+
+    def is_method(self, node):
+        """Return whether the node occurs (directly) inside a class."""
+        node = node.parent
+        while node is not None:
+            if node.type == syms.classdef:
+                return True
+            if node.type == syms.funcdef:
+                return False
+            node = node.parent
+        return False
+
+    RETURN_EXPR = "return_stmt< 'return' any >"
+    return_expr = compile_pattern(RETURN_EXPR)
+
+    def has_return_exprs(self, node):
+        """Traverse the tree below node looking for 'return expr'.
+
+        Return True if at least one 'return expr' is found, False if not.
+        (If both 'return' and 'return expr' are found, return True.)
+        """
+        results = {}
+        if self.return_expr.match(node, results):
+            return True
+        for child in node.children:
+            if child.type not in (syms.funcdef, syms.classdef):
+                if self.has_return_exprs(child):
+                    return True
+        return False
diff --git a/misc/incremental_checker.py b/misc/incremental_checker.py
new file mode 100755
index 0000000..515e662
--- /dev/null
+++ b/misc/incremental_checker.py
@@ -0,0 +1,356 @@
+#!/usr/bin/env python3
+"""
+This file compares the output and runtime of running normal vs incremental mode
+on the history of any arbitrary git repo as a way of performing a sanity check
+to make sure incremental mode is working correctly and efficiently.
+
+It does so by first running mypy without incremental mode on the specified range
+of commits to find the expected result, then rewinds back to the first commit and
+re-runs mypy on the commits with incremental mode enabled to make sure it returns
+the same results.
+
+This script will download and test the official mypy repo by default. Running:
+
+    python3 misc/incremental_checker.py last 30
+
+is equivalent to running
+
+    python3 misc/incremental_checker.py last 30 \\
+            --repo_url https://github.com/python/mypy.git \\
+            --file-path mypy
+
+You can choose to run this script against a specific commit id or against the
+last n commits.
+
+To run this script against the last 30 commits:
+
+    python3 misc/incremental_checker.py last 30
+
+To run this script starting from the commit id 2a432b:
+
+    python3 misc/incremental_checker.py commit 2a432b
+"""
+
+from typing import Any, Dict, List, Optional, Tuple
+
+from argparse import (ArgumentParser, RawDescriptionHelpFormatter,
+                      ArgumentDefaultsHelpFormatter, Namespace)
+import base64
+import json
+import os
+import random
+import shutil
+import subprocess
+import sys
+import textwrap
+import time
+
+
+CACHE_PATH = ".incremental_checker_cache.json"
+MYPY_REPO_URL = "https://github.com/python/mypy.git"
+MYPY_TARGET_FILE = "mypy"
+
+JsonDict = Dict[str, Any]
+
+
+def print_offset(text: str, indent_length: int = 4) -> None:
+    print()
+    print(textwrap.indent(text, ' ' * indent_length))
+    print()
+
+
+def delete_folder(folder_path: str) -> None:
+    if os.path.exists(folder_path):
+        shutil.rmtree(folder_path)
+
+
+def execute(command: List[str], fail_on_error: bool = True) -> Tuple[str, str, int]:
+    proc = subprocess.Popen(
+        ' '.join(command),
+        stderr=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        shell=True)
+    stdout_bytes, stderr_bytes = proc.communicate()  # type: Tuple[bytes, bytes]
+    stdout, stderr = stdout_bytes.decode('utf-8'), stderr_bytes.decode('utf-8')
+    if fail_on_error and proc.returncode != 0:
+        print('EXECUTED COMMAND:', repr(command))
+        print('RETURN CODE:', proc.returncode)
+        print()
+        print('STDOUT:')
+        print_offset(stdout)
+        print('STDERR:')
+        print_offset(stderr)
+        raise RuntimeError('Unexpected error from external tool.')
+    return stdout, stderr, proc.returncode
+
+
+def ensure_environment_is_ready(mypy_path: str, temp_repo_path: str, mypy_cache_path: str) -> None:
+    os.chdir(mypy_path)
+    delete_folder(temp_repo_path)
+    delete_folder(mypy_cache_path)
+
+
+def initialize_repo(repo_url: str, temp_repo_path: str, branch: str) -> None:
+    print("Cloning repo {0} to {1}".format(repo_url, temp_repo_path))
+    execute(["git", "clone", repo_url, temp_repo_path])
+    if branch is not None:
+        print("Checking out branch {}".format(branch))
+        execute(["git", "-C", temp_repo_path, "checkout", branch])
+
+
+def get_commits(repo_folder_path: str, commit_range: str) -> List[Tuple[str, str]]:
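+    """Return (commit id, message) pairs for `commit_range`, oldest first."""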
+    raw_data, _stderr, _errcode = execute([
+        "git", "-C", repo_folder_path, "log", "--reverse", "--oneline", commit_range])
+    output = []
+    for line in raw_data.strip().split('\n'):
+        commit_id, _, message = line.partition(' ')
+        output.append((commit_id, message))
+    return output
+
+
+def get_commits_starting_at(repo_folder_path: str, start_commit: str) -> List[Tuple[str, str]]:
+    print("Fetching commits starting at {0}".format(start_commit))
+    return get_commits(repo_folder_path, '{0}^..HEAD'.format(start_commit))
+
+
+def get_nth_commit(repo_folder_path: str, n: int) -> Tuple[str, str]:
+    print("Fetching last {} commits (or all, if there are fewer commits than n)".format(n))
+    return get_commits(repo_folder_path, '-{}'.format(n))[0]
+
+
+def run_mypy(target_file_path: Optional[str],
+             mypy_cache_path: str,
+             mypy_script: Optional[str],
+             incremental: bool = True,
+             verbose: bool = False) -> Tuple[float, str]:
+    """Runs mypy against `target_file_path` and returns what mypy prints to stdout as a string.
+
+    If `incremental` is set to True, this function will store and retrieve all caching data
+    inside `mypy_cache_path`. If `verbose` is set to True, this function will pass the "-v -v"
+    flags to mypy to make it output debugging information.
+    """
+    if mypy_script is None:
+        command = ["python3", "-m", "mypy"]
+    else:
+        command = [mypy_script]
+    command.extend(["--cache-dir", mypy_cache_path])
+    if incremental:
+        command.append("--incremental")
+    if verbose:
+        command.extend(["-v", "-v"])
+    if target_file_path is not None:
+        command.append(target_file_path)
+    start = time.time()
+    output, stderr, _ = execute(command, False)
+    if stderr != "":
+        output = stderr
+    runtime = time.time() - start
+    return runtime, output
+
+
+def load_cache(incremental_cache_path: str = CACHE_PATH) -> JsonDict:
+    if os.path.exists(incremental_cache_path):
+        with open(incremental_cache_path, 'r') as stream:
+            return json.load(stream)
+    else:
+        return {}
+
+
+def save_cache(cache: JsonDict, incremental_cache_path: str = CACHE_PATH) -> None:
+    with open(incremental_cache_path, 'w') as stream:
+        json.dump(cache, stream, indent=2)
+
+
+def set_expected(commits: List[Tuple[str, str]],
+                 cache: JsonDict,
+                 temp_repo_path: str,
+                 target_file_path: Optional[str],
+                 mypy_cache_path: str,
+                 mypy_script: Optional[str]) -> None:
+    """Populates the given `cache` with the expected results for all of the given `commits`.
+
+    This function runs mypy on the `target_file_path` inside the `temp_repo_path`, and stores
+    the result in the `cache`.
+
+    If `cache` already contains results for a particular commit, this function will
+    skip evaluating that commit and move on to the next."""
+    for commit_id, message in commits:
+        if commit_id in cache:
+            print('Skipping commit (already cached): {0}: "{1}"'.format(commit_id, message))
+        else:
+            print('Caching expected output for commit {0}: "{1}"'.format(commit_id, message))
+            execute(["git", "-C", temp_repo_path, "checkout", commit_id])
+            runtime, output = run_mypy(target_file_path, mypy_cache_path, mypy_script,
+                                       incremental=False)
+            cache[commit_id] = {'runtime': runtime, 'output': output}
+            if output == "":
+                print("    Clean output ({:.3f} sec)".format(runtime))
+            else:
+                print("    Output ({:.3f} sec)".format(runtime))
+                print_offset(output, 8)
+    print()
+
+
+def test_incremental(commits: List[Tuple[str, str]],
+                     cache: JsonDict,
+                     temp_repo_path: str,
+                     target_file_path: Optional[str],
+                     mypy_cache_path: str,
+                     mypy_script: Optional[str]) -> None:
+    """Runs incremental mode on all `commits` to verify the output matches the expected output.
+
+    This function runs mypy on the `target_file_path` inside the `temp_repo_path`. The
+    expected output must be stored inside of the given `cache`.
+    """
+    print("Note: first commit is evaluated twice to warm up cache")
+    commits = [commits[0]] + commits
+    for commit_id, message in commits:
+        print('Now testing commit {0}: "{1}"'.format(commit_id, message))
+        execute(["git", "-C", temp_repo_path, "checkout", commit_id])
+        runtime, output = run_mypy(target_file_path, mypy_cache_path, mypy_script,
+                                   incremental=True)
+        expected_runtime = cache[commit_id]['runtime']  # type: float
+        expected_output = cache[commit_id]['output']  # type: str
+        if output != expected_output:
+            print("    Output does not match expected result!")
+            print("    Expected output ({:.3f} sec):".format(expected_runtime))
+            print_offset(expected_output, 8)
+            print("    Actual output: ({:.3f} sec):".format(runtime))
+            print_offset(output, 8)
+        else:
+            print("    Output matches expected result!")
+            print("    Incremental: {:.3f} sec".format(runtime))
+            print("    Original:    {:.3f} sec".format(expected_runtime))
+
+
+def cleanup(temp_repo_path: str, mypy_cache_path: str) -> None:
+    delete_folder(temp_repo_path)
+    delete_folder(mypy_cache_path)
+
+
+def test_repo(target_repo_url: str, temp_repo_path: str,
+              target_file_path: Optional[str],
+              mypy_path: str, incremental_cache_path: str, mypy_cache_path: str,
+              range_type: str, range_start: str, branch: str,
+              params: Optional[Namespace] = None) -> None:
+    """Tests incremental mode against the repo specified in `target_repo_url`.
+
+    This algorithm runs in five main stages:
+
+    1.  Clones `target_repo_url` into the `temp_repo_path` folder locally,
+        checking out the specified `branch` if applicable.
+    2.  Examines the repo's history to get the list of all commits to
+        test incremental mode on.
+    3.  Runs mypy WITHOUT incremental mode against the `target_file_path` (which is
+        assumed to be located inside the `temp_repo_path`), testing each commit
+        discovered in stage two.
+        -   If the results of running mypy WITHOUT incremental mode on a
+            particular commit are already cached inside the `incremental_cache_path`,
+            skip that commit to save time.
+        -   Cache the results after finishing.
+    4.  Rewind back to the first commit, and run mypy WITH incremental mode
+        against the `target_file_path` commit-by-commit, and compare to the expected
+        results found in stage 3.
+    5.  Delete all unnecessary temp files.
+    """
+    # Stage 1: Clone the repo and get ready to begin testing
+    ensure_environment_is_ready(mypy_path, temp_repo_path, mypy_cache_path)
+    initialize_repo(target_repo_url, temp_repo_path, branch)
+
+    # Stage 2: Get all commits we want to test
+    if range_type == "last":
+        start_commit = get_nth_commit(temp_repo_path, int(range_start))[0]
+    elif range_type == "commit":
+        start_commit = range_start
+    else:
+        raise RuntimeError("Invalid option: {}".format(range_type))
+    commits = get_commits_starting_at(temp_repo_path, start_commit)
+    if params is not None and params.sample:
+        seed = params.seed or base64.urlsafe_b64encode(os.urandom(15)).decode('ascii')
+        random.seed(seed)
+        commits = random.sample(commits, params.sample)
+        print("Sampled down to %d commits using random seed %s" % (len(commits), seed))
+
+    # Stage 3: Find and cache expected results for each commit (without incremental mode)
+    cache = load_cache(incremental_cache_path)
+    set_expected(commits, cache, temp_repo_path, target_file_path, mypy_cache_path,
+                 mypy_script=params.mypy_script)
+    save_cache(cache, incremental_cache_path)
+
+    # Stage 4: Rewind and re-run mypy (with incremental mode enabled)
+    test_incremental(commits, cache, temp_repo_path, target_file_path, mypy_cache_path,
+                     mypy_script=params.mypy_script)
+
+    # Stage 5: Remove temp files
+    cleanup(temp_repo_path, mypy_cache_path)
+
+
+def main() -> None:
+    help_factory = (lambda prog: RawDescriptionHelpFormatter(prog=prog, max_help_position=32))
+    parser = ArgumentParser(
+        prog='incremental_checker',
+        description=__doc__,
+        formatter_class=help_factory)
+
+    parser.add_argument("range_type", metavar="START_TYPE", choices=["last", "commit"],
+                        help="must be one of 'last' or 'commit'")
+    parser.add_argument("range_start", metavar="COMMIT_ID_OR_NUMBER",
+                        help="the commit id to start from, or the number of "
+                        "commits to move back (see above)")
+    parser.add_argument("-r", "--repo_url", default=MYPY_REPO_URL, metavar="URL",
+                        help="the repo to clone and run tests on")
+    parser.add_argument("-f", "--file-path", default=MYPY_TARGET_FILE, metavar="FILE",
+                        help="the name of the file or directory to typecheck")
+    parser.add_argument("--cache-path", default=CACHE_PATH, metavar="DIR",
+                        help="sets a custom location to store cache data")
+    parser.add_argument("--branch", default=None, metavar="NAME",
+                        help="check out and test a custom branch; "
+                        "uses the default if not specified")
+    parser.add_argument("--sample", type=int, help="use a random sample of size SAMPLE")
+    parser.add_argument("--seed", type=str, help="random seed")
+    parser.add_argument("--mypy-script", type=str, help="alternate mypy script to run")
+
+    if len(sys.argv[1:]) == 0:
+        parser.print_help()
+        parser.exit()
+
+    params = parser.parse_args(sys.argv[1:])
+
+    # Make all paths absolute so we avoid having to worry about being in the right folder
+
+    # The path to this specific script (incremental_checker.py).
+    script_path = os.path.abspath(sys.argv[0])
+
+    # The path to the mypy repo.
+    mypy_path = os.path.abspath(os.path.dirname(os.path.dirname(script_path)))
+
+    # The folder the cloned repo will reside in.
+    temp_repo_path = os.path.abspath(os.path.join(mypy_path, "tmp_repo"))
+
+    # The particular file or package to typecheck inside the repo.
+    if params.file_path:
+        target_file_path = os.path.abspath(os.path.join(temp_repo_path, params.file_path))
+    else:
+        # Allow `-f ''` to clear target_file_path.
+        target_file_path = None
+
+    # The path to where the incremental checker cache data is stored.
+    incremental_cache_path = os.path.abspath(params.cache_path)
+
+    # The path to store the mypy incremental mode cache data
+    mypy_cache_path = os.path.abspath(os.path.join(mypy_path, "misc", ".mypy_cache"))
+
+    print("Assuming mypy is located at {0}".format(mypy_path))
+    print("Temp repo will be cloned at {0}".format(temp_repo_path))
+    print("Testing file/dir located at {0}".format(target_file_path))
+    print("Using cache data located at {0}".format(incremental_cache_path))
+    print()
+
+    test_repo(params.repo_url, temp_repo_path, target_file_path,
+              mypy_path, incremental_cache_path, mypy_cache_path,
+              params.range_type, params.range_start, params.branch,
+              params)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/misc/macs.el b/misc/macs.el
new file mode 100644
index 0000000..67d80aa
--- /dev/null
+++ b/misc/macs.el
@@ -0,0 +1,22 @@
+; Example Emacs integration; shows type of expression in region.
+
+(defun mypy-show-region ()
+  "Show type of variable at point."
+  (interactive)
+  (let ((here (region-beginning))
+        (there (region-end))
+        (filename (buffer-file-name)))
+    (let ((hereline (line-number-at-pos here))
+          (herecol (save-excursion (goto-char here) (current-column)))
+          (thereline (line-number-at-pos there))
+          (therecol (save-excursion (goto-char there) (current-column))))
+      (shell-command
+       (format "cd ~/src/mypy; python3 ./scripts/find_type.py %s %s %s %s %s python3 -m mypy -i mypy"
+               filename hereline herecol thereline therecol)
+       )
+      )
+    )
+  )
+
+; I like to bind this to ^X-t.
+(global-set-key "\C-xt" 'mypy-show-region)
diff --git a/misc/perf_checker.py b/misc/perf_checker.py
new file mode 100644
index 0000000..e55f8cc
--- /dev/null
+++ b/misc/perf_checker.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
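+"""Rough benchmark of mypy self-check performance.
+
+Times three scenarios over a few trials each: a plain (non-incremental) run,
+an incremental run with a cold cache, and an incremental run with a warm
+cache, then prints the mean and standard deviation of each.
+"""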
+
+from typing import Callable, List, Tuple
+
+import os
+import shutil
+import statistics
+import subprocess
+import textwrap
+import time
+
+
+class Command:
+    def __init__(self, setup: Callable[[], None], command: Callable[[], None]) -> None:
+        self.setup = setup
+        self.command = command
+
+
+def print_offset(text: str, indent_length: int = 4) -> None:
+    print()
+    print(textwrap.indent(text, ' ' * indent_length))
+    print()
+
+
+def delete_folder(folder_path: str) -> None:
+    if os.path.exists(folder_path):
+        shutil.rmtree(folder_path)
+
+
+def execute(command: List[str]) -> None:
+    proc = subprocess.Popen(
+        ' '.join(command),
+        stderr=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        shell=True)
+    stdout_bytes, stderr_bytes = proc.communicate()  # type: Tuple[bytes, bytes]
+    stdout, stderr = stdout_bytes.decode('utf-8'), stderr_bytes.decode('utf-8')
+    if proc.returncode != 0:
+        print('EXECUTED COMMAND:', repr(command))
+        print('RETURN CODE:', proc.returncode)
+        print()
+        print('STDOUT:')
+        print_offset(stdout)
+        print('STDERR:')
+        print_offset(stderr)
+        raise RuntimeError('Unexpected error from external tool.')
+
+
+def trial(num_trials: int, command: Command) -> List[float]:
+    trials = []
+    for i in range(num_trials):
+        command.setup()
+        start = time.time()
+        command.command()
+        delta = time.time() - start
+        trials.append(delta)
+    return trials
+
+
+def report(name: str, times: List[float]) -> None:
+    print("{}:".format(name))
+    print("  Times: {}".format(times))
+    print("  Mean:  {}".format(statistics.mean(times)))
+    print("  Stdev: {}".format(statistics.stdev(times)))
+    print()
+
+
+def main() -> None:
+    trials = 3
+
+    print("Testing baseline")
+    baseline = trial(trials, Command(
+        lambda: None,
+        lambda: execute(["python3", "-m", "mypy", "mypy"])))
+    report("Baseline", baseline)
+
+    print("Testing cold cache")
+    cold_cache = trial(trials, Command(
+        lambda: delete_folder(".mypy_cache"),
+        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"])))
+    report("Cold cache", cold_cache)
+
+    print("Testing warm cache")
+    execute(["python3", "-m", "mypy", "-i", "mypy"])
+    warm_cache = trial(trials, Command(
+        lambda: None,
+        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"])))
+    report("Warm cache", warm_cache)
+
+
+if __name__ == '__main__':
+    main()
+
diff --git a/misc/remove-eol-whitespace.sh b/misc/remove-eol-whitespace.sh
new file mode 100644
index 0000000..3da6b9d
--- /dev/null
+++ b/misc/remove-eol-whitespace.sh
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+# Remove trailing whitespace from all non-binary files in a git repo.
+
+# From https://gist.github.com/dpaluy/3690668; originally from here:
+# http://unix.stackexchange.com/questions/36233/how-to-skip-file-in-sed-if-it-contains-regex/36240#36240
+
+git grep -I --name-only -z -e '' | xargs -0 sed -i -e 's/[ \t]\+\(\r\?\)$/\1/'
diff --git a/misc/test_case_to_actual.py b/misc/test_case_to_actual.py
new file mode 100644
index 0000000..9a91bb1
--- /dev/null
+++ b/misc/test_case_to_actual.py
@@ -0,0 +1,71 @@
+from typing import Iterator, List, Optional
+import sys
+import os
+import os.path
+
+
+class Chunk:
+    def __init__(self, header_type: str, args: str) -> None:
+        self.header_type = header_type
+        self.args = args
+        self.lines = []  # type: List[str]
+
+
+def is_header(line: str) -> bool:
+    return line.startswith('[') and line.endswith(']')
+
+
+def normalize(lines: Iterator[str]) -> Iterator[str]:
+    return (line.rstrip() for line in lines)
+
+
+def produce_chunks(lines: Iterator[str]) -> Iterator[Chunk]:
+    current_chunk = None  # type: Optional[Chunk]
+    for line in normalize(lines):
+        if is_header(line):
+            if current_chunk is not None:
+                yield current_chunk
+            parts = line[1:-1].split(' ', 1)
+            args = parts[1] if len(parts) > 1 else ''
+            current_chunk = Chunk(parts[0], args)
+        else:
+            current_chunk.lines.append(line)
+    if current_chunk is not None:
+        yield current_chunk
+
+
+def write_out(filename: str, lines: List[str]) -> None:
+    os.makedirs(os.path.dirname(filename), exist_ok=True)
+    with open(filename, 'w') as stream:
+        stream.write('\n'.join(lines))
+
+
+def write_tree(root: str, chunks: Iterator[Chunk]) -> None:
+    init = next(chunks)
+    assert init.header_type == 'case'
+
+    root = os.path.join(root, init.args)
+    write_out(os.path.join(root, 'main.py'), init.lines)
+
+    for chunk in chunks:
+        if chunk.header_type == 'file' and chunk.args.endswith('.py'):
+            write_out(os.path.join(root, chunk.args), chunk.lines)
+
+
+def help() -> None:
+    print("Usage: python misc/test_case_to_actual.py test_file.txt root_path")
+
+
+def main() -> None:
+    if len(sys.argv) != 3:
+        help()
+        return
+
+    test_file_path, root_path = sys.argv[1], sys.argv[2]
+    with open(test_file_path, 'r') as stream:
+        chunks = produce_chunks(iter(stream))
+        write_tree(root_path, chunks)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/misc/touch_checker.py b/misc/touch_checker.py
new file mode 100644
index 0000000..c44afe4
--- /dev/null
+++ b/misc/touch_checker.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
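+"""Measure incremental mypy performance when individual files change.
+
+For every file under mypy/, either touch it or append a trivial change
+(depending on the command-line argument 'touch' or 'change'), re-run
+`mypy -i mypy`, and report how long each re-check took.
+"""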
+
+from typing import Callable, List, Tuple, Optional
+
+import sys
+import glob
+import os
+import shutil
+import statistics
+import subprocess
+import textwrap
+import time
+
+
+def print_offset(text: str, indent_length: int = 4) -> None:
+    print()
+    print(textwrap.indent(text, ' ' * indent_length))
+    print()
+
+
+def delete_folder(folder_path: str) -> None:
+    if os.path.exists(folder_path):
+        shutil.rmtree(folder_path)
+
+
+def execute(command: List[str]) -> None:
+    proc = subprocess.Popen(
+        ' '.join(command),
+        stderr=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        shell=True)
+    stdout_bytes, stderr_bytes = proc.communicate()  # type: Tuple[bytes, bytes]
+    stdout, stderr = stdout_bytes.decode('utf-8'), stderr_bytes.decode('utf-8')
+    if proc.returncode != 0:
+        print('EXECUTED COMMAND:', repr(command))
+        print('RETURN CODE:', proc.returncode)
+        print()
+        print('STDOUT:')
+        print_offset(stdout)
+        print('STDERR:')
+        print_offset(stderr)
+        print()
+
+
+Command = Callable[[], None]
+
+
+def test(setup: Command, command: Command, teardown: Command) -> float:
+    setup()
+    start = time.time()
+    command()
+    end = time.time() - start
+    teardown()
+    return end
+
+
+def make_touch_wrappers(filename: str) -> Tuple[Command, Command]:
+    def setup() -> None:
+        execute(["touch", filename])
+    def teardown() -> None:
+        pass
+    return setup, teardown
+
+
+def make_change_wrappers(filename: str) -> Tuple[Command, Command]:
+    copy = None  # type: Optional[str]
+
+    def setup() -> None:
+        nonlocal copy
+        with open(filename, 'r') as stream:
+            copy = stream.read()
+        with open(filename, 'a') as stream:
+            stream.write('\n\nfoo = 3')
+
+    def teardown() -> None:
+        assert copy is not None
+        with open(filename, 'w') as stream:
+            stream.write(copy)
+
+        # Re-run to reset cache
+        execute(["python3", "-m", "mypy", "-i", "mypy"])
+
+    return setup, teardown
+
+def main() -> None:
+    if len(sys.argv) != 2 or sys.argv[1] not in {'touch', 'change'}:
+        print("First argument should be 'touch' or 'change'")
+        return
+
+    if sys.argv[1] == 'touch':
+        make_wrappers = make_touch_wrappers
+        verb = "Touching"
+    elif sys.argv[1] == 'change':
+        make_wrappers = make_change_wrappers
+        verb = "Changing"
+    else:
+        raise AssertionError()
+
+    print("Setting up...")
+
+    baseline = test(
+        lambda: None,
+        lambda: execute(["python3", "-m", "mypy", "mypy"]),
+        lambda: None)
+    print("Baseline:   {}".format(baseline))
+
+    cold = test(
+        lambda: delete_folder(".mypy_cache"),
+        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
+        lambda: None)
+    print("Cold cache: {}".format(cold))
+
+    warm = test(
+        lambda: None,
+        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
+        lambda: None)
+    print("Warm cache: {}".format(warm))
+
+    print()
+
+    deltas = []
+    for filename in glob.iglob("mypy/**/*.py", recursive=True):
+        print("{} {}".format(verb, filename))
+
+        setup, teardown = make_wrappers(filename)
+        delta = test(
+            setup,
+            lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
+            teardown)
+        print("    Time: {}".format(delta))
+        deltas.append(delta)
+    print()
+
+    print("Initial:")
+    print("    Baseline:   {}".format(baseline))
+    print("    Cold cache: {}".format(cold))
+    print("    Warm cache: {}".format(warm))
+    print()
+    print("Aggregate:")
+    print("    Times:      {}".format(deltas))
+    print("    Mean:       {}".format(statistics.mean(deltas)))
+    print("    Median:     {}".format(statistics.median(deltas)))
+    print("    Stdev:      {}".format(statistics.stdev(deltas)))
+    print("    Min:        {}".format(min(deltas)))
+    print("    Max:        {}".format(max(deltas)))
+    print("    Total:      {}".format(sum(deltas)))
+    print()
+
+if __name__ == '__main__':
+    main()
+
diff --git a/misc/upload-pypi.py b/misc/upload-pypi.py
new file mode 100644
index 0000000..1b1e956
--- /dev/null
+++ b/misc/upload-pypi.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python3
+"""Build and upload mypy packages for Linux and macOS to PyPI.
+
+Note: This should be run on macOS using official python.org Python 3.6 or
+      later, as this is the only tested configuration. Use --force to
+      run anyway.
+
+This uses a fresh repo clone and a fresh virtualenv to avoid depending on
+local state.
+
+Ideas for improvements:
+
+- also upload Windows wheels
+- try installing the generated packages and running mypy
+- try installing the uploaded packages and running mypy
+- run tests
+- verify that there is a green travis build
+
+"""
+
+import argparse
+import getpass
+import os
+import os.path
+import re
+import subprocess
+import sys
+import tempfile
+from typing import Any
+
+
+class Builder:
+    def __init__(self, version: str, force: bool, no_upload: bool) -> None:
+        if not re.match(r'0\.[0-9]{3}$', version):
+            sys.exit('Invalid version {!r} (expected form 0.123)'.format(version))
+        self.version = version
+        self.force = force
+        self.no_upload = no_upload
+        self.target_dir = tempfile.mkdtemp()
+        self.repo_dir = os.path.join(self.target_dir, 'mypy')
+
+    def build_and_upload(self) -> None:
+        self.prompt()
+        self.run_sanity_checks()
+        print('Temporary target directory: {}'.format(self.target_dir))
+        self.git_clone_repo()
+        self.git_check_out_tag()
+        self.verify_version()
+        self.make_virtualenv()
+        self.install_dependencies()
+        self.make_wheel()
+        self.make_sdist()
+        if not self.no_upload:
+            self.upload_wheel()
+            self.upload_sdist()
+            self.heading('Successfully uploaded wheel and sdist for mypy {}'.format(self.version))
+            print("<< Don't forget to upload Windows wheels! >>")
+        else:
+            self.heading('Successfully built wheel and sdist for mypy {}'.format(self.version))
+            dist_dir = os.path.join(self.repo_dir, 'dist')
+            print('Generated packages in {}:'.format(dist_dir))
+            for fnam in sorted(os.listdir(dist_dir)):
+                print('  {}'.format(os.path.join(dist_dir, fnam)))
+
+    def prompt(self) -> None:
+        if self.force:
+            return
+        extra = '' if self.no_upload else ' and upload'
+        print('This will build{} PyPI packages for mypy {}.'.format(extra, self.version))
+        response = input('Proceed? [yN] ')
+        if response.lower() != 'y':
+            sys.exit('Exiting')
+
+    def verify_version(self) -> None:
+        version_path = os.path.join(self.repo_dir, 'mypy', 'version.py')
+        with open(version_path) as f:
+            contents = f.read()
+        if "'{}'".format(self.version) not in contents:
+            sys.stderr.write(
+                '\nError: Version {} does not match {}/mypy/version.py\n'.format(
+                self.version, self.repo_dir))
+            sys.exit(2)
+
+    def run_sanity_checks(self) -> None:
+        if not sys.version_info >= (3, 6):
+            sys.exit('You must use Python 3.6 or later to build mypy')
+        if sys.platform != 'darwin' and not self.force:
+            sys.exit('You should run this on macOS; use --force to go ahead anyway')
+        os_file = os.path.realpath(os.__file__)
+        if not os_file.startswith('/Library/Frameworks') and not self.force:
+            # Be defensive -- Python from brew may produce bad packages, for example.
+            sys.exit('Error -- run this script using an official Python build from python.org')
+        if getpass.getuser() == 'root':
+            sys.exit('This script must not be run as root')
+
+    def git_clone_repo(self) -> None:
+        self.heading('Cloning mypy git repository')
+        self.run('git clone https://github.com/python/mypy')
+
+    def git_check_out_tag(self) -> None:
+        tag = 'v{}'.format(self.version)
+        self.heading('Check out {}'.format(tag))
+        self.run('cd mypy && git checkout {}'.format(tag))
+        self.run('cd mypy && git submodule update --init typeshed')
+
+    def make_virtualenv(self) -> None:
+        self.heading('Creating a fresh virtualenv')
+        self.run('virtualenv -p {} mypy-venv'.format(sys.executable))
+
+    def install_dependencies(self) -> None:
+        self.heading('Installing build dependencies')
+        self.run_in_virtualenv('pip3 install wheel twine && pip3 install -U setuptools')
+
+    def make_wheel(self) -> None:
+        self.heading('Building wheel')
+        self.run_in_virtualenv('python3 setup.py bdist_wheel')
+
+    def make_sdist(self) -> None:
+        self.heading('Building sdist')
+        self.run_in_virtualenv('python3 setup.py sdist')
+
+    def upload_wheel(self) -> None:
+        self.heading('Uploading wheel')
+        self.run_in_virtualenv('twine upload dist/mypy-{}-py3-none-any.whl'.format(self.version))
+
+    def upload_sdist(self) -> None:
+        self.heading('Uploading sdist')
+        self.run_in_virtualenv('twine upload dist/mypy-{}.tar.gz'.format(self.version))
+
+    def run(self, cmd: str) -> None:
+        try:
+            subprocess.check_call(cmd, shell=True, cwd=self.target_dir)
+        except subprocess.CalledProcessError:
+            sys.stderr.write('Error: Command {!r} failed\n'.format(cmd))
+            sys.exit(1)
+
+    def run_in_virtualenv(self, cmd: str) -> None:
+        self.run('source mypy-venv/bin/activate && cd mypy &&' + cmd)
+
+    def heading(self, heading: str) -> None:
+        print()
+        print('==== {} ===='.format(heading))
+        print()
+
+
+def parse_args() -> Any:
+    parser = argparse.ArgumentParser(
+        description='PyPI mypy package uploader (for non-Windows packages only)')
+    parser.add_argument('--force', action='store_true', default=False,
+                        help='Skip prompts and sanity checks (be careful!)')
+    parser.add_argument('--no-upload', action='store_true', default=False,
+                        help="Only build packages but don't upload")
+    parser.add_argument('version', help='Mypy version to release')
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+    args = parse_args()
+    builder = Builder(args.version, args.force, args.no_upload)
+    builder.build_and_upload()
diff --git a/misc/variadics.py b/misc/variadics.py
new file mode 100644
index 0000000..9200288
--- /dev/null
+++ b/misc/variadics.py
@@ -0,0 +1,54 @@
+"""Example of code generation approach to variadics.
+
+See https://github.com/python/typing/issues/193#issuecomment-236383893
+"""
+
+LIMIT = 5
+BOUND = 'object'
+
+def prelude(limit: int, bound: str) -> None:
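+    """Print the import line and the type variable definitions.
+
+    Illustrative output for bound='object':
+
+        from typing import Callable, Iterable, Iterator, Tuple, TypeVar, overload
+        Ts = TypeVar('Ts', bound=object)
+        R = TypeVar('R')
+        T1 = TypeVar('T1', bound=object)
+        ...
+    """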
+    print('from typing import Callable, Iterable, Iterator, Tuple, TypeVar, overload')
+    print('Ts = TypeVar(\'Ts\', bound={bound})'.format(bound=bound))
+    print('R = TypeVar(\'R\')')
+    for i in range(limit):
+        print('T{i} = TypeVar(\'T{i}\', bound={bound})'.format(i=i+1, bound=bound))
+
+def expand_template(template: str,
+                    arg_template: str = 'arg{i}: {Ts}',
+                    lower: int = 0,
+                    limit: int = LIMIT) -> None:
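+    """Print @overload stubs for arities `lower` .. `limit` - 1, plus a
+    variadic fallback overload.
+
+    For the map() template below this expands (illustratively) to:
+
+        @overload
+        def map(func: Callable[[T1], R], arg1: T1) -> R: ...
+        @overload
+        def map(func: Callable[[T1, T2], R], arg1: T1, arg2: T2) -> R: ...
+        ...
+    """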
+    print()
+    for i in range(lower, limit):
+        tvs = ', '.join('T{i}'.format(i=j+1) for j in range(i))
+        args = ', '.join(arg_template.format(i=j+1, Ts='T{}'.format(j+1))
+                         for j in range(i))
+        print('@overload')
+        s = template.format(Ts=tvs, argsTs=args)
+        s = s.replace('Tuple[]', 'Tuple[()]')
+        print(s)
+    args_l = [arg_template.format(i=j+1, Ts='Ts') for j in range(limit)]
+    args_l.append('*' + (arg_template.format(i='s', Ts='Ts')))
+    args = ', '.join(args_l)
+    s = template.format(Ts='Ts, ...', argsTs=args)
+    s = s.replace('Callable[[Ts, ...]', 'Callable[...')
+    print('@overload')
+    print(s)
+
+def main() -> None:
+    prelude(LIMIT, BOUND)
+
+    # map()
+    expand_template('def map(func: Callable[[{Ts}], R], {argsTs}) -> R: ...',
+                    lower=1)
+    # zip()
+    expand_template('def zip({argsTs}) -> Tuple[{Ts}]: ...')
+
+    # Naomi's examples
+    expand_template('def my_zip({argsTs}) -> Iterator[Tuple[{Ts}]]: ...',
+                    'arg{i}: Iterable[{Ts}]')
+    expand_template('def make_check({argsTs}) -> Callable[[{Ts}], bool]: ...')
+    expand_template('def my_map(f: Callable[[{Ts}], R], {argsTs}) -> Iterator[R]: ...',
+                    'arg{i}: Iterable[{Ts}]')
+
+
+main()
diff --git a/mypy.egg-info/PKG-INFO b/mypy.egg-info/PKG-INFO
index b194318..641f4ed 100644
--- a/mypy.egg-info/PKG-INFO
+++ b/mypy.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: mypy
-Version: 0.520
+Version: 0.521
 Summary: Optional static typing for Python
 Home-page: http://www.mypy-lang.org/
 Author: Jukka Lehtosalo
diff --git a/mypy.egg-info/SOURCES.txt b/mypy.egg-info/SOURCES.txt
index de025b6..0316925 100644
--- a/mypy.egg-info/SOURCES.txt
+++ b/mypy.egg-info/SOURCES.txt
@@ -1,6 +1,66 @@
+.gitignore
+.gitmodules
+.travis.yml
+CONTRIBUTING.md
+CREDITS
+LICENSE
 MANIFEST.in
+README.md
+ROADMAP.md
+appveyor.yml
+build-requirements.txt
+conftest.py
+mypy_self_check.ini
+pytest.ini
+runtests.py
 setup.cfg
 setup.py
+test-requirements.txt
+typeshed
+docs/Makefile
+docs/README.md
+docs/make.bat
+docs/requirements-docs.txt
+docs/source/additional_features.rst
+docs/source/basics.rst
+docs/source/builtin_types.rst
+docs/source/casts.rst
+docs/source/cheat_sheet.rst
+docs/source/cheat_sheet_py3.rst
+docs/source/class_basics.rst
+docs/source/command_line.rst
+docs/source/common_issues.rst
+docs/source/conf.py
+docs/source/config_file.rst
+docs/source/duck_type_compatibility.rst
+docs/source/dynamic_typing.rst
+docs/source/faq.rst
+docs/source/function_overloading.rst
+docs/source/generics.rst
+docs/source/getting_started.rst
+docs/source/index.rst
+docs/source/introduction.rst
+docs/source/kinds_of_types.rst
+docs/source/python2.rst
+docs/source/python36.rst
+docs/source/revision_history.rst
+docs/source/supported_python_features.rst
+docs/source/type_inference_and_annotations.rst
+extensions/README.md
+extensions/mypy_extensions.py
+extensions/setup.py
+misc/actions_stubs.py
+misc/analyze_cache.py
+misc/async_matrix.py
+misc/fix_annotate.py
+misc/incremental_checker.py
+misc/macs.el
+misc/perf_checker.py
+misc/remove-eol-whitespace.sh
+misc/test_case_to_actual.py
+misc/touch_checker.py
+misc/upload-pypi.py
+misc/variadics.py
 mypy/__init__.py
 mypy/__main__.py
 mypy/api.py
@@ -62,12 +122,253 @@ mypy.egg-info/dependency_links.txt
 mypy.egg-info/entry_points.txt
 mypy.egg-info/requires.txt
 mypy.egg-info/top_level.txt
+mypy/myunit/__init__.py
+mypy/myunit/__main__.py
+mypy/server/__init__.py
+mypy/server/astdiff.py
+mypy/server/astmerge.py
+mypy/server/aststrip.py
+mypy/server/deps.py
+mypy/server/subexpr.py
+mypy/server/target.py
+mypy/server/trigger.py
+mypy/server/update.py
+mypy/test/__init__.py
+mypy/test/collect.py
+mypy/test/config.py
+mypy/test/data.py
+mypy/test/helpers.py
+mypy/test/testargs.py
+mypy/test/testcheck.py
+mypy/test/testcmdline.py
+mypy/test/testdeps.py
+mypy/test/testdiff.py
+mypy/test/testextensions.py
+mypy/test/testfinegrained.py
+mypy/test/testgraph.py
+mypy/test/testinfer.py
+mypy/test/testmerge.py
+mypy/test/testmoduleinfo.py
+mypy/test/testparse.py
+mypy/test/testpythoneval.py
+mypy/test/testreports.py
+mypy/test/testsemanal.py
+mypy/test/testsolve.py
+mypy/test/teststubgen.py
+mypy/test/testsubtypes.py
+mypy/test/testtransform.py
+mypy/test/testtypegen.py
+mypy/test/testtypes.py
+mypy/test/update.py
+pinfer/.gitignore
+pinfer/LICENSE
+pinfer/README
+pinfer/__init__.py
+pinfer/inspect3.py
+pinfer/p.py
+pinfer/pinfer.py
+pinfer/test_pinfer.py
+pinfer/test_pinfer3.py
+pinfer/unparse.py
+pinfer/unparse3.py
 scripts/dumpmodule.py
 scripts/find_type.py
 scripts/mypy
 scripts/mypy.bat
 scripts/stubgen
 scripts/stubtest.py
+scripts/__pycache__/dumpmodule.cpython-36.pyc
+test-data/.flake8
+test-data/samples/bottles.py
+test-data/samples/class.py
+test-data/samples/cmdline.py
+test-data/samples/crawl.py
+test-data/samples/crawl2.py
+test-data/samples/dict.py
+test-data/samples/fib.py
+test-data/samples/files.py
+test-data/samples/for.py
+test-data/samples/generators.py
+test-data/samples/greet.py
+test-data/samples/guess.py
+test-data/samples/hello.py
+test-data/samples/input.py
+test-data/samples/itertool.py
+test-data/samples/readme.txt
+test-data/samples/regexp.py
+test-data/stdlib-samples/3.2/base64.py
+test-data/stdlib-samples/3.2/fnmatch.py
+test-data/stdlib-samples/3.2/genericpath.py
+test-data/stdlib-samples/3.2/getopt.py
+test-data/stdlib-samples/3.2/glob.py
+test-data/stdlib-samples/3.2/posixpath.py
+test-data/stdlib-samples/3.2/pprint.py
+test-data/stdlib-samples/3.2/random.py
+test-data/stdlib-samples/3.2/shutil.py
+test-data/stdlib-samples/3.2/subprocess.py
+test-data/stdlib-samples/3.2/tempfile.py
+test-data/stdlib-samples/3.2/textwrap.py
+test-data/stdlib-samples/3.2/incomplete/logging/__init__.py
+test-data/stdlib-samples/3.2/incomplete/urllib/__init__.py
+test-data/stdlib-samples/3.2/incomplete/urllib/parse.py
+test-data/stdlib-samples/3.2/test/__init__.py
+test-data/stdlib-samples/3.2/test/randv2_32.pck
+test-data/stdlib-samples/3.2/test/randv2_64.pck
+test-data/stdlib-samples/3.2/test/randv3.pck
+test-data/stdlib-samples/3.2/test/support.py
+test-data/stdlib-samples/3.2/test/test_base64.py
+test-data/stdlib-samples/3.2/test/test_fnmatch.py
+test-data/stdlib-samples/3.2/test/test_genericpath.py
+test-data/stdlib-samples/3.2/test/test_getopt.py
+test-data/stdlib-samples/3.2/test/test_glob.py
+test-data/stdlib-samples/3.2/test/test_posixpath.py
+test-data/stdlib-samples/3.2/test/test_pprint.py
+test-data/stdlib-samples/3.2/test/test_random.py
+test-data/stdlib-samples/3.2/test/test_set.py
+test-data/stdlib-samples/3.2/test/test_shutil.py
+test-data/stdlib-samples/3.2/test/test_subprocess.py
+test-data/stdlib-samples/3.2/test/test_tempfile.py
+test-data/stdlib-samples/3.2/test/test_textwrap.py
+test-data/stdlib-samples/3.2/test/tf_inherit_check.py
+test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py
+test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py
+test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py
+test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py
+test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py
+test-data/unit/README.md
+test-data/unit/check-abstract.test
+test-data/unit/check-async-await.test
+test-data/unit/check-basic.test
+test-data/unit/check-bound.test
+test-data/unit/check-callable.test
+test-data/unit/check-class-namedtuple.test
+test-data/unit/check-classes.test
+test-data/unit/check-classvar.test
+test-data/unit/check-columns.test
+test-data/unit/check-custom-plugin.test
+test-data/unit/check-dynamic-typing.test
+test-data/unit/check-enum.test
+test-data/unit/check-expressions.test
+test-data/unit/check-fastparse.test
+test-data/unit/check-flags.test
+test-data/unit/check-functions.test
+test-data/unit/check-generic-subtyping.test
+test-data/unit/check-generics.test
+test-data/unit/check-ignore.test
+test-data/unit/check-incomplete-fixture.test
+test-data/unit/check-incremental.test
+test-data/unit/check-inference-context.test
+test-data/unit/check-inference.test
+test-data/unit/check-isinstance.test
+test-data/unit/check-kwargs.test
+test-data/unit/check-lists.test
+test-data/unit/check-modules.test
+test-data/unit/check-multiple-inheritance.test
+test-data/unit/check-namedtuple.test
+test-data/unit/check-newsyntax.test
+test-data/unit/check-newtype.test
+test-data/unit/check-optional.test
+test-data/unit/check-overloading.test
+test-data/unit/check-python2.test
+test-data/unit/check-selftype.test
+test-data/unit/check-semanal-error.test
+test-data/unit/check-serialize.test
+test-data/unit/check-statements.test
+test-data/unit/check-super.test
+test-data/unit/check-tuples.test
+test-data/unit/check-type-aliases.test
+test-data/unit/check-type-checks.test
+test-data/unit/check-type-promotion.test
+test-data/unit/check-typeddict.test
+test-data/unit/check-typevar-values.test
+test-data/unit/check-underscores.test
+test-data/unit/check-unions.test
+test-data/unit/check-unreachable-code.test
+test-data/unit/check-unsupported.test
+test-data/unit/check-varargs.test
+test-data/unit/check-warnings.test
+test-data/unit/cmdline.test
+test-data/unit/deps.test
+test-data/unit/diff.test
+test-data/unit/fine-grained.test
+test-data/unit/merge.test
+test-data/unit/parse-errors.test
+test-data/unit/parse-python2.test
+test-data/unit/parse.test
+test-data/unit/python2eval.test
+test-data/unit/pythoneval-asyncio.test
+test-data/unit/pythoneval.test
+test-data/unit/semanal-abstractclasses.test
+test-data/unit/semanal-basic.test
+test-data/unit/semanal-classes.test
+test-data/unit/semanal-classvar.test
+test-data/unit/semanal-errors.test
+test-data/unit/semanal-expressions.test
+test-data/unit/semanal-modules.test
+test-data/unit/semanal-namedtuple.test
+test-data/unit/semanal-python2.test
+test-data/unit/semanal-statements.test
+test-data/unit/semanal-symtable.test
+test-data/unit/semanal-typealiases.test
+test-data/unit/semanal-typeddict.test
+test-data/unit/semanal-typeinfo.test
+test-data/unit/semanal-types.test
+test-data/unit/stubgen.test
+test-data/unit/typexport-basic.test
+test-data/unit/fixtures/__new__.pyi
+test-data/unit/fixtures/alias.pyi
+test-data/unit/fixtures/args.pyi
+test-data/unit/fixtures/async_await.pyi
+test-data/unit/fixtures/bool.pyi
+test-data/unit/fixtures/callable.pyi
+test-data/unit/fixtures/classmethod.pyi
+test-data/unit/fixtures/complex.pyi
+test-data/unit/fixtures/dict.pyi
+test-data/unit/fixtures/exception.pyi
+test-data/unit/fixtures/f_string.pyi
+test-data/unit/fixtures/fine_grained.pyi
+test-data/unit/fixtures/float.pyi
+test-data/unit/fixtures/floatdict.pyi
+test-data/unit/fixtures/for.pyi
+test-data/unit/fixtures/function.pyi
+test-data/unit/fixtures/isinstance.pyi
+test-data/unit/fixtures/isinstancelist.pyi
+test-data/unit/fixtures/list.pyi
+test-data/unit/fixtures/module.pyi
+test-data/unit/fixtures/module_all.pyi
+test-data/unit/fixtures/module_all_python2.pyi
+test-data/unit/fixtures/ops.pyi
+test-data/unit/fixtures/primitives.pyi
+test-data/unit/fixtures/property.pyi
+test-data/unit/fixtures/python2.pyi
+test-data/unit/fixtures/set.pyi
+test-data/unit/fixtures/slice.pyi
+test-data/unit/fixtures/staticmethod.pyi
+test-data/unit/fixtures/transform.pyi
+test-data/unit/fixtures/tuple-simple.pyi
+test-data/unit/fixtures/tuple.pyi
+test-data/unit/fixtures/type.pyi
+test-data/unit/fixtures/typing-full.pyi
+test-data/unit/fixtures/union.pyi
+test-data/unit/lib-stub/__builtin__.pyi
+test-data/unit/lib-stub/abc.pyi
+test-data/unit/lib-stub/builtins.pyi
+test-data/unit/lib-stub/collections.pyi
+test-data/unit/lib-stub/enum.pyi
+test-data/unit/lib-stub/mypy_extensions.pyi
+test-data/unit/lib-stub/six.pyi
+test-data/unit/lib-stub/sys.pyi
+test-data/unit/lib-stub/types.pyi
+test-data/unit/lib-stub/typing.pyi
+test-data/unit/plugins/attrhook.py
+test-data/unit/plugins/badreturn.py
+test-data/unit/plugins/badreturn2.py
+test-data/unit/plugins/fnplugin.py
+test-data/unit/plugins/named_callable.py
+test-data/unit/plugins/noentry.py
+test-data/unit/plugins/plugin2.py
+test-data/unit/plugins/type_anal_hook.py
+tmp-test-dirs/.gitignore
 typeshed/stdlib/2/BaseHTTPServer.pyi
 typeshed/stdlib/2/ConfigParser.pyi
 typeshed/stdlib/2/Cookie.pyi
diff --git a/mypy.egg-info/requires.txt b/mypy.egg-info/requires.txt
index f175af7..adb6794 100644
--- a/mypy.egg-info/requires.txt
+++ b/mypy.egg-info/requires.txt
@@ -1 +1 @@
-typed-ast >= 1.0.4, < 1.1.0
+typed-ast<1.1.0,>=1.0.4
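
The two requirement strings above are semantically identical; only the specifier ordering and spacing changed (most likely setuptools normalizing the metadata when the sdist was rebuilt). A quick way to convince yourself, using the third-party packaging library (not part of this diff):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    old = SpecifierSet(">= 1.0.4, < 1.1.0")
    new = SpecifierSet("<1.1.0,>=1.0.4")
    for v in ("1.0.3", "1.0.4", "1.0.9", "1.1.0"):
        assert (Version(v) in old) == (Version(v) in new)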
diff --git a/mypy/build.py b/mypy/build.py
index 18811bf..471f819 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -860,16 +860,17 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
     """
     # TODO: May need to take more build options into account
     meta_json, data_json = get_cache_names(id, path, manager)
-    manager.trace('Looking for {} {}'.format(id, data_json))
+    manager.trace('Looking for {} at {}'.format(id, meta_json))
     if not os.path.exists(meta_json):
-        manager.trace('Could not load cache for {}: could not find {}'.format(id, meta_json))
+        manager.log('Could not load cache for {}: could not find {}'.format(id, meta_json))
         return None
     with open(meta_json, 'r') as f:
         meta_str = f.read()
         manager.trace('Meta {} {}'.format(id, meta_str.rstrip()))
         meta = json.loads(meta_str)  # TODO: Errors
     if not isinstance(meta, dict):
-        manager.trace('Could not load cache for {}: meta cache is not a dict'.format(id))
+        manager.log('Could not load cache for {}: meta cache is not a dict: {}'
+                    .format(id, repr(meta)))
         return None
     m = CacheMeta(
         meta.get('id'),
@@ -891,27 +892,36 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
     if (m.id != id or
             m.mtime is None or m.size is None or
             m.dependencies is None or m.data_mtime is None):
-        manager.trace('Metadata abandoned for {}: attributes are missing'.format(id))
+        manager.log('Metadata abandoned for {}: attributes are missing'.format(id))
         return None
 
     # Ignore cache if generated by an older mypy version.
     if ((m.version_id != manager.version_id and not manager.options.skip_version_check)
             or m.options is None
             or len(m.dependencies) != len(m.dep_prios)):
-        manager.trace('Metadata abandoned for {}: new attributes are missing'.format(id))
+        manager.log('Metadata abandoned for {}: new attributes are missing'.format(id))
         return None
 
     # Ignore cache if (relevant) options aren't the same.
+    # Note that it's fine to mutilate cached_options since it's only used here.
     cached_options = m.options
     current_options = manager.options.clone_for_module(id).select_options_affecting_cache()
     if manager.options.quick_and_dirty:
         # In quick_and_dirty mode allow non-quick_and_dirty cache files.
         cached_options['quick_and_dirty'] = True
-    if not cached_options.get('platform') and manager.options.skip_version_check:
-        # Older versions didn't write platform.
-        cached_options['platform'] = manager.options.platform
+    if manager.options.skip_version_check:
+        # When we're lax about version we're also lax about platform.
+        cached_options['platform'] = current_options['platform']
+    if 'debug_cache' in cached_options:
+        # Older versions included debug_cache, but it's silly to compare it.
+        del cached_options['debug_cache']
     if cached_options != current_options:
-        manager.trace('Metadata abandoned for {}: options differ'.format(id))
+        manager.log('Metadata abandoned for {}: options differ'.format(id))
+        if manager.options.verbosity >= 2:
+            for key in sorted(set(cached_options) | set(current_options)):
+                if cached_options.get(key) != current_options.get(key):
+                    manager.trace('    {}: {} != {}'
+                                  .format(key, cached_options.get(key), current_options.get(key)))
         return None
 
     return m
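
The hunk above mutates a throwaway copy of the cached options so that benign differences (the quick_and_dirty relaxation, the platform under --skip-version-check, and the obsolete debug_cache key) no longer invalidate the cache, and logs the offending keys at verbosity >= 2. A distilled, standalone sketch of that normalization, using plain dicts and hypothetical argument names:

    def options_match(cached: dict, current: dict,
                      quick_and_dirty: bool, skip_version_check: bool) -> bool:
        cached = dict(cached)  # safe to mutate a copy
        if quick_and_dirty:
            # Allow cache files written without quick_and_dirty.
            cached['quick_and_dirty'] = True
        if skip_version_check:
            # Being lax about the version implies being lax about the platform.
            cached['platform'] = current.get('platform')
        # Older versions wrote debug_cache; it is irrelevant here.
        cached.pop('debug_cache', None)
        return cached == current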
@@ -948,41 +958,63 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: str,
     # we use cache data file mtime to propagate information about changes in the dependencies.
 
     if meta is None:
+        manager.log('Metadata not found for {}'.format(id))
+        return None
+
+    # Check data_json; assume if its mtime matches it's good.
+    # TODO: stat() errors
+    data_mtime = getmtime(meta.data_json)
+    if data_mtime != meta.data_mtime:
+        manager.log('Metadata abandoned for {}: data cache is modified'.format(id))
         return None
 
     # TODO: Share stat() outcome with find_module()
     path = os.path.abspath(path)
     st = manager.get_stat(path)  # TODO: Errors
-    if st.st_size != meta.size:
+    size = st.st_size
+    if size != meta.size:
         manager.log('Metadata abandoned for {}: file {} has different size'.format(id, path))
         return None
 
-    if int(st.st_mtime) != meta.mtime or path != meta.path:
+    mtime = int(st.st_mtime)
+    if mtime != meta.mtime or path != meta.path:
         with open(path, 'rb') as f:
             source_hash = hashlib.md5(f.read()).hexdigest()
         if source_hash != meta.hash:
             manager.log('Metadata abandoned for {}: file {} has different hash'.format(id, path))
             return None
         else:
-            manager.log('Metadata ok for {}: file {} (match on path, size, hash)'.format(id, path))
             # Optimization: update mtime and path (otherwise, this mismatch will reappear).
-            meta = meta._replace(mtime=int(st.st_mtime), path=path)
+            meta = meta._replace(mtime=mtime, path=path)
+            # Construct a dict we can pass to json.dumps() (compare to write_cache()).
+            meta_dict = {
+                'id': id,
+                'path': path,
+                'mtime': mtime,
+                'size': size,
+                'hash': source_hash,
+                'data_mtime': data_mtime,
+                'dependencies': meta.dependencies,
+                'suppressed': meta.suppressed,
+                'child_modules': meta.child_modules,
+                'options': (manager.options.clone_for_module(id)
+                            .select_options_affecting_cache()),
+                'dep_prios': meta.dep_prios,
+                'interface_hash': meta.interface_hash,
+                'version_id': manager.version_id,
+            }
             if manager.options.debug_cache:
-                meta_str = json.dumps(meta, indent=2, sort_keys=True)
+                meta_str = json.dumps(meta_dict, indent=2, sort_keys=True)
             else:
-                meta_str = json.dumps(meta)
+                meta_str = json.dumps(meta_dict)
             meta_json, _ = get_cache_names(id, path, manager)
             manager.log('Updating mtime for {}: file {}, meta {}, mtime {}'
                         .format(id, path, meta_json, meta.mtime))
-            atomic_write(meta_json, meta_str)  # Ignore errors, since this is just an optimization.
+            atomic_write(meta_json, meta_str, '\n')  # Ignore errors, it's just an optimization.
+            return meta
 
-    # It's a match on (id, path, mtime/hash, size).
-    # Check data_json; assume if its mtime matches it's good.
-    # TODO: stat() errors
-    if getmtime(meta.data_json) != meta.data_mtime:
-        manager.log('Metadata abandoned for {}: data cache is modified'.format(id))
-        return None
-    manager.log('Found {} {} (metadata is fresh)'.format(id, meta.data_json))
+    # It's a match on (id, path, size, hash, mtime).
+    manager.log('Metadata fresh for {}: file {}'.format(id, path))
     return meta
 
 
@@ -1096,7 +1128,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
         meta_str = json.dumps(meta, indent=2, sort_keys=True)
     else:
         meta_str = json.dumps(meta)
-    if not atomic_write(meta_json, meta_str):
+    if not atomic_write(meta_json, meta_str, '\n'):
         # Most likely the error is the replace() call
         # (see https://github.com/python/mypy/issues/3215).
         # The next run will simply find the cache entry out of date.
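
Both call sites above now pass an explicit trailing newline to atomic_write and treat a False return as non-fatal. The helper itself is not part of this hunk; a minimal sketch of the pattern it relies on (hypothetical name, illustration only):

    import os
    import tempfile

    def atomic_write_sketch(filename: str, *lines: str) -> bool:
        # Write to a temporary file in the same directory, then rename it over
        # the target, so readers never observe a half-written cache file.
        fd, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename) or '.')
        try:
            with os.fdopen(fd, 'w') as f:
                for line in lines:
                    f.write(line)
            os.replace(tmp_name, filename)  # the step most likely to fail on Windows
            return True
        except OSError:
            try:
                os.remove(tmp_name)
            except OSError:
                pass
            return False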
diff --git a/mypy/checker.py b/mypy/checker.py
index 68284c4..aab56b5 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -1413,8 +1413,11 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
 
             if base_type:
                 if not has_no_typevars(base_type):
-                    # TODO: Handle TupleType, don't cast
-                    instance = cast(Instance, self.scope.active_self_type())
+                    self_type = self.scope.active_self_type()
+                    if isinstance(self_type, TupleType):
+                        instance = self_type.fallback
+                    else:
+                        instance = self_type
                     itype = map_instance_to_supertype(instance, base)
                     base_type = expand_type_by_instance(base_type, itype)
 
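
The replaced cast assumed the active self type is always an Instance; the new branch handles classes whose self type is a TupleType by using the tuple's fallback, i.e. the nominal class behind the tuple type. Purely as a hypothetical illustration of the relationship at the level of user code (not mypy internals), a named tuple value is simultaneously a precise tuple and an instance of its generated class:

    from typing import NamedTuple

    class Point(NamedTuple):
        x: int
        y: int

    p = Point(1, 2)
    # The precise type of p is a tuple type; its "fallback" -- the class used
    # wherever an ordinary Instance is required -- is Point itself.
    assert isinstance(p, Point) and isinstance(p, tuple)
    assert p == (1, 2)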
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index 58835c6..26a4abe 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -390,7 +390,7 @@ class ASTConverter(ast3.NodeTransformer):  # type: ignore  # typeshed PR #931
             return func_def
 
     def set_type_optional(self, type: Type, initializer: Expression) -> None:
-        if self.options.no_implicit_optional or not experiments.STRICT_OPTIONAL:
+        if self.options.no_implicit_optional:
             return
         # Indicate that type should be wrapped in an Optional if arg is initialized to None.
         optional = isinstance(initializer, NameExpr) and initializer.name == 'None'
diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py
index 109dfe4..0f1bd63 100644
--- a/mypy/fastparse2.py
+++ b/mypy/fastparse2.py
@@ -364,7 +364,7 @@ class ASTConverter(ast27.NodeTransformer):
             return func_def
 
     def set_type_optional(self, type: Type, initializer: Expression) -> None:
-        if self.options.no_implicit_optional or not experiments.STRICT_OPTIONAL:
+        if self.options.no_implicit_optional:
             return
         # Indicate that type should be wrapped in an Optional if arg is initialized to None.
         optional = isinstance(initializer, NameExpr) and initializer.name == 'None'
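
After this change (mirrored in fastparse.py above for Python 3), an argument default of None implies an Optional type based only on the --no-implicit-optional flag, independent of whether strict optional checking is enabled. In user code like the following, mypy treats the parameter as Optional[str] unless --no-implicit-optional is given:

    def greet(name: str = None) -> str:
        # 'name' is implicitly Optional[str] because its default is None.
        if name is None:
            return 'hello'
        return 'hello ' + name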
diff --git a/mypy/fixup.py b/mypy/fixup.py
index 5cb1188..d60ac55 100644
--- a/mypy/fixup.py
+++ b/mypy/fixup.py
@@ -88,6 +88,9 @@ class NodeFixer(NodeVisitor[None]):
                     if stnode is not None:
                         value.node = stnode.node
                         value.type_override = stnode.type_override
+                        if (self.quick_and_dirty and value.kind == TYPE_ALIAS and
+                                stnode.type_override is None):
+                            value.type_override = Instance(stale_info(), [])
                         value.alias_tvars = stnode.alias_tvars or []
                     elif not self.quick_and_dirty:
                         assert stnode is not None, "Could not find cross-ref %s" % (cross_ref,)
diff --git a/mypy/myunit/__init__.py b/mypy/myunit/__init__.py
new file mode 100644
index 0000000..bf47b1e
--- /dev/null
+++ b/mypy/myunit/__init__.py
@@ -0,0 +1,383 @@
+import importlib
+import os
+import sys
+import re
+import tempfile
+import time
+import traceback
+
+from typing import List, Tuple, Any, Callable, Union, cast
+from types import TracebackType
+
+
+# TODO remove global state
+is_verbose = False
+is_quiet = False
+patterns = []  # type: List[str]
+times = []  # type: List[Tuple[float, str]]
+
+
+class AssertionFailure(Exception):
+    """Exception used to signal failed test cases."""
+    def __init__(self, s: str = None) -> None:
+        if s:
+            super().__init__(s)
+        else:
+            super().__init__()
+
+
+class SkipTestCaseException(Exception):
+    """Exception used to signal skipped test cases."""
+    pass
+
+
+def assert_true(b: bool, msg: str = None) -> None:
+    if not b:
+        raise AssertionFailure(msg)
+
+
+def assert_false(b: bool, msg: str = None) -> None:
+    if b:
+        raise AssertionFailure(msg)
+
+
+def good_repr(obj: object) -> str:
+    if isinstance(obj, str):
+        if obj.count('\n') > 1:
+            bits = ["'''\\"]
+            for line in obj.split('\n'):
+                # force repr to use ' not ", then cut it off
+                bits.append(repr('"' + line)[2:-1])
+            bits[-1] += "'''"
+            return '\n'.join(bits)
+    return repr(obj)
+
+
+def assert_equal(a: object, b: object, fmt: str = '{} != {}') -> None:
+    if a != b:
+        raise AssertionFailure(fmt.format(good_repr(a), good_repr(b)))
+
+
+def assert_not_equal(a: object, b: object, fmt: str = '{} == {}') -> None:
+    if a == b:
+        raise AssertionFailure(fmt.format(good_repr(a), good_repr(b)))
+
+
+def assert_raises(typ: type, *rest: Any) -> None:
+    """Usage: assert_raises(exception class[, message], function[, args])
+
+    Call function with the given arguments and expect an exception of the given
+    type.
+
+    TODO use overloads for better type checking
+    """
+    # Parse arguments.
+    msg = None  # type: str
+    if isinstance(rest[0], str) or rest[0] is None:
+        msg = rest[0]
+        rest = rest[1:]
+    f = rest[0]
+    args = []  # type: List[Any]
+    if len(rest) > 1:
+        args = rest[1]
+        assert len(rest) <= 2
+
+    # Perform call and verify the exception.
+    try:
+        f(*args)
+    except BaseException as e:
+        if isinstance(e, KeyboardInterrupt):
+            raise
+        assert_type(typ, e)
+        if msg:
+            assert_equal(e.args[0], msg, 'Invalid message {}, expected {}')
+    else:
+        raise AssertionFailure('No exception raised')
+
+
+def assert_type(typ: type, value: object) -> None:
+    if type(value) != typ:
+        raise AssertionFailure('Invalid type {}, expected {}'.format(
+            typename(type(value)), typename(typ)))
+
+
+def fail() -> None:
+    raise AssertionFailure()
+
+
+class TestCase:
+    def __init__(self, name: str, suite: 'Suite' = None,
+                 func: Callable[[], None] = None) -> None:
+        self.func = func
+        self.name = name
+        self.suite = suite
+        self.old_cwd = None  # type: str
+        self.tmpdir = None  # type: tempfile.TemporaryDirectory
+
+    def run(self) -> None:
+        if self.func:
+            self.func()
+
+    def set_up(self) -> None:
+        self.old_cwd = os.getcwd()
+        self.tmpdir = tempfile.TemporaryDirectory(prefix='mypy-test-')
+        os.chdir(self.tmpdir.name)
+        os.mkdir('tmp')
+        if self.suite:
+            self.suite.set_up()
+
+    def tear_down(self) -> None:
+        if self.suite:
+            self.suite.tear_down()
+        os.chdir(self.old_cwd)
+        try:
+            self.tmpdir.cleanup()
+        except OSError:
+            pass
+        self.old_cwd = None
+        self.tmpdir = None
+
+
+class Suite:
+    def __init__(self) -> None:
+        self.prefix = typename(type(self)) + '.'
+        # Each test case is either a TestCase object or (str, function).
+        self._test_cases = []  # type: List[Any]
+        self.init()
+
+    def set_up(self) -> None:
+        pass
+
+    def tear_down(self) -> None:
+        pass
+
+    def init(self) -> None:
+        for m in dir(self):
+            if m.startswith('test'):
+                t = getattr(self, m)
+                if isinstance(t, Suite):
+                    self.add_test((m + '.', t))
+                else:
+                    self.add_test(TestCase(m, self, getattr(self, m)))
+
+    def add_test(self, test: Union[TestCase,
+                                   Tuple[str, Callable[[], None]],
+                                   Tuple[str, 'Suite']]) -> None:
+        self._test_cases.append(test)
+
+    def cases(self) -> List[Any]:
+        return self._test_cases[:]
+
+    def skip(self) -> None:
+        raise SkipTestCaseException()
+
+
+def add_suites_from_module(suites: List[Suite], mod_name: str) -> None:
+    mod = importlib.import_module(mod_name)
+    got_suite = False
+    for suite in mod.__dict__.values():
+        if isinstance(suite, type) and issubclass(suite, Suite) and suite is not Suite:
+            got_suite = True
+            suites.append(cast(Callable[[], Suite], suite)())
+    if not got_suite:
+        # Sanity check in case e.g. it uses unittest instead of myunit.
+        # The codecs tests do since they need to be python2-compatible.
+        sys.exit('Test module %s had no test!' % mod_name)
+
+
+class ListSuite(Suite):
+    def __init__(self, suites: List[Suite]) -> None:
+        for suite in suites:
+            mod_name = type(suite).__module__.replace('.', '_')
+            mod_name = mod_name.replace('mypy_', '')
+            mod_name = mod_name.replace('test_', '')
+            mod_name = mod_name.strip('_').replace('__', '_')
+            type_name = type(suite).__name__
+            name = 'test_%s_%s' % (mod_name, type_name)
+            setattr(self, name, suite)
+        super().__init__()
+
+
+def main(args: List[str] = None) -> None:
+    global patterns, is_verbose, is_quiet
+    if not args:
+        args = sys.argv[1:]
+    is_verbose = False
+    is_quiet = False
+    suites = []  # type: List[Suite]
+    patterns = []
+    i = 0
+    while i < len(args):
+        a = args[i]
+        if a == '-v':
+            is_verbose = True
+        elif a == '-q':
+            is_quiet = True
+        elif a == '-m':
+            i += 1
+            if i == len(args):
+                sys.exit('-m requires an argument')
+            add_suites_from_module(suites, args[i])
+        elif not a.startswith('-'):
+            patterns.append(a)
+        else:
+            sys.exit('Usage: python -m mypy.myunit [-v] [-q]'
+                    + ' -m mypy.test.module [-m mypy.test.module ...] [filter ...]')
+        i += 1
+    if len(patterns) == 0:
+        patterns.append('*')
+    if not suites:
+        sys.exit('At least one -m argument is required')
+
+    t = ListSuite(suites)
+    num_total, num_fail, num_skip = run_test_recursive(t, 0, 0, 0, '', 0)
+
+    skip_msg = ''
+    if num_skip > 0:
+        skip_msg = ', {} skipped'.format(num_skip)
+
+    if num_fail == 0:
+        if not is_quiet:
+            print('%d test cases run%s, all passed.' % (num_total, skip_msg))
+            print('*** OK ***')
+    else:
+        sys.stderr.write('%d/%d test cases failed%s.\n' % (num_fail,
+                                                           num_total,
+                                                           skip_msg))
+        sys.stderr.write('*** FAILURE ***\n')
+        sys.exit(1)
+
+
+def run_test_recursive(test: Any, num_total: int, num_fail: int, num_skip: int,
+                       prefix: str, depth: int) -> Tuple[int, int, int]:
+    """The first argument may be TestCase, Suite or (str, Suite)."""
+    if isinstance(test, TestCase):
+        name = prefix + test.name
+        for pattern in patterns:
+            if match_pattern(name, pattern):
+                match = True
+                break
+        else:
+            match = False
+        if match:
+            is_fail, is_skip = run_single_test(name, test)
+            if is_fail: num_fail += 1
+            if is_skip: num_skip += 1
+            num_total += 1
+    else:
+        suite = None  # type: Suite
+        suite_prefix = ''
+        if isinstance(test, list) or isinstance(test, tuple):
+            suite = test[1]
+            suite_prefix = test[0]
+        else:
+            suite = test
+            suite_prefix = test.prefix
+
+        for stest in suite.cases():
+            new_prefix = prefix
+            if depth > 0:
+                new_prefix = prefix + suite_prefix
+            num_total, num_fail, num_skip = run_test_recursive(
+                stest, num_total, num_fail, num_skip, new_prefix, depth + 1)
+    return num_total, num_fail, num_skip
+
+
+def run_single_test(name: str, test: Any) -> Tuple[bool, bool]:
+    if is_verbose:
+        sys.stderr.write(name)
+        sys.stderr.flush()
+
+    time0 = time.time()
+    test.set_up()  # FIX: check exceptions
+    exc_traceback = None  # type: Any
+    try:
+        test.run()
+    except BaseException as e:
+        if isinstance(e, KeyboardInterrupt):
+            raise
+        exc_type, exc_value, exc_traceback = sys.exc_info()
+    finally:
+        test.tear_down()
+    times.append((time.time() - time0, name))
+
+    if exc_traceback:
+        if isinstance(exc_value, SkipTestCaseException):
+            if is_verbose:
+                sys.stderr.write(' (skipped)\n')
+            return False, True
+        else:
+            handle_failure(name, exc_type, exc_value, exc_traceback)
+            return True, False
+    elif is_verbose:
+        sys.stderr.write('\n')
+
+    return False, False
+
+
+def handle_failure(name: str,
+                   exc_type: type,
+                   exc_value: BaseException,
+                   exc_traceback: TracebackType,
+                   ) -> None:
+    # Report failed test case.
+    if is_verbose:
+        sys.stderr.write('\n\n')
+    msg = ''
+    if exc_value.args and exc_value.args[0]:
+        msg = ': ' + str(exc_value)
+    else:
+        msg = ''
+    if not isinstance(exc_value, SystemExit):
+        # We assume that before doing exit() (which raises SystemExit) we've printed
+        # enough context about what happened so that a stack trace is not useful.
+        # In particular, uncaught exceptions during semantic analysis or type checking
+        # call exit() and they already print out a stack trace.
+        sys.stderr.write('Traceback (most recent call last):\n')
+        tb = traceback.format_tb(exc_traceback)
+        tb = clean_traceback(tb)
+        for s in tb:
+            sys.stderr.write(s)
+    else:
+        sys.stderr.write('\n')
+    exception = typename(exc_type)
+    sys.stderr.write('{}{}\n\n'.format(exception, msg))
+    sys.stderr.write('{} failed\n\n'.format(name))
+
+
+def typename(t: type) -> str:
+    if '.' in str(t):
+        return str(t).split('.')[-1].rstrip("'>")
+    else:
+        return str(t)[8:-2]
+
+
+def match_pattern(s: str, p: str) -> bool:
+    if len(p) == 0:
+        return len(s) == 0
+    elif p[0] == '*':
+        if len(p) == 1:
+            return True
+        else:
+            for i in range(len(s) + 1):
+                if match_pattern(s[i:], p[1:]):
+                    return True
+            return False
+    elif len(s) == 0:
+        return False
+    else:
+        return s[0] == p[0] and match_pattern(s[1:], p[1:])
+
+
+def clean_traceback(tb: List[str]) -> List[str]:
+    # Remove clutter from the traceback.
+    start = 0
+    for i, s in enumerate(tb):
+        if '\n    test.run()\n' in s or '\n    self.func()\n' in s:
+            start = i + 1
+    tb = tb[start:]
+    for f in ['assert_equal', 'assert_not_equal', 'assert_type',
+              'assert_raises', 'assert_true']:
+        if tb != [] and ', in {}\n'.format(f) in tb[-1]:
+            tb = tb[:-1]
+    return tb
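
The filter patterns accepted on the myunit command line are matched by match_pattern above: '*' matches any (possibly empty) substring and everything else is literal, anchored at both ends. For example:

    from mypy.myunit import match_pattern

    assert match_pattern('testAdd', 'test*')
    assert match_pattern('TypeSuite.testJoin', '*Join')
    assert not match_pattern('testAdd', 'testSub')
    assert not match_pattern('testAdd', 'Add')   # no implicit leading '*'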
diff --git a/mypy/myunit/__main__.py b/mypy/myunit/__main__.py
new file mode 100644
index 0000000..78ef01f
--- /dev/null
+++ b/mypy/myunit/__main__.py
@@ -0,0 +1,18 @@
+# This is a separate module from mypy.myunit so it doesn't exist twice.
+"""Myunit test runner command line tool.
+
+Usually used as a slave by runtests.py, but can be used directly.
+"""
+
+from mypy.myunit import main
+
+# In Python 3.3, mypy.__path__ contains a relative path to the mypy module
+# (whereas in later Python versions it contains an absolute path).  Because the
+# test runner changes directories, this breaks non-toplevel mypy imports.  We
+# fix that problem by fixing up the path to be absolute here.
+import os.path
+import mypy
+# User-defined packages always have __path__ attributes, but mypy doesn't know that.
+mypy.__path__ = [os.path.abspath(p) for p in mypy.__path__]  # type: ignore
+
+main()
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 6cc75f5..796e809 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -2279,11 +2279,14 @@ class SymbolTableNode:
     cross_ref = None  # type: Optional[str]
     # Was this node created by normalize_type_alias?
     normalized = False  # type: bool
+    # Was this defined by assignment to self attribute?
+    implicit = False  # type: bool
 
     def __init__(self, kind: int, node: Optional[SymbolNode], mod_id: str = None,
                  typ: 'mypy.types.Type' = None,
                  module_public: bool = True, normalized: bool = False,
-                 alias_tvars: Optional[List[str]] = None) -> None:
+                 alias_tvars: Optional[List[str]] = None,
+                 implicit: bool = False) -> None:
         self.kind = kind
         self.node = node
         self.type_override = typ
@@ -2291,6 +2294,7 @@ class SymbolTableNode:
         self.module_public = module_public
         self.normalized = normalized
         self.alias_tvars = alias_tvars
+        self.implicit = implicit
 
     @property
     def fullname(self) -> Optional[str]:
@@ -2334,6 +2338,10 @@ class SymbolTableNode:
                 }  # type: JsonDict
         if not self.module_public:
             data['module_public'] = False
+        if self.normalized:
+            data['normalized'] = True
+        if self.implicit:
+            data['implicit'] = True
         if self.kind == MODULE_REF:
             assert self.node is not None, "Missing module cross ref in %s for %s" % (prefix, name)
             data['cross_ref'] = self.node.fullname()
@@ -2371,6 +2379,10 @@ class SymbolTableNode:
                 stnode.alias_tvars = data['alias_tvars']
         if 'module_public' in data:
             stnode.module_public = data['module_public']
+        if 'normalized' in data:
+            stnode.normalized = data['normalized']
+        if 'implicit' in data:
+            stnode.implicit = data['implicit']
         return stnode
 
 
diff --git a/mypy/options.py b/mypy/options.py
index af7837d..7d0fe09 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -34,7 +34,8 @@ class Options:
         "strict_optional",
     }
 
-    OPTIONS_AFFECTING_CACHE = (PER_MODULE_OPTIONS | {"quick_and_dirty", "platform"})
+    OPTIONS_AFFECTING_CACHE = ((PER_MODULE_OPTIONS | {"quick_and_dirty", "platform"})
+                               - {"debug_cache"})
 
     def __init__(self) -> None:
         # -- build options --
diff --git a/mypy/semanal.py b/mypy/semanal.py
index bcbe495..15ccbe5 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -1809,11 +1809,12 @@ class SemanticAnalyzer(NodeVisitor):
             lval.is_def = True
             v = Var(lval.name)
             v.set_line(lval)
+            v._fullname = self.qualified_name(lval.name)
             v.info = self.type
             v.is_ready = False
             lval.def_var = v
             lval.node = v
-            self.type.names[lval.name] = SymbolTableNode(MDEF, v)
+            self.type.names[lval.name] = SymbolTableNode(MDEF, v, implicit=True)
         self.check_lvalue_validity(lval.node, lval)
 
     def is_self_member_ref(self, memberexpr: MemberExpr) -> bool:
@@ -3383,6 +3384,7 @@ class SemanticAnalyzer(NodeVisitor):
 
     def lookup(self, name: str, ctx: Context) -> SymbolTableNode:
         """Look up an unqualified name in all active namespaces."""
+        implicit_name = False
         # 1a. Name declared using 'global x' takes precedence
         if name in self.global_decls[-1]:
             if name in self.globals:
@@ -3400,7 +3402,11 @@ class SemanticAnalyzer(NodeVisitor):
                 return None
         # 2. Class attributes (if within class definition)
         if self.is_class_scope() and name in self.type.names:
-            return self.type.names[name]
+            node = self.type.names[name]
+            if not node.implicit:
+                return node
+            implicit_name = True
+            implicit_node = node
         # 3. Local (function) scopes
         for table in reversed(self.locals):
             if table is not None and name in table:
@@ -3420,8 +3426,11 @@ class SemanticAnalyzer(NodeVisitor):
                 node = table[name]
                 return node
         # Give up.
-        self.name_not_defined(name, ctx)
-        self.check_for_obsolete_short_name(name, ctx)
+        if not implicit_name:
+            self.name_not_defined(name, ctx)
+            self.check_for_obsolete_short_name(name, ctx)
+        else:
+            return implicit_node
         return None
 
     def check_for_obsolete_short_name(self, name: str, ctx: Context) -> None:
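
The net effect of the semanal.py changes: a member that only comes into being through assignment to self (now recorded with implicit=True) no longer wins unqualified name lookup at class scope; an enclosing definition is preferred, and the implicit member is used only as a last resort. A hypothetical example of the resulting lookup order:

    x = 0

    class C:
        def __init__(self) -> None:
            self.x = 'attr'   # implicit member 'x' (implicit=True in the symbol table)

        y = x + 1             # at class scope this now resolves to the module-level 'x'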
diff --git a/mypy/server/__init__.py b/mypy/server/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py
new file mode 100644
index 0000000..9da9056
--- /dev/null
+++ b/mypy/server/astdiff.py
@@ -0,0 +1,209 @@
+"""Compare two versions of a module symbol table.
+
+The goal is to find which AST nodes have externally visible changes, so
+that we can fire triggers and re-type-check other parts of the program
+that are stale because of the changes.
+
+Only look in detail at definitions in the current module.
+"""
+
+from typing import Set, List, TypeVar
+
+from mypy.nodes import SymbolTable, SymbolTableNode, FuncBase, TypeInfo, Var
+from mypy.types import (
+    Type, TypeVisitor, UnboundType, TypeList, AnyType, NoneTyp, UninhabitedType,
+    ErasedType, DeletedType, Instance, TypeVarType, CallableType, TupleType, TypedDictType,
+    UnionType, Overloaded, PartialType, TypeType
+)
+
+
+def compare_symbol_tables(name_prefix: str, table1: SymbolTable, table2: SymbolTable) -> Set[str]:
+    """Return names that are different in two versions of a symbol table.
+
+    Return a set of fully-qualified names (e.g., 'mod.func' or 'mod.Class.method').
+    """
+    # Find names defined in only one version.
+    names1 = {'%s.%s' % (name_prefix, name) for name in table1}
+    names2 = {'%s.%s' % (name_prefix, name) for name in table2}
+    triggers = names1 ^ names2
+
+    # Look for names defined in both versions that are different.
+    for name in set(table1.keys()) & set(table2.keys()):
+        if not is_similar_node_shallow(table1[name], table2[name]):
+            triggers.add('%s.%s' % (name_prefix, name))
+        else:
+            # Nodes are the same when using shallow comparison. Now look into contents of
+            # classes to find changed items.
+            node1 = table1[name].node
+            node2 = table2[name].node
+
+            if node1.fullname() and get_prefix(node1.fullname()) != name_prefix:
+                # Only look inside things defined in the current module.
+                # TODO: This probably doesn't work generally...
+                continue
+
+            if isinstance(node1, TypeInfo) and isinstance(node2, TypeInfo):
+                # TODO: Only do this if the class is defined in this module.
+                prefix = '%s.%s' % (name_prefix, node1.name())
+                triggers |= compare_symbol_tables(prefix, node1.names, node2.names)
+
+    return triggers
+
+
+def is_similar_node_shallow(n: SymbolTableNode, m: SymbolTableNode) -> bool:
+    # TODO:
+    #   cross_ref
+    #   tvar_def
+    #   type_override
+    if (n.kind != m.kind
+            or n.mod_id != m.mod_id
+            or n.module_public != m.module_public):
+        return False
+    if type(n.node) != type(m.node):  # noqa
+        return False
+    if n.node.fullname() != m.node.fullname():
+        return False
+    if isinstance(n.node, FuncBase) and isinstance(m.node, FuncBase):
+        # TODO: info
+        return (n.node.is_property == m.node.is_property and
+                is_identical_type(n.node.type, m.node.type))
+    if isinstance(n.node, TypeInfo) and isinstance(m.node, TypeInfo):
+        # TODO:
+        #   type_vars
+        #   bases
+        #   _promote
+        #   tuple_type
+        #   typeddict_type
+        nn = n.node
+        mn = m.node
+        return (nn.is_abstract == mn.is_abstract and
+                nn.is_enum == mn.is_enum and
+                nn.fallback_to_any == mn.fallback_to_any and
+                nn.is_named_tuple == mn.is_named_tuple and
+                nn.is_newtype == mn.is_newtype and
+                is_same_mro(nn.mro, mn.mro))
+    if isinstance(n.node, Var) and isinstance(m.node, Var):
+        return is_identical_type(n.node.type, m.node.type)
+    return True
+
+
+def is_same_mro(mro1: List[TypeInfo], mro2: List[TypeInfo]) -> bool:
+    return (len(mro1) == len(mro2)
+            and all(x.fullname() == y.fullname() for x, y in zip(mro1, mro2)))
+
+
+def get_prefix(id: str) -> str:
+    """Drop the final component of a qualified name (e.g. ('x.y' -> 'x')."""
+    return id.rsplit('.', 1)[0]
+
+
+def is_identical_type(t: Type, s: Type) -> bool:
+    return t.accept(IdenticalTypeVisitor(s))
+
+
+TT = TypeVar('TT', bound=Type)
+
+
+def is_identical_types(a: List[TT], b: List[TT]) -> bool:
+    return len(a) == len(b) and all(is_identical_type(t, s) for t, s in zip(a, b))
+
+
+class IdenticalTypeVisitor(TypeVisitor[bool]):
+    """Visitor for checking whether two types are identical.
+
+    This may be conservative -- it's okay for two types to be considered
+    different even if they are actually the same. The results are only
+    used to improve performance, not relied on for correctness.
+
+    Differences from mypy.sametypes:
+
+    * Types with the same name but different AST nodes are considered
+      identical.
+
+    * If one of the types is not valid for whatever reason, they are
+      considered different.
+
+    * Sometimes require types to be structurally identical, even if they
+      are semantically the same type.
+    """
+
+    def __init__(self, right: Type) -> None:
+        self.right = right
+
+    # visit_x(left) means: is left (which is an instance of X) the same type as
+    # right?
+
+    def visit_unbound_type(self, left: UnboundType) -> bool:
+        return False
+
+    def visit_any(self, left: AnyType) -> bool:
+        return isinstance(self.right, AnyType)
+
+    def visit_none_type(self, left: NoneTyp) -> bool:
+        return isinstance(self.right, NoneTyp)
+
+    def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
+        return isinstance(self.right, UninhabitedType)
+
+    def visit_erased_type(self, left: ErasedType) -> bool:
+        return False
+
+    def visit_deleted_type(self, left: DeletedType) -> bool:
+        return isinstance(self.right, DeletedType)
+
+    def visit_instance(self, left: Instance) -> bool:
+        return (isinstance(self.right, Instance) and
+                left.type.fullname() == self.right.type.fullname() and
+                is_identical_types(left.args, self.right.args))
+
+    def visit_type_var(self, left: TypeVarType) -> bool:
+        return (isinstance(self.right, TypeVarType) and
+                left.id == self.right.id)
+
+    def visit_callable_type(self, left: CallableType) -> bool:
+        # FIX generics
+        if isinstance(self.right, CallableType):
+            cright = self.right
+            return (is_identical_type(left.ret_type, cright.ret_type) and
+                    is_identical_types(left.arg_types, cright.arg_types) and
+                    left.arg_names == cright.arg_names and
+                    left.arg_kinds == cright.arg_kinds and
+                    left.is_type_obj() == cright.is_type_obj() and
+                    left.is_ellipsis_args == cright.is_ellipsis_args)
+        return False
+
+    def visit_tuple_type(self, left: TupleType) -> bool:
+        if isinstance(self.right, TupleType):
+            return is_identical_types(left.items, self.right.items)
+        return False
+
+    def visit_typeddict_type(self, left: TypedDictType) -> bool:
+        if isinstance(self.right, TypedDictType):
+            if left.items.keys() != self.right.items.keys():
+                return False
+            for (_, left_item_type, right_item_type) in left.zip(self.right):
+                if not is_identical_type(left_item_type, right_item_type):
+                    return False
+            return True
+        return False
+
+    def visit_union_type(self, left: UnionType) -> bool:
+        if isinstance(self.right, UnionType):
+            # Require structurally identical types.
+            return is_identical_types(left.items, self.right.items)
+        return False
+
+    def visit_overloaded(self, left: Overloaded) -> bool:
+        if isinstance(self.right, Overloaded):
+            return is_identical_types(left.items(), self.right.items())
+        return False
+
+    def visit_partial_type(self, left: PartialType) -> bool:
+        # A partial type is not fully defined, so the result is indeterminate. We shouldn't
+        # get here.
+        raise RuntimeError
+
+    def visit_type_type(self, left: TypeType) -> bool:
+        if isinstance(self.right, TypeType):
+            return is_identical_type(left.item, self.right.item)
+        return False
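
compare_symbol_tables returns the set of fully-qualified names whose externally visible definitions differ between two versions of a module; the first step is simply the symmetric difference of the qualified names, and the rest is the shallow node comparison above. The name-level step, distilled onto plain dicts purely for illustration:

    from typing import Dict, Set

    def changed_names(prefix: str, table1: Dict[str, object],
                      table2: Dict[str, object]) -> Set[str]:
        # Names present in only one version always produce a trigger.
        names1 = {'%s.%s' % (prefix, name) for name in table1}
        names2 = {'%s.%s' % (prefix, name) for name in table2}
        return names1 ^ names2

    assert changed_names('mod', {'f': 1, 'g': 2}, {'f': 1, 'h': 3}) == {'mod.g', 'mod.h'}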
diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py
new file mode 100644
index 0000000..5591c9a
--- /dev/null
+++ b/mypy/server/astmerge.py
@@ -0,0 +1,222 @@
+"""Merge a new version of a module AST to an old version.
+
+See the main entry point merge_asts for details.
+"""
+
+from typing import Dict, List, cast, TypeVar
+
+from mypy.nodes import (
+    Node, MypyFile, SymbolTable, Block, AssignmentStmt, NameExpr, MemberExpr, RefExpr, TypeInfo,
+    FuncDef, ClassDef, SymbolNode, Var, Statement, MDEF
+)
+from mypy.traverser import TraverserVisitor
+from mypy.types import (
+    Type, TypeVisitor, Instance, AnyType, NoneTyp, CallableType, DeletedType, PartialType,
+    TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType,
+    Overloaded
+)
+
+
+def merge_asts(old: MypyFile, old_symbols: SymbolTable,
+               new: MypyFile, new_symbols: SymbolTable) -> None:
+    """Merge a new version of a module AST to a previous version.
+
+    The main idea is to preserve the identities of externally visible
+    nodes in the old AST (that have a corresponding node in the new AST).
+    All old node state (outside identity) will come from the new AST.
+
+    When this returns, 'old' will refer to the merged AST, but 'new_symbols'
+    will be the new symbol table. 'new' and 'old_symbols' will no longer be
+    valid.
+    """
+    assert new.fullname() == old.fullname()
+    replacement_map = replacement_map_from_symbol_table(
+        old_symbols, new_symbols, prefix=old.fullname())
+    replacement_map[new] = old
+    node = replace_nodes_in_ast(new, replacement_map)
+    assert node is old
+    replace_nodes_in_symbol_table(new_symbols, replacement_map)
+
+
+def replacement_map_from_symbol_table(
+        old: SymbolTable, new: SymbolTable, prefix: str) -> Dict[SymbolNode, SymbolNode]:
+    replacements = {}
+    for name, node in old.items():
+        if (name in new and (node.kind == MDEF
+                             or get_prefix(node.node.fullname()) == prefix)):
+            new_node = new[name]
+            if (type(new_node.node) == type(node.node)  # noqa
+                    and new_node.node.fullname() == node.node.fullname()
+                    and new_node.kind == node.kind):
+                replacements[new_node.node] = node.node
+                if isinstance(node.node, TypeInfo) and isinstance(new_node.node, TypeInfo):
+                    type_repl = replacement_map_from_symbol_table(
+                        node.node.names,
+                        new_node.node.names,
+                        prefix)
+                    replacements.update(type_repl)
+    return replacements
+
+
+def replace_nodes_in_ast(node: SymbolNode,
+                         replacements: Dict[SymbolNode, SymbolNode]) -> SymbolNode:
+    visitor = NodeReplaceVisitor(replacements)
+    node.accept(visitor)
+    return replacements.get(node, node)
+
+
+SN = TypeVar('SN', bound=SymbolNode)
+
+
+class NodeReplaceVisitor(TraverserVisitor):
+    """Transform some nodes to new identities in an AST.
+
+    Only nodes that live in the symbol table may be
+    replaced, which simplifies the implementation some.
+    """
+
+    def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None:
+        self.replacements = replacements
+
+    def visit_mypy_file(self, node: MypyFile) -> None:
+        node = self.fixup(node)
+        node.defs = self.replace_statements(node.defs)
+        super().visit_mypy_file(node)
+
+    def visit_block(self, node: Block) -> None:
+        super().visit_block(node)
+        node.body = self.replace_statements(node.body)
+
+    def visit_func_def(self, node: FuncDef) -> None:
+        node = self.fixup(node)
+        if node.type:
+            self.fixup_type(node.type)
+        super().visit_func_def(node)
+
+    def visit_class_def(self, node: ClassDef) -> None:
+        # TODO additional things like the MRO
+        node.defs.body = self.replace_statements(node.defs.body)
+        replace_nodes_in_symbol_table(node.info.names, self.replacements)
+        info = node.info
+        for i, item in enumerate(info.mro):
+            info.mro[i] = self.fixup(info.mro[i])
+        for i, base in enumerate(info.bases):
+            self.fixup_type(info.bases[i])
+        super().visit_class_def(node)
+
+    def visit_assignment_stmt(self, node: AssignmentStmt) -> None:
+        if node.type:
+            self.fixup_type(node.type)
+        super().visit_assignment_stmt(node)
+
+    # Expressions
+
+    def visit_name_expr(self, node: NameExpr) -> None:
+        self.visit_ref_expr(node)
+
+    def visit_member_expr(self, node: MemberExpr) -> None:
+        self.visit_ref_expr(node)
+        super().visit_member_expr(node)
+
+    def visit_ref_expr(self, node: RefExpr) -> None:
+        node.node = self.fixup(node.node)
+
+    # Helpers
+
+    def fixup(self, node: SN) -> SN:
+        if node in self.replacements:
+            new = self.replacements[node]
+            new.__dict__ = node.__dict__
+            return cast(SN, new)
+        return node
+
+    def fixup_type(self, typ: Type) -> None:
+        typ.accept(TypeReplaceVisitor(self.replacements))
+
+    def replace_statements(self, nodes: List[Statement]) -> List[Statement]:
+        result = []
+        for node in nodes:
+            if isinstance(node, SymbolNode):
+                node = self.fixup(node)
+            result.append(node)
+        return result
+
+
+class TypeReplaceVisitor(TypeVisitor[None]):
+    def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None:
+        self.replacements = replacements
+
+    def visit_instance(self, typ: Instance) -> None:
+        typ.type = self.fixup(typ.type)
+        for arg in typ.args:
+            arg.accept(self)
+
+    def visit_any(self, typ: AnyType) -> None:
+        pass
+
+    def visit_none_type(self, typ: NoneTyp) -> None:
+        pass
+
+    def visit_callable_type(self, typ: CallableType) -> None:
+        for arg in typ.arg_types:
+            arg.accept(self)
+        typ.ret_type.accept(self)
+        # TODO: typ.definition
+        # TODO: typ.fallback
+        assert not typ.variables  # TODO
+
+    def visit_overloaded(self, t: Overloaded) -> None:
+        raise NotImplementedError
+
+    def visit_deleted_type(self, typ: DeletedType) -> None:
+        pass
+
+    def visit_partial_type(self, typ: PartialType) -> None:
+        raise RuntimeError
+
+    def visit_tuple_type(self, typ: TupleType) -> None:
+        raise NotImplementedError
+
+    def visit_type_type(self, typ: TypeType) -> None:
+        raise NotImplementedError
+
+    def visit_type_var(self, typ: TypeVarType) -> None:
+        raise NotImplementedError
+
+    def visit_typeddict_type(self, typ: TypedDictType) -> None:
+        raise NotImplementedError
+
+    def visit_unbound_type(self, typ: UnboundType) -> None:
+        raise RuntimeError
+
+    def visit_uninhabited_type(self, typ: UninhabitedType) -> None:
+        pass
+
+    def visit_union_type(self, typ: UnionType) -> None:
+        raise NotImplementedError
+
+    # Helpers
+
+    def fixup(self, node: SN) -> SN:
+        if node in self.replacements:
+            new = self.replacements[node]
+            new.__dict__ = node.__dict__
+            return cast(SN, new)
+        return node
+
+
+def replace_nodes_in_symbol_table(symbols: SymbolTable,
+                                  replacements: Dict[SymbolNode, SymbolNode]) -> None:
+    for name, node in symbols.items():
+        if node.node in replacements:
+            new = replacements[node.node]
+            new.__dict__ = node.node.__dict__
+            node.node = new
+            if isinstance(node.node, Var) and node.node.type:
+                node.node.type.accept(TypeReplaceVisitor(replacements))
+                node.node.info = cast(TypeInfo, replacements.get(node.node.info, node.node.info))
+
+
+def get_prefix(fullname: str) -> str:
+    """Drop the final component of a qualified name (e.g. ('x.y' -> 'x')."""
+    return fullname.rsplit('.', 1)[0]
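
The core trick in both fixup helpers above is to keep the old object's identity while adopting the new object's state, so every reference elsewhere in the program automatically sees the merged node. The same move on a toy class, for illustration only:

    class Box:
        def __init__(self, value: int) -> None:
            self.value = value

    old, new = Box(1), Box(2)
    holder = [old]                 # some external reference to the old object

    old.__dict__ = new.__dict__    # keep the old identity, take the new state
    assert holder[0] is old
    assert holder[0].value == 2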
diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py
new file mode 100644
index 0000000..dd7ae96
--- /dev/null
+++ b/mypy/server/aststrip.py
@@ -0,0 +1,99 @@
+"""Strip AST from semantic information."""
+
+import contextlib
+from typing import Union, Iterator
+
+from mypy.nodes import (
+    Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt,
+    TypeInfo, Var
+)
+from mypy.traverser import TraverserVisitor
+
+
+def strip_target(node: Union[MypyFile, FuncItem]) -> None:
+    NodeStripVisitor().strip_target(node)
+
+
+class NodeStripVisitor(TraverserVisitor):
+    def __init__(self) -> None:
+        self.type = None  # type: TypeInfo
+
+    def strip_target(self, node: Union[MypyFile, FuncItem]) -> None:
+        """Strip a fine-grained incremental mode target."""
+        if isinstance(node, MypyFile):
+            self.strip_top_level(node)
+        else:
+            node.accept(self)
+
+    def strip_top_level(self, file_node: MypyFile) -> None:
+        """Strip a module top-level (don't recursive into functions)."""
+        for node in file_node.defs:
+            if not isinstance(node, (FuncItem, ClassDef)):
+                node.accept(self)
+            elif isinstance(node, ClassDef):
+                self.strip_class_body(node)
+
+    def strip_class_body(self, node: ClassDef) -> None:
+        """Strip class body and type info, but don't strip methods."""
+        node.info.type_vars = []
+        node.info.bases = []
+        node.info.abstract_attributes = []
+        node.info.mro = []
+        node.info.add_type_vars()
+
+    def visit_func_def(self, node: FuncDef) -> None:
+        node.expanded = []
+        node.type = node.unanalyzed_type
+        with self.enter_class(node.info) if node.info else nothing():
+            super().visit_func_def(node)
+
+    @contextlib.contextmanager
+    def enter_class(self, info: TypeInfo) -> Iterator[None]:
+        old = self.type
+        self.type = info
+        yield
+        self.type = old
+
+    def visit_assignment_stmt(self, node: AssignmentStmt) -> None:
+        node.type = node.unanalyzed_type
+        super().visit_assignment_stmt(node)
+
+    def visit_name_expr(self, node: NameExpr) -> None:
+        self.strip_ref_expr(node)
+
+    def visit_member_expr(self, node: MemberExpr) -> None:
+        self.strip_ref_expr(node)
+        if self.is_duplicate_attribute_def(node):
+            # This is marked as an instance variable definition but a base class
+            # defines an attribute with the same name, and we can't have
+            # multiple definitions for an attribute. Defer to the base class
+            # definition.
+            del self.type.names[node.name]
+            node.is_def = False
+            node.def_var = None
+
+    def is_duplicate_attribute_def(self, node: MemberExpr) -> bool:
+        if not node.is_def or node.name not in self.type.names:
+            return False
+        return any(info.get(node.name) is not None for info in self.type.mro[1:])
+
+    def strip_ref_expr(self, node: RefExpr) -> None:
+        node.kind = None
+        node.node = None
+        node.fullname = None
+
+    # TODO: handle more node types
+
+
+def is_self_member_ref(memberexpr: MemberExpr) -> bool:
+    """Does memberexpr refer to an attribute of self?"""
+    # TODO: Merge with is_self_member_ref in semanal.py.
+    if not isinstance(memberexpr.expr, NameExpr):
+        return False
+    node = memberexpr.expr.node
+    return isinstance(node, Var) and node.is_self
+
+
+@contextlib.contextmanager
+def nothing() -> Iterator[None]:
+    yield
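
nothing() exists so that visit_func_def can choose between a real context manager and a no-op one inside a single with statement. The same pattern in a self-contained form (hypothetical names, illustration only):

    import contextlib
    from typing import Iterator, Optional

    @contextlib.contextmanager
    def nothing() -> Iterator[None]:
        yield

    @contextlib.contextmanager
    def announce(name: str) -> Iterator[None]:
        print('entering', name)
        yield
        print('leaving', name)

    def process(name: Optional[str]) -> None:
        # Pick a real context manager when there is a name, a no-op otherwise.
        with announce(name) if name else nothing():
            print('working')

    process('C')    # entering C / working / leaving C
    process(None)   # working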
diff --git a/mypy/server/deps.py b/mypy/server/deps.py
new file mode 100644
index 0000000..d167b86
--- /dev/null
+++ b/mypy/server/deps.py
@@ -0,0 +1,232 @@
+"""Generate fine-grained dependencies for AST nodes."""
+
+from typing import Dict, List, Set
+
+from mypy.checkmember import bind_self
+from mypy.nodes import (
+    Node, Expression, MypyFile, FuncDef, ClassDef, AssignmentStmt, NameExpr, MemberExpr, Import,
+    ImportFrom, CallExpr, TypeInfo, Var, LDEF
+)
+from mypy.traverser import TraverserVisitor
+from mypy.types import (
+    Type, Instance, AnyType, NoneTyp, TypeVisitor, CallableType, DeletedType, PartialType,
+    TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType,
+    FunctionLike
+)
+from mypy.server.trigger import make_trigger
+
+
+def get_dependencies(prefix: str, node: Node,
+                     type_map: Dict[Expression, Type]) -> Dict[str, Set[str]]:
+    """Get all dependencies of a node, recursively."""
+    visitor = DependencyVisitor(prefix, type_map)
+    node.accept(visitor)
+    return visitor.map
+
+
+def get_dependencies_of_target(prefix: str, node: Node,
+                               type_map: Dict[Expression, Type]) -> Dict[str, Set[str]]:
+    """Get dependencies of a target -- don't recursive into nested targets."""
+    visitor = DependencyVisitor(prefix, type_map)
+    if isinstance(node, MypyFile):
+        for defn in node.defs:
+            if not isinstance(defn, (ClassDef, FuncDef)):
+                defn.accept(visitor)
+    else:
+        node.accept(visitor)
+    return visitor.map
+
+
+class DependencyVisitor(TraverserVisitor):
+    def __init__(self, prefix: str, type_map: Dict[Expression, Type]) -> None:
+        self.stack = [prefix]
+        self.type_map = type_map
+        self.map = {}  # type: Dict[str, Set[str]]
+        self.is_class = False
+
+    # TODO
+    #   decorated functions
+    #   overloads
+    #   from m import *
+
+    def visit_mypy_file(self, o: MypyFile) -> None:
+        # TODO: Do we need to do anything here?
+        super().visit_mypy_file(o)
+
+    def visit_func_def(self, o: FuncDef) -> None:
+        target = self.push(o.name())
+        if o.type:
+            if self.is_class and isinstance(o.type, FunctionLike):
+                signature = bind_self(o.type)  # type: Type
+            else:
+                signature = o.type
+            for trigger in get_type_dependencies(signature):
+                self.add_dependency(trigger)
+                self.add_dependency(trigger, target=make_trigger(target))
+        if o.info:
+            for base in non_trivial_bases(o.info):
+                self.add_dependency(make_trigger(base.fullname() + '.' + o.name()))
+        super().visit_func_def(o)
+        self.pop()
+
+    def visit_class_def(self, o: ClassDef) -> None:
+        target = self.push(o.name)
+        self.add_dependency(make_trigger(target))
+        old_is_class = self.is_class
+        self.is_class = True
+        # TODO: Add dependencies based on MRO and other attributes.
+        super().visit_class_def(o)
+        self.is_class = old_is_class
+        info = o.info
+        for name, node in info.names.items():
+            if isinstance(node.node, Var):
+                for base in non_trivial_bases(info):
+                    # If the type of an attribute changes in a base class, we make references
+                    # to the attribute in the subclass stale.
+                    self.add_dependency(make_trigger(base.fullname() + '.' + name),
+                                        target=make_trigger(info.fullname() + '.' + name))
+        for base in non_trivial_bases(info):
+            for name, node in base.names.items():
+                self.add_dependency(make_trigger(base.fullname() + '.' + name),
+                                    target=make_trigger(info.fullname() + '.' + name))
+            self.add_dependency(make_trigger(base.fullname() + '.__init__'),
+                                target=make_trigger(info.fullname() + '.__init__'))
+        self.pop()
+
+    def visit_import(self, o: Import) -> None:
+        for id, as_id in o.ids:
+            # TODO: as_id
+            self.add_dependency(make_trigger(id), self.current())
+
+    def visit_import_from(self, o: ImportFrom) -> None:
+        assert o.relative == 0  # Relative imports not supported
+        for name, as_name in o.names:
+            assert as_name is None or as_name == name
+            self.add_dependency(make_trigger(o.id + '.' + name))
+
+    def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
+        super().visit_assignment_stmt(o)
+        if o.type:
+            for trigger in get_type_dependencies(o.type):
+                self.add_dependency(trigger)
+
+    # Expressions
+
+    # TODO
+    #   dependency on __init__ (e.g. ClassName())
+    #   super()
+
+    def visit_name_expr(self, o: NameExpr) -> None:
+        if o.kind == LDEF:
+            # We don't track dependencies on local variables, since they
+            # aren't externally visible.
+            return
+        trigger = make_trigger(o.fullname)
+        self.add_dependency(trigger)
+
+    def visit_member_expr(self, e: MemberExpr) -> None:
+        super().visit_member_expr(e)
+        if e.kind is not None:
+            # Reference to a module attribute
+            trigger = make_trigger(e.fullname)
+            self.add_dependency(trigger)
+        else:
+            # Reference to a non-module attribute
+            typ = self.type_map[e.expr]
+            if isinstance(typ, Instance):
+                member = '%s.%s' % (typ.type.fullname(), e.name)
+                self.add_dependency(make_trigger(member))
+            elif isinstance(typ, (AnyType, NoneTyp)):
+                pass  # No dependency needed
+            elif isinstance(typ, FunctionLike) and typ.is_type_obj():
+                member = '%s.%s' % (typ.type_object().fullname(), e.name)
+                self.add_dependency(make_trigger(member))
+
+    def visit_call_expr(self, e: CallExpr) -> None:
+        super().visit_call_expr(e)
+        callee_type = self.type_map.get(e.callee)
+        if isinstance(callee_type, FunctionLike) and callee_type.is_type_obj():
+            class_name = callee_type.type_object().fullname()
+            self.add_dependency(make_trigger(class_name + '.__init__'))
+
+    # Helpers
+
+    def add_dependency(self, trigger: str, target: str = None) -> None:
+        if target is None:
+            target = self.current()
+        self.map.setdefault(trigger, set()).add(target)
+
+    def push(self, component: str) -> str:
+        target = '%s.%s' % (self.current(), component)
+        self.stack.append(target)
+        return target
+
+    def pop(self) -> None:
+        self.stack.pop()
+
+    def current(self) -> str:
+        return self.stack[-1]
+
+
+def get_type_dependencies(typ: Type) -> List[str]:
+    return typ.accept(TypeDependenciesVisitor())
+
+
+class TypeDependenciesVisitor(TypeVisitor[List[str]]):
+    def __init__(self) -> None:
+        self.deps = []  # type: List[str]
+
+    def visit_instance(self, typ: Instance) -> List[str]:
+        trigger = make_trigger(typ.type.fullname())
+        triggers = [trigger]
+        for arg in typ.args:
+            triggers.extend(get_type_dependencies(arg))
+        return triggers
+
+    def visit_any(self, typ: AnyType) -> List[str]:
+        return []
+
+    def visit_none_type(self, typ: NoneTyp) -> List[str]:
+        return []
+
+    def visit_callable_type(self, typ: CallableType) -> List[str]:
+        # TODO: generic callables
+        triggers = []
+        for arg in typ.arg_types:
+            triggers.extend(get_type_dependencies(arg))
+        triggers.extend(get_type_dependencies(typ.ret_type))
+        return triggers
+
+    def visit_deleted_type(self, typ: DeletedType) -> List[str]:
+        return []
+
+    def visit_partial_type(self, typ: PartialType) -> List[str]:
+        assert False, "Should not see a partial type here"
+
+    def visit_tuple_type(self, typ: TupleType) -> List[str]:
+        raise NotImplementedError
+
+    def visit_type_type(self, typ: TypeType) -> List[str]:
+        # TODO: replace with actual implementation
+        return []
+
+    def visit_type_var(self, typ: TypeVarType) -> List[str]:
+        # TODO: replace with actual implementation
+        return []
+
+    def visit_typeddict_type(self, typ: TypedDictType) -> List[str]:
+        raise NotImplementedError
+
+    def visit_unbound_type(self, typ: UnboundType) -> List[str]:
+        return []
+
+    def visit_uninhabited_type(self, typ: UninhabitedType) -> List[str]:
+        return []
+
+    def visit_union_type(self, typ: UnionType) -> List[str]:
+        raise NotImplementedError
+
+
+def non_trivial_bases(info: TypeInfo) -> List[TypeInfo]:
+    return [base for base in info.mro[1:]
+            if base.fullname() != 'builtins.object']
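
The visitor above records dependencies in a plain mapping from trigger name to the set of targets (or other triggers) to reprocess when that trigger fires. As a rough sketch with invented names (not taken from this patch), the map for a module 'mod' where g() calls f() and class C derives from Base might look like:

    deps = {
        '<mod.f>': {'mod.g'},                 # g must be rechecked if f's interface changes
        '<mod.Base.meth>': {'<mod.C.meth>'},  # a base-class change re-fires the subclass trigger
    }
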
diff --git a/mypy/server/subexpr.py b/mypy/server/subexpr.py
new file mode 100644
index 0000000..663fff2
--- /dev/null
+++ b/mypy/server/subexpr.py
@@ -0,0 +1,144 @@
+"""Find all subexpressions of an AST node."""
+
+from typing import List
+
+from mypy.nodes import (
+    Expression, Node, MemberExpr, YieldFromExpr, YieldExpr, CallExpr, OpExpr, ComparisonExpr,
+    SliceExpr, CastExpr, RevealTypeExpr, UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr,
+    IndexExpr, GeneratorExpr, ListComprehension, SetComprehension, DictionaryComprehension,
+    ConditionalExpr, TypeApplication, LambdaExpr, StarExpr, BackquoteExpr, AwaitExpr,
+)
+from mypy.traverser import TraverserVisitor
+
+
+def get_subexpressions(node: Node) -> List[Expression]:
+    visitor = SubexpressionFinder()
+    node.accept(visitor)
+    return visitor.expressions
+
+
+class SubexpressionFinder(TraverserVisitor):
+    def __init__(self) -> None:
+        self.expressions = []  # type: List[Expression]
+
+    def _visit_leaf(self, o: Expression) -> None:
+        self.add(o)
+
+    visit_int_expr = _visit_leaf
+    visit_name_expr = _visit_leaf
+    visit_float_expr = _visit_leaf
+    visit_str_expr = _visit_leaf
+    visit_bytes_expr = _visit_leaf
+    visit_unicode_expr = _visit_leaf
+    visit_complex_expr = _visit_leaf
+    visit_ellipsis = _visit_leaf
+    visit_super_expr = _visit_leaf
+    visit_type_var_expr = _visit_leaf
+    visit_type_alias_expr = _visit_leaf
+    visit_namedtuple_expr = _visit_leaf
+    visit_typeddict_expr = _visit_leaf
+    visit__promote_expr = _visit_leaf
+    visit_newtype_expr = _visit_leaf
+
+    def visit_member_expr(self, e: MemberExpr) -> None:
+        self.add(e)
+        super().visit_member_expr(e)
+
+    def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
+        self.add(e)
+        super().visit_yield_from_expr(e)
+
+    def visit_yield_expr(self, e: YieldExpr) -> None:
+        self.add(e)
+        super().visit_yield_expr(e)
+
+    def visit_call_expr(self, e: CallExpr) -> None:
+        self.add(e)
+        super().visit_call_expr(e)
+
+    def visit_op_expr(self, e: OpExpr) -> None:
+        self.add(e)
+        super().visit_op_expr(e)
+
+    def visit_comparison_expr(self, e: ComparisonExpr) -> None:
+        self.add(e)
+        super().visit_comparison_expr(e)
+
+    def visit_slice_expr(self, e: SliceExpr) -> None:
+        self.add(e)
+        super().visit_slice_expr(e)
+
+    def visit_cast_expr(self, e: CastExpr) -> None:
+        self.add(e)
+        super().visit_cast_expr(e)
+
+    def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None:
+        self.add(e)
+        super().visit_reveal_type_expr(e)
+
+    def visit_unary_expr(self, e: UnaryExpr) -> None:
+        self.add(e)
+        super().visit_unary_expr(e)
+
+    def visit_list_expr(self, e: ListExpr) -> None:
+        self.add(e)
+        super().visit_list_expr(e)
+
+    def visit_tuple_expr(self, e: TupleExpr) -> None:
+        self.add(e)
+        super().visit_tuple_expr(e)
+
+    def visit_dict_expr(self, e: DictExpr) -> None:
+        self.add(e)
+        super().visit_dict_expr(e)
+
+    def visit_set_expr(self, e: SetExpr) -> None:
+        self.add(e)
+        super().visit_set_expr(e)
+
+    def visit_index_expr(self, e: IndexExpr) -> None:
+        self.add(e)
+        super().visit_index_expr(e)
+
+    def visit_generator_expr(self, e: GeneratorExpr) -> None:
+        self.add(e)
+        super().visit_generator_expr(e)
+
+    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None:
+        self.add(e)
+        super().visit_dictionary_comprehension(e)
+
+    def visit_list_comprehension(self, e: ListComprehension) -> None:
+        self.add(e)
+        super().visit_list_comprehension(e)
+
+    def visit_set_comprehension(self, e: SetComprehension) -> None:
+        self.add(e)
+        super().visit_set_comprehension(e)
+
+    def visit_conditional_expr(self, e: ConditionalExpr) -> None:
+        self.add(e)
+        super().visit_conditional_expr(e)
+
+    def visit_type_application(self, e: TypeApplication) -> None:
+        self.add(e)
+        super().visit_type_application(e)
+
+    def visit_lambda_expr(self, e: LambdaExpr) -> None:
+        self.add(e)
+        super().visit_lambda_expr(e)
+
+    def visit_star_expr(self, e: StarExpr) -> None:
+        self.add(e)
+        super().visit_star_expr(e)
+
+    def visit_backquote_expr(self, e: BackquoteExpr) -> None:
+        self.add(e)
+        super().visit_backquote_expr(e)
+
+    def visit_await_expr(self, e: AwaitExpr) -> None:
+        self.add(e)
+        super().visit_await_expr(e)
+
+    def add(self, e: Expression) -> None:
+        self.expressions.append(e)
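
A usage sketch for the finder above (hypothetical variable names; 'tree' stands for a MypyFile produced by mypy's parser):

    exprs = get_subexpressions(tree)  # every Expression node anywhere in the module
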
diff --git a/mypy/server/target.py b/mypy/server/target.py
new file mode 100644
index 0000000..0b4636b
--- /dev/null
+++ b/mypy/server/target.py
@@ -0,0 +1,17 @@
+from typing import Iterable, Tuple, List
+
+
+def module_prefix(modules: Iterable[str], target: str) -> str:
+    return split_target(modules, target)[0]
+
+
+def split_target(modules: Iterable[str], target: str) -> Tuple[str, str]:
+    remaining = []  # type: List[str]
+    while True:
+        if target in modules:
+            return target, '.'.join(remaining)
+        components = target.rsplit('.', 1)
+        if len(components) == 1:
+            assert False, 'Cannot find module prefix for {}'.format(target)
+        target = components[0]
+        remaining.insert(0, components[1])
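
Illustrative calls (values invented, not from the patch):

    split_target({'pkg', 'pkg.mod'}, 'pkg.mod.Cls.meth')   # -> ('pkg.mod', 'Cls.meth')
    module_prefix({'pkg', 'pkg.mod'}, 'pkg.mod.Cls.meth')  # -> 'pkg.mod'
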
diff --git a/mypy/server/trigger.py b/mypy/server/trigger.py
new file mode 100644
index 0000000..2c161f5
--- /dev/null
+++ b/mypy/server/trigger.py
@@ -0,0 +1,5 @@
+"""AST triggers that are used for fine-grained dependency handling."""
+
+
+def make_trigger(name: str) -> str:
+    return '<%s>' % name
diff --git a/mypy/server/update.py b/mypy/server/update.py
new file mode 100644
index 0000000..8394707
--- /dev/null
+++ b/mypy/server/update.py
@@ -0,0 +1,425 @@
+"""Update build result by incrementally processing changed modules.
+
+Use fine-grained dependencies to update targets in other modules that
+may be affected by externally-visible changes in the changed modules.
+
+Terms:
+
+* A 'target' is a function definition or the top level of a module. We
+  refer to targets using their fully qualified name (e.g. 'mod.Cls.attr').
+  Targets are the smallest units of processing during fine-grained
+  incremental checking.
+* A 'trigger' represents the properties of a part of a program, and it
+  gets triggered/activated when these properties change. For example,
+  '<mod.func>' refers to a module-level function, and it gets triggered
+  if the signature of the function changes, or if the function is
+  removed.
+
+Some program state is maintained across multiple build increments:
+
+* The full ASTs of all modules in memory all the time (+ type map).
+* Maintain a fine-grained dependency map, which is from triggers to
+  targets/triggers. The latter determine what other parts of a program
+  need to be processed again due to an externally visible change to a
+  module.
+
+We perform a fine-grained incremental program update like this:
+
+* Determine which modules have changes in their source code since the
+  previous build.
+* Fully process these modules, creating new ASTs and symbol tables
+  for them. Retain the existing ASTs and symbol tables of modules that
+  have no changes in their source code.
+* Determine which parts of the changed modules have changed. The result
+  is a set of triggered triggers.
+* Using the dependency map, decide which other targets have become
+  stale and need to be reprocessed.
+* Replace old ASTs of the modules that we reprocessed earlier with
+  the new ones, but try to retain the identities of original externally
+  visible AST nodes so that we don't (always) need to patch references
+  in the rest of the program.
+* Semantically analyze and type check the stale targets.
+* Repeat the previous steps until nothing externally visible has changed.
+
+Major todo items:
+
+- Support multiple type checking passes
+"""
+
+from typing import Dict, List, Set, Tuple, Iterable, Union
+
+from mypy.build import BuildManager, State
+from mypy.checker import DeferredNode
+from mypy.errors import Errors
+from mypy.nodes import (
+    MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var, FuncBase, ClassDef
+)
+from mypy.types import Type
+from mypy.server.astdiff import compare_symbol_tables, is_identical_type
+from mypy.server.astmerge import merge_asts
+from mypy.server.aststrip import strip_target
+from mypy.server.deps import get_dependencies, get_dependencies_of_target
+from mypy.server.target import module_prefix, split_target
+from mypy.server.trigger import make_trigger
+
+
+# If True, print out debug logging output.
+DEBUG = False
+
+
+class FineGrainedBuildManager:
+    def __init__(self,
+                 manager: BuildManager,
+                 graph: Dict[str, State]) -> None:
+        self.manager = manager
+        self.graph = graph
+        self.deps = get_all_dependencies(manager, graph)
+        self.previous_targets_with_errors = manager.errors.targets()
+
+    def update(self, changed_modules: List[str]) -> List[str]:
+        """Update previous build result by processing changed modules.
+
+        Also propagate changes to other modules as needed, but only process
+        those parts of other modules that are affected by the changes. Retain
+        the existing ASTs and symbol tables of unaffected modules.
+
+        TODO: What about blocking errors?
+
+        Args:
+            manager: State of the build
+            graph: Additional state of the build
+            deps: Fine-grained dependency map for the build (mutated by this function)
+            changed_modules: Modules changed since the previous update/build (assume
+                this is correct; not validated here)
+
+        Returns:
+            A list of errors.
+        """
+        if DEBUG:
+            print('==== update ====')
+        manager = self.manager
+        graph = self.graph
+        old_modules = dict(manager.modules)
+        manager.errors.reset()
+        new_modules, new_type_maps = build_incremental_step(manager, changed_modules)
+        # TODO: What to do with stale dependencies?
+        triggered = calculate_active_triggers(manager, old_modules, new_modules)
+        if DEBUG:
+            print('triggered:', sorted(triggered))
+        replace_modules_with_new_variants(manager, graph, old_modules, new_modules, new_type_maps)
+        update_dependencies(new_modules, self.deps, graph)
+        propagate_changes_using_dependencies(manager, graph, self.deps, triggered,
+                                             set(changed_modules),
+                                             self.previous_targets_with_errors,
+                                             graph)
+        self.previous_targets_with_errors = manager.errors.targets()
+        return manager.errors.messages()
+
+
+def get_all_dependencies(manager: BuildManager, graph: Dict[str, State]) -> Dict[str, Set[str]]:
+    """Return the fine-grained dependency map for an entire build."""
+    deps = {}  # type: Dict[str, Set[str]]
+    update_dependencies(manager.modules, deps, graph)
+    return deps
+
+
+def build_incremental_step(manager: BuildManager,
+                           changed_modules: List[str]) -> Tuple[Dict[str, MypyFile],
+                                                                Dict[str, Dict[Expression, Type]]]:
+    """Build new versions of changed modules only.
+
+    Return the new ASTs for the changed modules. They will be totally
+    separate from the existing ASTs and need to be merged afterwards.
+    """
+    assert len(changed_modules) == 1
+    id = changed_modules[0]
+    path = manager.modules[id].path
+
+    # TODO: what if file is missing?
+    with open(path) as f:
+        source = f.read()
+
+    state = State(id=id,
+                  path=path,
+                  source=source,
+                  manager=manager)  # TODO: more args?
+    state.parse_file()
+    # TODO: state.fix_suppressed_dependencies()?
+    state.semantic_analysis()
+    state.semantic_analysis_pass_three()
+    # TODO: state.semantic_analysis_apply_patches()
+    state.type_check_first_pass()
+    # TODO: state.type_check_second_pass()?
+    state.finish_passes()
+    # TODO: state.write_cache()?
+    # TODO: state.mark_as_rechecked()?
+
+    return {id: state.tree}, {id: state.type_checker.type_map}
+
+
+def update_dependencies(new_modules: Dict[str, MypyFile],
+                        deps: Dict[str, Set[str]],
+                        graph: Dict[str, State]) -> None:
+    for id, node in new_modules.items():
+        module_deps = get_dependencies(prefix=id,
+                                       node=node,
+                                       type_map=graph[id].type_checker.type_map)
+        for trigger, targets in module_deps.items():
+            deps.setdefault(trigger, set()).update(targets)
+
+
+def calculate_active_triggers(manager: BuildManager,
+                              old_modules: Dict[str, MypyFile],
+                              new_modules: Dict[str, MypyFile]) -> Set[str]:
+    """Determine activated triggers by comparing old and new symbol tables.
+
+    For example, if only the signature of function m.f is different in the new
+    symbol table, return {'<m.f>'}.
+    """
+    names = set()  # type: Set[str]
+    for id in new_modules:
+        names |= compare_symbol_tables(id, old_modules[id].names, new_modules[id].names)
+    return {make_trigger(name) for name in names}
+
+
+def replace_modules_with_new_variants(
+        manager: BuildManager,
+        graph: Dict[str, State],
+        old_modules: Dict[str, MypyFile],
+        new_modules: Dict[str, MypyFile],
+        new_type_maps: Dict[str, Dict[Expression, Type]]) -> None:
+    """Replace modules with newly builds versions.
+
+    Retain the identities of externally visible AST nodes in the
+    old ASTs so that references to the affected modules from other
+    modules will still be valid (unless something was deleted or
+    replaced with an incompatible definition, in which case there
+    will be dangling references that will be handled by
+    propagate_changes_using_dependencies).
+    """
+    for id in new_modules:
+        merge_asts(old_modules[id], old_modules[id].names,
+                   new_modules[id], new_modules[id].names)
+        manager.modules[id] = old_modules[id]
+        graph[id].type_checker.type_map = new_type_maps[id]
+
+
+def propagate_changes_using_dependencies(
+        manager: BuildManager,
+        graph: Dict[str, State],
+        deps: Dict[str, Set[str]],
+        triggered: Set[str],
+        up_to_date_modules: Set[str],
+        targets_with_errors: Set[str],
+        modules: Iterable[str]) -> None:
+    # TODO: Multiple type checking passes
+    # TODO: Restrict the number of iterations to some maximum to avoid infinite loops
+
+    # Propagate changes until nothing visible has changed during the last
+    # iteration.
+    while triggered or targets_with_errors:
+        todo = find_targets_recursive(triggered, deps, manager.modules, up_to_date_modules)
+        # Also process targets that used to have errors, as otherwise some
+        # errors might be lost.
+        for target in targets_with_errors:
+            id = module_prefix(modules, target)
+            if id not in up_to_date_modules:
+                if id not in todo:
+                    todo[id] = set()
+                if DEBUG:
+                    print('process', target)
+                todo[id].update(lookup_target(manager.modules, target))
+        triggered = set()
+        # TODO: Preserve order (set is not optimal)
+        for id, nodes in sorted(todo.items(), key=lambda x: x[0]):
+            assert id not in up_to_date_modules
+            triggered |= reprocess_nodes(manager, graph, id, nodes, deps)
+        # Changes elsewhere may require us to reprocess modules that were
+        # previously considered up to date. For example, there may be a
+        # dependency loop that loops back to an originally processed module.
+        up_to_date_modules = set()
+        targets_with_errors = set()
+        if DEBUG:
+            print('triggered:', list(triggered))
+
+
+def find_targets_recursive(
+        triggers: Set[str],
+        deps: Dict[str, Set[str]],
+        modules: Dict[str, MypyFile],
+        up_to_date_modules: Set[str]) -> Dict[str, Set[DeferredNode]]:
+    """Find names of all targets that need to reprocessed, given some triggers.
+
+    Returns: Dictionary from module id to a set of stale targets.
+    """
+    result = {}  # type: Dict[str, Set[DeferredNode]]
+    worklist = triggers
+    processed = set()  # type: Set[str]
+
+    # Find AST nodes corresponding to each target.
+    #
+    # TODO: Don't rely on a set, since the items are in an unpredictable order.
+    while worklist:
+        processed |= worklist
+        current = worklist
+        worklist = set()
+        for target in current:
+            if target.startswith('<'):
+                worklist |= deps.get(target, set()) - processed
+            else:
+                module_id = module_prefix(modules, target)
+                if module_id in up_to_date_modules:
+                    # Already processed.
+                    continue
+                if module_id not in result:
+                    result[module_id] = set()
+                if DEBUG:
+                    print('process', target)
+                deferred = lookup_target(modules, target)
+                result[module_id].update(deferred)
+
+    return result
+
+
+def reprocess_nodes(manager: BuildManager,
+                    graph: Dict[str, State],
+                    module_id: str,
+                    nodeset: Set[DeferredNode],
+                    deps: Dict[str, Set[str]]) -> Set[str]:
+    """Reprocess a set of nodes within a single module.
+
+    Return fired triggers.
+    """
+    file_node = manager.modules[module_id]
+
+    def key(node: DeferredNode) -> str:
+        fullname = node.node.fullname()
+        if isinstance(node.node, FuncDef) and fullname is None:
+            assert node.node.info is not None
+            fullname = '%s.%s' % (node.node.info.fullname(), node.node.name())
+        return fullname
+
+    # Sort nodes by full name so that the order of processing is deterministic.
+    nodes = sorted(nodeset, key=key)
+
+    # Strip semantic analysis information.
+    for deferred in nodes:
+        strip_target(deferred.node)
+    semantic_analyzer = manager.semantic_analyzer
+
+    # Second pass of semantic analysis. We don't redo the first pass, because it only
+    # does local things that won't go stale.
+    for deferred in nodes:
+        with semantic_analyzer.file_context(
+                file_node=file_node,
+                fnam=file_node.path,
+                options=manager.options,
+                active_type=deferred.active_typeinfo):
+            manager.semantic_analyzer.refresh_partial(deferred.node)
+
+    # Third pass of semantic analysis.
+    for deferred in nodes:
+        with semantic_analyzer.file_context(
+                file_node=file_node,
+                fnam=file_node.path,
+                options=manager.options,
+                active_type=deferred.active_typeinfo):
+            manager.semantic_analyzer_pass3.refresh_partial(deferred.node)
+
+    # Keep track of potentially affected attribute types before type checking.
+    old_types_map = get_enclosing_namespace_types(nodes)
+
+    # Type check.
+    graph[module_id].type_checker.check_second_pass(nodes)  # TODO: check return value
+
+    # Check if any attribute types were changed and need to be propagated further.
+    new_triggered = get_triggered_namespace_items(old_types_map)
+
+    # Dependencies may have changed.
+    update_deps(module_id, nodes, graph, deps)
+
+    return new_triggered
+
+
+NamespaceNode = Union[TypeInfo, MypyFile]
+
+
+def get_enclosing_namespace_types(nodes: List[DeferredNode]) -> Dict[NamespaceNode,
+                                                                     Dict[str, Type]]:
+    types = {}  # type: Dict[NamespaceNode, Dict[str, Type]]
+    for deferred in nodes:
+        info = deferred.active_typeinfo
+        if info:
+            target = info  # type: NamespaceNode
+        elif isinstance(deferred.node, MypyFile):
+            target = deferred.node
+        else:
+            target = None
+        if target and target not in types:
+            local_types = {name: node.node.type
+                           for name, node in target.names.items()
+                           if isinstance(node.node, Var)}
+            types[target] = local_types
+    return types
+
+
+def get_triggered_namespace_items(old_types_map: Dict[NamespaceNode, Dict[str, Type]]) -> Set[str]:
+    new_triggered = set()
+    for namespace_node, old_types in old_types_map.items():
+        for name, node in namespace_node.names.items():
+            if (name in old_types and
+                    (not isinstance(node.node, Var) or
+                     not is_identical_type(node.node.type, old_types[name]))):
+                # Type checking a method changed an attribute type.
+                new_triggered.add(make_trigger('{}.{}'.format(namespace_node.fullname(), name)))
+    return new_triggered
+
+
+def update_deps(module_id: str,
+                nodes: List[DeferredNode],
+                graph: Dict[str, State],
+                deps: Dict[str, Set[str]]) -> None:
+    for deferred in nodes:
+        node = deferred.node
+        prefix = module_id
+        if isinstance(node, FuncBase) and node.info:
+            prefix += '.{}'.format(node.info.name())
+        type_map = graph[module_id].type_checker.type_map
+        new_deps = get_dependencies_of_target(prefix, node, type_map)
+        for trigger, targets in new_deps.items():
+            deps.setdefault(trigger, set()).update(targets)
+
+
+def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNode]:
+    """Look up a target by fully-qualified name."""
+    module, rest = split_target(modules, target)
+    if rest:
+        components = rest.split('.')
+    else:
+        components = []
+    node = modules[module]  # type: SymbolNode
+    file = None  # type: MypyFile
+    active_class = None
+    active_class_name = None
+    for c in components:
+        if isinstance(node, TypeInfo):
+            active_class = node
+            active_class_name = node.name()
+        # TODO: Is it possible for the assertion to fail?
+        if isinstance(node, MypyFile):
+            file = node
+        assert isinstance(node, (MypyFile, TypeInfo))
+        node = node.names[c].node
+    if isinstance(node, TypeInfo):
+        # A ClassDef target covers the body of the class and everything defined
+        # within it.  To get the body we include the entire surrounding target,
+        # typically a module top-level, since we don't support processing class
+        # bodies as separate entities for simplicity.
+        result = [DeferredNode(file, None, None)]
+        for name, symnode in node.names.items():
+            node = symnode.node
+            if isinstance(node, FuncDef):
+                result.extend(lookup_target(modules, target + '.' + name))
+        return result
+    assert isinstance(node, (FuncDef, MypyFile))
+    return [DeferredNode(node, active_class_name, active_class)]
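
A minimal driver for the manager defined above might look like the following sketch. It assumes 'manager' and 'graph' come from an ordinary mypy.build run and that exactly one module changed, which is the only case build_incremental_step supports so far; the surrounding set-up is invented for illustration:

    fine_grained = FineGrainedBuildManager(manager, graph)
    # After editing mod.py on disk, reprocess it and everything it affects.
    messages = fine_grained.update(['mod'])
    for line in messages:
        print(line)
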
diff --git a/mypy/test/__init__.py b/mypy/test/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/mypy/test/collect.py b/mypy/test/collect.py
new file mode 100644
index 0000000..e69de29
diff --git a/mypy/test/config.py b/mypy/test/config.py
new file mode 100644
index 0000000..681f866
--- /dev/null
+++ b/mypy/test/config.py
@@ -0,0 +1,19 @@
+import os
+import os.path
+
+import typing
+
+
+this_file_dir = os.path.dirname(os.path.realpath(__file__))
+PREFIX = os.path.dirname(os.path.dirname(this_file_dir))
+
+# Location of test data files such as test case descriptions.
+test_data_prefix = os.path.join(PREFIX, 'test-data', 'unit')
+
+assert os.path.isdir(test_data_prefix), \
+    'Test data prefix ({}) not set correctly'.format(test_data_prefix)
+
+# Temp directory used for the temp files created when running test cases.
+# This is *within* the tempfile.TemporaryDirectory that is chroot'ed per testcase.
+# It is also hard-coded in numerous places, so don't change it.
+test_temp_dir = 'tmp'
diff --git a/mypy/test/data.py b/mypy/test/data.py
new file mode 100644
index 0000000..38adf18
--- /dev/null
+++ b/mypy/test/data.py
@@ -0,0 +1,564 @@
+"""Utilities for processing .test files containing test case descriptions."""
+
+import os.path
+import os
+import posixpath
+import re
+from os import remove, rmdir
+import shutil
+
+import pytest  # type: ignore  # no pytest in typeshed
+from typing import Callable, List, Tuple, Set, Optional, Iterator, Any, Dict
+
+from mypy.myunit import TestCase, SkipTestCaseException
+
+
+root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
+
+
+def parse_test_cases(
+        path: str,
+        perform: Optional[Callable[['DataDrivenTestCase'], None]],
+        base_path: str = '.',
+        optional_out: bool = False,
+        include_path: str = None,
+        native_sep: bool = False) -> List['DataDrivenTestCase']:
+    """Parse a file with test case descriptions.
+
+    Return an array of test cases.
+
+    NB this function and DataDrivenTestCase are shared between the
+    myunit and pytest codepaths -- if something looks redundant,
+    that's likely the reason.
+    """
+    if native_sep:
+        join = os.path.join
+    else:
+        join = posixpath.join  # type: ignore
+    if not include_path:
+        include_path = os.path.dirname(path)
+    with open(path, encoding='utf-8') as f:
+        l = f.readlines()
+    for i in range(len(l)):
+        l[i] = l[i].rstrip('\n')
+    p = parse_test_data(l, path)
+    out = []  # type: List[DataDrivenTestCase]
+
+    # Process the parsed items. Each item has a header of form [id args],
+    # optionally followed by lines of text.
+    i = 0
+    while i < len(p):
+        ok = False
+        i0 = i
+        if p[i].id == 'case':
+            i += 1
+
+            files = []  # type: List[Tuple[str, str]] # path and contents
+            output_files = []  # type: List[Tuple[str, str]] # path and contents for output files
+            tcout = []  # type: List[str]  # Regular output errors
+            tcout2 = {}  # type: Dict[int, List[str]]  # Output errors for incremental, runs 2+
+            deleted_paths = {}  # type: Dict[int, Set[str]]  # from run number to set of paths
+            stale_modules = {}  # type: Dict[int, Set[str]]  # from run number to module names
+            rechecked_modules = {}  # type: Dict[int, Set[str]]  # from run number to module names
+            while i < len(p) and p[i].id != 'case':
+                if p[i].id == 'file' or p[i].id == 'outfile':
+                    # Record an extra file needed for the test case.
+                    arg = p[i].arg
+                    assert arg is not None
+                    contents = '\n'.join(p[i].data)
+                    contents = expand_variables(contents)
+                    file_entry = (join(base_path, arg), contents)
+                    if p[i].id == 'file':
+                        files.append(file_entry)
+                    elif p[i].id == 'outfile':
+                        output_files.append(file_entry)
+                elif p[i].id in ('builtins', 'builtins_py2'):
+                    # Use an alternative stub file for the builtins module.
+                    arg = p[i].arg
+                    assert arg is not None
+                    mpath = join(os.path.dirname(path), arg)
+                    if p[i].id == 'builtins':
+                        fnam = 'builtins.pyi'
+                    else:
+                        # Python 2
+                        fnam = '__builtin__.pyi'
+                    with open(mpath) as f:
+                        files.append((join(base_path, fnam), f.read()))
+                elif p[i].id == 'typing':
+                    # Use an alternative stub file for the typing module.
+                    arg = p[i].arg
+                    assert arg is not None
+                    src_path = join(os.path.dirname(path), arg)
+                    with open(src_path) as f:
+                        files.append((join(base_path, 'typing.pyi'), f.read()))
+                elif re.match(r'stale[0-9]*$', p[i].id):
+                    if p[i].id == 'stale':
+                        passnum = 1
+                    else:
+                        passnum = int(p[i].id[len('stale'):])
+                        assert passnum > 0
+                    arg = p[i].arg
+                    if arg is None:
+                        stale_modules[passnum] = set()
+                    else:
+                        stale_modules[passnum] = {item.strip() for item in arg.split(',')}
+                elif re.match(r'rechecked[0-9]*$', p[i].id):
+                    if p[i].id == 'rechecked':
+                        passnum = 1
+                    else:
+                        passnum = int(p[i].id[len('rechecked'):])
+                    arg = p[i].arg
+                    if arg is None:
+                        rechecked_modules[passnum] = set()
+                    else:
+                        rechecked_modules[passnum] = {item.strip() for item in arg.split(',')}
+                elif p[i].id == 'delete':
+                    # File to delete during a multi-step test case
+                    arg = p[i].arg
+                    assert arg is not None
+                    m = re.match(r'(.*)\.([0-9]+)$', arg)
+                    assert m, 'Invalid delete section: {}'.format(arg)
+                    num = int(m.group(2))
+                    assert num >= 2, "Can't delete during step {}".format(num)
+                    full = join(base_path, m.group(1))
+                    deleted_paths.setdefault(num, set()).add(full)
+                elif p[i].id == 'out' or p[i].id == 'out1':
+                    tcout = p[i].data
+                    tcout = [expand_variables(line) for line in tcout]
+                    if os.path.sep == '\\':
+                        tcout = [fix_win_path(line) for line in tcout]
+                    ok = True
+                elif re.match(r'out[0-9]*$', p[i].id):
+                    passnum = int(p[i].id[3:])
+                    assert passnum > 1
+                    output = p[i].data
+                    output = [expand_variables(line) for line in output]
+                    if native_sep and os.path.sep == '\\':
+                        output = [fix_win_path(line) for line in output]
+                    tcout2[passnum] = output
+                    ok = True
+                else:
+                    raise ValueError(
+                        'Invalid section header {} in {} at line {}'.format(
+                            p[i].id, path, p[i].line))
+                i += 1
+
+            for passnum in stale_modules.keys():
+                if passnum not in rechecked_modules:
+                    # If the set of rechecked modules isn't specified, make it the same as the set
+                    # of modules with a stale public interface.
+                    rechecked_modules[passnum] = stale_modules[passnum]
+                if (passnum in stale_modules
+                        and passnum in rechecked_modules
+                        and not stale_modules[passnum].issubset(rechecked_modules[passnum])):
+                    raise ValueError(
+                        ('Stale modules after pass {} must be a subset of rechecked '
+                         'modules ({}:{})').format(passnum, path, p[i0].line))
+
+            if optional_out:
+                ok = True
+
+            if ok:
+                input = expand_includes(p[i0].data, include_path)
+                expand_errors(input, tcout, 'main')
+                for file_path, contents in files:
+                    expand_errors(contents.split('\n'), tcout, file_path)
+                lastline = p[i].line if i < len(p) else p[i - 1].line + 9999
+                arg0 = p[i0].arg
+                assert arg0 is not None
+                tc = DataDrivenTestCase(arg0, input, tcout, tcout2, path,
+                                        p[i0].line, lastline, perform,
+                                        files, output_files, stale_modules,
+                                        rechecked_modules, deleted_paths, native_sep)
+                out.append(tc)
+        if not ok:
+            raise ValueError(
+                '{}, line {}: Error in test case description'.format(
+                    path, p[i0].line))
+
+    return out
+
+
+class DataDrivenTestCase(TestCase):
+    input = None  # type: List[str]
+    output = None  # type: List[str]  # Output for the first pass
+    output2 = None  # type: Dict[int, List[str]]  # Output for runs 2+, indexed by run number
+
+    file = ''
+    line = 0
+
+    # (file path, file content) tuples
+    files = None  # type: List[Tuple[str, str]]
+    expected_stale_modules = None  # type: Dict[int, Set[str]]
+    expected_rechecked_modules = None  # type: Dict[int, Set[str]]
+
+    clean_up = None  # type: List[Tuple[bool, str]]
+
+    def __init__(self,
+                 name: str,
+                 input: List[str],
+                 output: List[str],
+                 output2: Dict[int, List[str]],
+                 file: str,
+                 line: int,
+                 lastline: int,
+                 perform: Optional[Callable[['DataDrivenTestCase'], None]],
+                 files: List[Tuple[str, str]],
+                 output_files: List[Tuple[str, str]],
+                 expected_stale_modules: Dict[int, Set[str]],
+                 expected_rechecked_modules: Dict[int, Set[str]],
+                 deleted_paths: Dict[int, Set[str]],
+                 native_sep: bool = False,
+                 ) -> None:
+        super().__init__(name)
+        self.input = input
+        self.output = output
+        self.output2 = output2
+        self.lastline = lastline
+        self.file = file
+        self.line = line
+        self.perform = perform
+        self.files = files
+        self.output_files = output_files
+        self.expected_stale_modules = expected_stale_modules
+        self.expected_rechecked_modules = expected_rechecked_modules
+        self.deleted_paths = deleted_paths
+        self.native_sep = native_sep
+
+    def set_up(self) -> None:
+        super().set_up()
+        encountered_files = set()
+        self.clean_up = []
+        all_deleted = []  # type: List[str]
+        for paths in self.deleted_paths.values():
+            all_deleted += paths
+        for path, content in self.files:
+            dir = os.path.dirname(path)
+            for d in self.add_dirs(dir):
+                self.clean_up.append((True, d))
+            with open(path, 'w') as f:
+                f.write(content)
+            if path not in all_deleted:
+                # TODO: Don't assume that deleted files don't get reintroduced.
+                self.clean_up.append((False, path))
+            encountered_files.add(path)
+            if re.search(r'\.[2-9]$', path):
+                # Make sure new files introduced in the second and later runs are accounted for
+                renamed_path = path[:-2]
+                if renamed_path not in encountered_files and renamed_path not in all_deleted:
+                    encountered_files.add(renamed_path)
+                    self.clean_up.append((False, renamed_path))
+        for path, _ in self.output_files:
+            # Create directories for expected output and mark them to be cleaned up at the end
+            # of the test case.
+            dir = os.path.dirname(path)
+            for d in self.add_dirs(dir):
+                self.clean_up.append((True, d))
+            self.clean_up.append((False, path))
+
+    def add_dirs(self, dir: str) -> List[str]:
+        """Add all subdirectories required to create dir.
+
+        Return an array of the created directories in the order of creation.
+        """
+        if dir == '' or os.path.isdir(dir):
+            return []
+        else:
+            dirs = self.add_dirs(os.path.dirname(dir)) + [dir]
+            os.mkdir(dir)
+            return dirs
+
+    def run(self) -> None:
+        if self.name.endswith('-skip'):
+            raise SkipTestCaseException()
+        else:
+            assert self.perform is not None, 'Tests without `perform` should not be `run`'
+            self.perform(self)
+
+    def tear_down(self) -> None:
+        # First remove files.
+        for is_dir, path in reversed(self.clean_up):
+            if not is_dir:
+                remove(path)
+        # Then remove directories.
+        for is_dir, path in reversed(self.clean_up):
+            if is_dir:
+                pycache = os.path.join(path, '__pycache__')
+                if os.path.isdir(pycache):
+                    shutil.rmtree(pycache)
+                try:
+                    rmdir(path)
+                except OSError as error:
+                    print(' ** Error removing directory %s -- contents:' % path)
+                    for item in os.listdir(path):
+                        print('  ', item)
+                    # Most likely, there are some files in the
+                    # directory. Use rmtree to nuke the directory, but
+                    # fail the test case anyway, since this seems like
+                    # a bug in a test case -- we shouldn't leave
+                    # garbage lying around. By nuking the directory,
+                    # the next test run hopefully passes.
+                    path = error.filename
+                    # Be defensive -- only call rmtree if we're sure we aren't removing anything
+                    # valuable.
+                    if path.startswith('tmp/') and os.path.isdir(path):
+                        shutil.rmtree(path)
+                    raise
+        super().tear_down()
+
+
+class TestItem:
+    """Parsed test caseitem.
+
+    An item is of the form
+      [id arg]
+      .. data ..
+    """
+
+    id = ''
+    arg = ''  # type: Optional[str]
+
+    # Text data, array of strings
+    data = None  # type: List[str]
+
+    file = ''
+    line = 0  # Line number in file
+
+    def __init__(self, id: str, arg: Optional[str], data: List[str], file: str,
+                 line: int) -> None:
+        self.id = id
+        self.arg = arg
+        self.data = data
+        self.file = file
+        self.line = line
+
+
+def parse_test_data(l: List[str], fnam: str) -> List[TestItem]:
+    """Parse a list of lines that represent a sequence of test items."""
+
+    ret = []  # type: List[TestItem]
+    data = []  # type: List[str]
+
+    id = None  # type: Optional[str]
+    arg = None  # type: Optional[str]
+
+    i = 0
+    i0 = 0
+    while i < len(l):
+        s = l[i].strip()
+
+        if l[i].startswith('[') and s.endswith(']') and not s.startswith('[['):
+            if id:
+                data = collapse_line_continuation(data)
+                data = strip_list(data)
+                ret.append(TestItem(id, arg, strip_list(data), fnam, i0 + 1))
+            i0 = i
+            id = s[1:-1]
+            arg = None
+            if ' ' in id:
+                arg = id[id.index(' ') + 1:]
+                id = id[:id.index(' ')]
+            data = []
+        elif l[i].startswith('[['):
+            data.append(l[i][1:])
+        elif not l[i].startswith('--'):
+            data.append(l[i])
+        elif l[i].startswith('----'):
+            data.append(l[i][2:])
+        i += 1
+
+    # Process the last item.
+    if id:
+        data = collapse_line_continuation(data)
+        data = strip_list(data)
+        ret.append(TestItem(id, arg, data, fnam, i0 + 1))
+
+    return ret
+
+
+def strip_list(l: List[str]) -> List[str]:
+    """Return a stripped copy of l.
+
+    Strip whitespace at the end of all lines, and strip all empty
+    lines from the end of the array.
+    """
+
+    r = []  # type: List[str]
+    for s in l:
+        # Strip spaces at end of line
+        r.append(re.sub(r'\s+$', '', s))
+
+    while len(r) > 0 and r[-1] == '':
+        r.pop()
+
+    return r
+
+
+def collapse_line_continuation(l: List[str]) -> List[str]:
+    r = []  # type: List[str]
+    cont = False
+    for s in l:
+        ss = re.sub(r'\\$', '', s)
+        if cont:
+            r[-1] += re.sub('^ +', '', ss)
+        else:
+            r.append(ss)
+        cont = s.endswith('\\')
+    return r
+
+
+def expand_includes(a: List[str], base_path: str) -> List[str]:
+    """Expand @includes within a list of lines.
+
+    Replace all lines starting with @include with the contents of the
+    file name following the prefix. Look for the files in base_path.
+    """
+
+    res = []  # type: List[str]
+    for s in a:
+        if s.startswith('@include '):
+            fn = s.split(' ', 1)[1].strip()
+            with open(os.path.join(base_path, fn)) as f:
+                res.extend(f.readlines())
+        else:
+            res.append(s)
+    return res
+
+
+def expand_variables(s: str) -> str:
+    return s.replace('<ROOT>', root_dir)
+
+
+def expand_errors(input: List[str], output: List[str], fnam: str) -> None:
+    """Transform comments such as '# E: message' or
+    '# E:3: message' in input.
+
+    The result is lines like 'fnam:line: error: message'.
+    """
+
+    for i in range(len(input)):
+        # The first element of the split is not a comment.
+        for possible_err_comment in input[i].split(' # ')[1:]:
+            m = re.search(
+                '^([ENW]):((?P<col>\d+):)? (?P<message>.*)$',
+                possible_err_comment.strip())
+            if m:
+                if m.group(1) == 'E':
+                    severity = 'error'
+                elif m.group(1) == 'N':
+                    severity = 'note'
+                elif m.group(1) == 'W':
+                    severity = 'warning'
+                col = m.group('col')
+                if col is None:
+                    output.append(
+                        '{}:{}: {}: {}'.format(fnam, i + 1, severity, m.group('message')))
+                else:
+                    output.append('{}:{}:{}: {}: {}'.format(
+                        fnam, i + 1, col, severity, m.group('message')))
+
+
+def fix_win_path(line: str) -> str:
+    r"""Changes Windows paths to Linux paths in error messages.
+
+    E.g. foo\bar.py -> foo/bar.py.
+    """
+    line = line.replace(root_dir, root_dir.replace('\\', '/'))
+    m = re.match(r'^([\S/]+):(\d+:)?(\s+.*)', line)
+    if not m:
+        return line
+    else:
+        filename, lineno, message = m.groups()
+        return '{}:{}{}'.format(filename.replace('\\', '/'),
+                                lineno or '', message)
+
+
+def fix_cobertura_filename(line: str) -> str:
+    r"""Changes filename paths to Linux paths in Cobertura output files.
+
+    E.g. filename="pkg\subpkg\a.py" -> filename="pkg/subpkg/a.py".
+    """
+    m = re.search(r'<class .* filename="(?P<filename>.*?)"', line)
+    if not m:
+        return line
+    return '{}{}{}'.format(line[:m.start(1)],
+                           m.group('filename').replace('\\', '/'),
+                           line[m.end(1):])
+
+
+##
+#
+# pytest setup
+#
+##
+
+
+# This function name is special to pytest.  See
+# http://doc.pytest.org/en/latest/writing_plugins.html#initialization-command-line-and-configuration-hooks
+def pytest_addoption(parser: Any) -> None:
+    group = parser.getgroup('mypy')
+    group.addoption('--update-data', action='store_true', default=False,
+                    help='Update test data to reflect actual output'
+                         ' (supported only for certain tests)')
+
+
+# This function name is special to pytest.  See
+# http://doc.pytest.org/en/latest/writing_plugins.html#collection-hooks
+def pytest_pycollect_makeitem(collector: Any, name: str, obj: Any) -> Any:
+    if not isinstance(obj, type) or not issubclass(obj, DataSuite):
+        return None
+    return MypyDataSuite(name, parent=collector)
+
+
+class MypyDataSuite(pytest.Class):  # type: ignore  # inheriting from Any
+    def collect(self) -> Iterator['MypyDataCase']:
+        for case in self.obj.cases():
+            yield MypyDataCase(case.name, self, case)
+
+
+class MypyDataCase(pytest.Item):  # type: ignore  # inheriting from Any
+    def __init__(self, name: str, parent: MypyDataSuite, obj: DataDrivenTestCase) -> None:
+        self.skip = False
+        if name.endswith('-skip'):
+            self.skip = True
+            name = name[:-len('-skip')]
+
+        super().__init__(name, parent)
+        self.obj = obj
+
+    def runtest(self) -> None:
+        if self.skip:
+            pytest.skip()
+        update_data = self.config.getoption('--update-data', False)
+        self.parent.obj(update_data=update_data).run_case(self.obj)
+
+    def setup(self) -> None:
+        self.obj.set_up()
+
+    def teardown(self) -> None:
+        self.obj.tear_down()
+
+    def reportinfo(self) -> Tuple[str, int, str]:
+        return self.obj.file, self.obj.line, self.obj.name
+
+    def repr_failure(self, excinfo: Any) -> str:
+        if excinfo.errisinstance(SystemExit):
+            # We assume that before doing exit() (which raises SystemExit) we've printed
+            # enough context about what happened so that a stack trace is not useful.
+            # In particular, uncaught exceptions during semantic analysis or type checking
+            # call exit() and they already print out a stack trace.
+            excrepr = excinfo.exconly()
+        else:
+            self.parent._prunetraceback(excinfo)
+            excrepr = excinfo.getrepr(style='short')
+
+        return "data: {}:{}:\n{}".format(self.obj.file, self.obj.line, excrepr)
+
+
+class DataSuite:
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
+        return []
+
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        raise NotImplementedError
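
The parser above consumes sections such as [case ...], [file ...], [builtins ...], [delete ...] and [out]. A minimal, made-up test case description could look like this (the error text is illustrative rather than mypy's exact wording):

    [case testExample]
    import m
    m.f()  # E: Missing argument
    [file m.py]
    def f(x: int) -> None: pass
    [out]
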
diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py
new file mode 100644
index 0000000..2f28ab1
--- /dev/null
+++ b/mypy/test/helpers.py
@@ -0,0 +1,309 @@
+import os
+import re
+import sys
+import time
+
+from typing import List, Dict, Tuple, Callable, Any
+
+from mypy import defaults
+from mypy.myunit import AssertionFailure
+from mypy.test.data import DataDrivenTestCase
+
+
+# assert_string_arrays_equal displays special line alignment helper messages if
+# the first different line has at least this many characters.
+MIN_LINE_LENGTH_FOR_ALIGNMENT = 5
+
+
+def assert_string_arrays_equal(expected: List[str], actual: List[str],
+                               msg: str) -> None:
+    """Assert that two string arrays are equal.
+
+    Display any differences in a human-readable form.
+    """
+
+    actual = clean_up(actual)
+
+    if actual != expected:
+        num_skip_start = num_skipped_prefix_lines(expected, actual)
+        num_skip_end = num_skipped_suffix_lines(expected, actual)
+
+        sys.stderr.write('Expected:\n')
+
+        # If we omit some lines at the beginning, indicate it by displaying a line
+        # with '...'.
+        if num_skip_start > 0:
+            sys.stderr.write('  ...\n')
+
+        # Keep track of the first different line.
+        first_diff = -1
+
+        # Display only this many first characters of identical lines.
+        width = 75
+
+        for i in range(num_skip_start, len(expected) - num_skip_end):
+            if i >= len(actual) or expected[i] != actual[i]:
+                if first_diff < 0:
+                    first_diff = i
+                sys.stderr.write('  {:<45} (diff)'.format(expected[i]))
+            else:
+                e = expected[i]
+                sys.stderr.write('  ' + e[:width])
+                if len(e) > width:
+                    sys.stderr.write('...')
+            sys.stderr.write('\n')
+        if num_skip_end > 0:
+            sys.stderr.write('  ...\n')
+
+        sys.stderr.write('Actual:\n')
+
+        if num_skip_start > 0:
+            sys.stderr.write('  ...\n')
+
+        for j in range(num_skip_start, len(actual) - num_skip_end):
+            if j >= len(expected) or expected[j] != actual[j]:
+                sys.stderr.write('  {:<45} (diff)'.format(actual[j]))
+            else:
+                a = actual[j]
+                sys.stderr.write('  ' + a[:width])
+                if len(a) > width:
+                    sys.stderr.write('...')
+            sys.stderr.write('\n')
+        if actual == []:
+            sys.stderr.write('  (empty)\n')
+        if num_skip_end > 0:
+            sys.stderr.write('  ...\n')
+
+        sys.stderr.write('\n')
+
+        if first_diff >= 0 and first_diff < len(actual) and (
+                len(expected[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT
+                or len(actual[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT):
+            # Display message that helps visualize the differences between two
+            # long lines.
+            show_align_message(expected[first_diff], actual[first_diff])
+
+        raise AssertionFailure(msg)
+
+
+def update_testcase_output(testcase: DataDrivenTestCase, output: List[str]) -> None:
+    testcase_path = os.path.join(testcase.old_cwd, testcase.file)
+    with open(testcase_path) as f:
+        data_lines = f.read().splitlines()
+    test = '\n'.join(data_lines[testcase.line:testcase.lastline])
+
+    mapping = {}  # type: Dict[str, List[str]]
+    for old, new in zip(testcase.output, output):
+        PREFIX = 'error:'
+        ind = old.find(PREFIX)
+        if ind != -1 and old[:ind] == new[:ind]:
+            old, new = old[ind + len(PREFIX):], new[ind + len(PREFIX):]
+        mapping.setdefault(old, []).append(new)
+
+    for old in mapping:
+        if test.count(old) == len(mapping[old]):
+            betweens = test.split(old)
+
+            # Interleave betweens and mapping[old]
+            from itertools import chain
+            interleaved = [betweens[0]] + \
+                list(chain.from_iterable(zip(mapping[old], betweens[1:])))
+            test = ''.join(interleaved)
+
+    data_lines[testcase.line:testcase.lastline] = [test]
+    data = '\n'.join(data_lines)
+    with open(testcase_path, 'w') as f:
+        print(data, file=f)
+
+
+def show_align_message(s1: str, s2: str) -> None:
+    """Align s1 and s2 so that the their first difference is highlighted.
+
+    For example, if s1 is 'foobar' and s2 is 'fobar', display the
+    following lines:
+
+      E: foobar
+      A: fobar
+           ^
+
+    If s1 and s2 are long, only display a fragment of the strings around the
+    first difference. If s1 is very short, do nothing.
+    """
+
+    # Seeing what went wrong is trivial even without alignment if the expected
+    # string is very short. In this case do nothing to simplify output.
+    if len(s1) < 4:
+        return
+
+    maxw = 72  # Maximum number of characters shown
+
+    sys.stderr.write('Alignment of first line difference:\n')
+
+    trunc = False
+    while s1[:30] == s2[:30]:
+        s1 = s1[10:]
+        s2 = s2[10:]
+        trunc = True
+
+    if trunc:
+        s1 = '...' + s1
+        s2 = '...' + s2
+
+    max_len = max(len(s1), len(s2))
+    extra = ''
+    if max_len > maxw:
+        extra = '...'
+
+    # Write a chunk of both lines, aligned.
+    sys.stderr.write('  E: {}{}\n'.format(s1[:maxw], extra))
+    sys.stderr.write('  A: {}{}\n'.format(s2[:maxw], extra))
+    # Write an indicator character under the different columns.
+    sys.stderr.write('     ')
+    for j in range(min(maxw, max(len(s1), len(s2)))):
+        if s1[j:j + 1] != s2[j:j + 1]:
+            sys.stderr.write('^')  # Difference
+            break
+        else:
+            sys.stderr.write(' ')  # Equal
+    sys.stderr.write('\n')
+
+
+def assert_string_arrays_equal_wildcards(expected: List[str],
+                                         actual: List[str],
+                                         msg: str) -> None:
+    # Like above, but let a line with only '...' in expected match any number
+    # of lines in actual.
+    actual = clean_up(actual)
+
+    while actual != [] and actual[-1] == '':
+        actual = actual[:-1]
+
+    # Expand "..." wildcards away.
+    expected = match_array(expected, actual)
+    assert_string_arrays_equal(expected, actual, msg)
+
+
+def clean_up(a: List[str]) -> List[str]:
+    """Remove common directory prefix from all strings in a.
+
+    This uses a naive string replace; it seems to work well enough. Also
+    remove trailing carriage returns.
+    """
+    res = []
+    for s in a:
+        prefix = os.sep
+        ss = s
+        for p in prefix, prefix.replace(os.sep, '/'):
+            if p != '/' and p != '//' and p != '\\' and p != '\\\\':
+                ss = ss.replace(p, '')
+        # Ignore spaces at end of line.
+        ss = re.sub(' +$', '', ss)
+        res.append(re.sub('\\r$', '', ss))
+    return res
+
+
+def match_array(pattern: List[str], target: List[str]) -> List[str]:
+    """Expand '...' wildcards in pattern by matching against target."""
+
+    res = []  # type: List[str]
+    i = 0
+    j = 0
+
+    while i < len(pattern):
+        if pattern[i] == '...':
+            # Wildcard in pattern.
+            if i + 1 == len(pattern):
+                # Wildcard at end of pattern; match the rest of target.
+                res.extend(target[j:])
+                # Finished.
+                break
+            else:
+                # Must find the instance of the next pattern line in target.
+                jj = j
+                while jj < len(target):
+                    if target[jj] == pattern[i + 1]:
+                        break
+                    jj += 1
+                if jj == len(target):
+                    # No match. Get out.
+                    res.extend(pattern[i:])
+                    break
+                res.extend(target[j:jj])
+                i += 1
+                j = jj
+        elif (j < len(target) and (pattern[i] == target[j]
+                                   or (i + 1 < len(pattern)
+                                       and j + 1 < len(target)
+                                       and pattern[i + 1] == target[j + 1]))):
+            # In sync; advance one line. The above condition keeps sync also if
+            # only a single line is different, but loses it if two consecutive
+            # lines fail to match.
+            res.append(pattern[i])
+            i += 1
+            j += 1
+        else:
+            # Out of sync. Get out.
+            res.extend(pattern[i:])
+            break
+    return res
+
+
+def num_skipped_prefix_lines(a1: List[str], a2: List[str]) -> int:
+    num_eq = 0
+    while num_eq < min(len(a1), len(a2)) and a1[num_eq] == a2[num_eq]:
+        num_eq += 1
+    return max(0, num_eq - 4)
+
+
+def num_skipped_suffix_lines(a1: List[str], a2: List[str]) -> int:
+    num_eq = 0
+    while (num_eq < min(len(a1), len(a2))
+           and a1[-num_eq - 1] == a2[-num_eq - 1]):
+        num_eq += 1
+    return max(0, num_eq - 4)
+
+
+def testfile_pyversion(path: str) -> Tuple[int, int]:
+    if path.endswith('python2.test'):
+        return defaults.PYTHON2_VERSION
+    else:
+        return defaults.PYTHON3_VERSION
+
+
+def testcase_pyversion(path: str, testcase_name: str) -> Tuple[int, int]:
+    if testcase_name.endswith('python2'):
+        return defaults.PYTHON2_VERSION
+    else:
+        return testfile_pyversion(path)
+
+
+def normalize_error_messages(messages: List[str]) -> List[str]:
+    """Translate an array of error messages to use / as path separator."""
+
+    a = []
+    for m in messages:
+        a.append(m.replace(os.sep, '/'))
+    return a
+
+
+def retry_on_error(func: Callable[[], Any], max_wait: float = 1.0) -> None:
+    """Retry callback with exponential backoff when it raises OSError.
+
+    If the function still generates an error after max_wait seconds, propagate
+    the exception.
+
+    This can be effective against random file system operation failures on
+    Windows.
+    """
+    t0 = time.time()
+    wait_time = 0.01
+    while True:
+        try:
+            func()
+            return
+        except OSError:
+            wait_time = min(wait_time * 2, t0 + max_wait - time.time())
+            if wait_time <= 0.01:
+                # We have waited long enough; the error seems persistent.
+                raise
+            time.sleep(wait_time)
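To illustrate the '...' wildcard expansion implemented by match_array above,
here is a minimal sketch (not part of the patch; it assumes the built tree is
on sys.path so that mypy.test.helpers is importable):

    from mypy.test.helpers import match_array

    pattern = ['a.py:1: error: first', '...', 'b.py:9: error: last']
    target = ['a.py:1: error: first', 'noise 1', 'noise 2', 'b.py:9: error: last']
    # The single '...' line expands to the two 'noise' lines, so the expanded
    # pattern equals the target and assert_string_arrays_equal_wildcards passes.
    assert match_array(pattern, target) == target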
diff --git a/mypy/test/testargs.py b/mypy/test/testargs.py
new file mode 100644
index 0000000..4e27e37
--- /dev/null
+++ b/mypy/test/testargs.py
@@ -0,0 +1,18 @@
+"""Ensure the argparse parser and Options class are in sync.
+
+In particular, verify that the argparse defaults are the same as the Options
+defaults, and that argparse doesn't assign any new members to the Options
+object it creates.
+"""
+
+import typing
+from mypy.myunit import Suite, assert_equal
+from mypy.options import Options, BuildType
+from mypy.main import process_options
+
+
+class ArgSuite(Suite):
+    def test_coherence(self) -> None:
+        options = Options()
+        _, parsed_options = process_options([], require_targets=False)
+        assert_equal(options, parsed_options)
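The coherence check amounts to requiring that parsing an empty command line
reproduces the Options defaults exactly. A rough standalone sketch of the same
idea (assuming Options compares by its attribute dictionary, which is what the
assert_equal above relies on):

    from mypy.options import Options
    from mypy.main import process_options

    _, parsed = process_options([], require_targets=False)
    assert parsed.__dict__ == Options().__dict__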
diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py
new file mode 100644
index 0000000..3aae19f
--- /dev/null
+++ b/mypy/test/testcheck.py
@@ -0,0 +1,364 @@
+"""Type checker test cases"""
+
+import os
+import re
+import shutil
+import sys
+import time
+import typed_ast
+
+from typing import Dict, List, Optional, Set, Tuple
+
+from mypy import build, defaults
+from mypy.main import process_options
+from mypy.build import BuildSource, find_module_clear_caches
+from mypy.myunit import AssertionFailure
+from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.helpers import (
+    assert_string_arrays_equal, normalize_error_messages,
+    retry_on_error, testcase_pyversion, update_testcase_output,
+)
+from mypy.errors import CompileError
+from mypy.options import Options
+
+from mypy import experiments
+
+# List of files that contain test case descriptions.
+files = [
+    'check-basic.test',
+    'check-callable.test',
+    'check-classes.test',
+    'check-statements.test',
+    'check-generics.test',
+    'check-dynamic-typing.test',
+    'check-inference.test',
+    'check-inference-context.test',
+    'check-kwargs.test',
+    'check-overloading.test',
+    'check-type-checks.test',
+    'check-abstract.test',
+    'check-multiple-inheritance.test',
+    'check-super.test',
+    'check-modules.test',
+    'check-typevar-values.test',
+    'check-unsupported.test',
+    'check-unreachable-code.test',
+    'check-unions.test',
+    'check-isinstance.test',
+    'check-lists.test',
+    'check-namedtuple.test',
+    'check-typeddict.test',
+    'check-type-aliases.test',
+    'check-ignore.test',
+    'check-type-promotion.test',
+    'check-semanal-error.test',
+    'check-flags.test',
+    'check-incremental.test',
+    'check-serialize.test',
+    'check-bound.test',
+    'check-optional.test',
+    'check-fastparse.test',
+    'check-warnings.test',
+    'check-async-await.test',
+    'check-newtype.test',
+    'check-class-namedtuple.test',
+    'check-selftype.test',
+    'check-python2.test',
+    'check-columns.test',
+    'check-functions.test',
+    'check-tuples.test',
+    'check-expressions.test',
+    'check-generic-subtyping.test',
+    'check-varargs.test',
+    'check-newsyntax.test',
+    'check-underscores.test',
+    'check-classvar.test',
+    'check-enum.test',
+    'check-incomplete-fixture.test',
+    'check-custom-plugin.test',
+    'check-default-plugin.test',
+]
+
+
+class TypeCheckSuite(DataSuite):
+    def __init__(self, *, update_data: bool = False) -> None:
+        self.update_data = update_data
+
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for f in files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  None, test_temp_dir, True)
+        return c
+
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        incremental = ('incremental' in testcase.name.lower()
+                       or 'incremental' in testcase.file
+                       or 'serialize' in testcase.file)
+        optional = 'optional' in testcase.file
+        old_strict_optional = experiments.STRICT_OPTIONAL
+        try:
+            if incremental:
+                # Incremental tests are run once with a cold cache, once with a warm cache.
+                # Expect success on first run, errors from testcase.output (if any) on second run.
+                # We briefly sleep to make sure file timestamps are distinct.
+                self.clear_cache()
+                num_steps = max([2] + list(testcase.output2.keys()))
+                # Check that there are no file changes beyond the last run (they would be ignored).
+                for dn, dirs, files in os.walk(os.curdir):
+                    for file in files:
+                        m = re.search(r'\.([2-9])$', file)
+                        if m and int(m.group(1)) > num_steps:
+                            raise ValueError(
+                                'Output file {} exists though test case only has {} runs'.format(
+                                    file, num_steps))
+                for step in range(1, num_steps + 1):
+                    self.run_case_once(testcase, step)
+            elif optional:
+                experiments.STRICT_OPTIONAL = True
+                self.run_case_once(testcase)
+            else:
+                self.run_case_once(testcase)
+        finally:
+            experiments.STRICT_OPTIONAL = old_strict_optional
+
+    def clear_cache(self) -> None:
+        dn = defaults.CACHE_DIR
+
+        if os.path.exists(dn):
+            shutil.rmtree(dn)
+
+    def run_case_once(self, testcase: DataDrivenTestCase, incremental_step: int = 0) -> None:
+        find_module_clear_caches()
+        original_program_text = '\n'.join(testcase.input)
+        module_data = self.parse_module(original_program_text, incremental_step)
+
+        if incremental_step:
+            if incremental_step == 1:
+                # In run 1, copy program text to program file.
+                for module_name, program_path, program_text in module_data:
+                    if module_name == '__main__':
+                        with open(program_path, 'w') as f:
+                            f.write(program_text)
+                        break
+            elif incremental_step > 1:
+                # In runs 2+, copy *.[num] files to * files.
+                for dn, dirs, files in os.walk(os.curdir):
+                    for file in files:
+                        if file.endswith('.' + str(incremental_step)):
+                            full = os.path.join(dn, file)
+                            target = full[:-2]
+                            # Use retries to work around potential flakiness on Windows (AppVeyor).
+                            retry_on_error(lambda: shutil.copy(full, target))
+
+                            # In some systems, mtime has a resolution of 1 second which can cause
+                            # annoying-to-debug issues when a file has the same size after a
+                            # change. We manually set the mtime to circumvent this.
+                            new_time = os.stat(target).st_mtime + 1
+                            os.utime(target, times=(new_time, new_time))
+                # Delete files scheduled to be deleted in [delete <path>.num] sections.
+                for path in testcase.deleted_paths.get(incremental_step, set()):
+                    # Use retries to work around potential flakiness on Windows (AppVeyor).
+                    retry_on_error(lambda: os.remove(path))
+
+        # Parse options after moving files (in case mypy.ini is being moved).
+        options = self.parse_options(original_program_text, testcase, incremental_step)
+        options.use_builtins_fixtures = True
+        options.show_traceback = True
+        if 'optional' in testcase.file:
+            options.strict_optional = True
+        if incremental_step:
+            options.incremental = True
+        else:
+            options.cache_dir = os.devnull  # Don't waste time writing the cache
+
+        sources = []
+        for module_name, program_path, program_text in module_data:
+            # In incremental mode, pass the program text as None so we're forced to reread the module from disk.
+            sources.append(BuildSource(program_path, module_name,
+                                       None if incremental_step else program_text))
+        res = None
+        try:
+            res = build.build(sources=sources,
+                              options=options,
+                              alt_lib_path=test_temp_dir)
+            a = res.errors
+        except CompileError as e:
+            a = e.messages
+        a = normalize_error_messages(a)
+
+        # Make sure error messages match
+        if incremental_step == 0:
+            # Not incremental
+            msg = 'Unexpected type checker output ({}, line {})'
+            output = testcase.output
+        elif incremental_step == 1:
+            msg = 'Unexpected type checker output in incremental, run 1 ({}, line {})'
+            output = testcase.output
+        elif incremental_step > 1:
+            msg = ('Unexpected type checker output in incremental, run {}'.format(
+                incremental_step) + ' ({}, line {})')
+            output = testcase.output2.get(incremental_step, [])
+        else:
+            raise AssertionError()
+
+        if output != a and self.update_data:
+            update_testcase_output(testcase, a)
+        assert_string_arrays_equal(output, a, msg.format(testcase.file, testcase.line))
+
+        if incremental_step and res:
+            if options.follow_imports == 'normal' and testcase.output is None:
+                self.verify_cache(module_data, a, res.manager)
+            if incremental_step > 1:
+                suffix = '' if incremental_step == 2 else str(incremental_step - 1)
+                self.check_module_equivalence(
+                    'rechecked' + suffix,
+                    testcase.expected_rechecked_modules.get(incremental_step - 1),
+                    res.manager.rechecked_modules)
+                self.check_module_equivalence(
+                    'stale' + suffix,
+                    testcase.expected_stale_modules.get(incremental_step - 1),
+                    res.manager.stale_modules)
+
+    def check_module_equivalence(self, name: str,
+                                 expected: Optional[Set[str]], actual: Set[str]) -> None:
+        if expected is not None:
+            expected_normalized = sorted(expected)
+            actual_normalized = sorted(actual.difference({"__main__"}))
+            assert_string_arrays_equal(
+                expected_normalized,
+                actual_normalized,
+                ('Actual modules ({}) do not match expected modules ({}) '
+                 'for "[{} ...]"').format(
+                    ', '.join(actual_normalized),
+                    ', '.join(expected_normalized),
+                    name))
+
+    def verify_cache(self, module_data: List[Tuple[str, str, str]], a: List[str],
+                     manager: build.BuildManager) -> None:
+        # There should be valid cache metadata for each module except
+        # those in error_paths; for those there should not be.
+        #
+        # NOTE: When A imports B and there's an error in B, the cache
+        # data for B is invalidated, but the cache data for A remains.
+        # However build.process_graphs() will ignore A's cache data.
+        #
+        # Also note that when A imports B, and there's an error in A
+        # _due to a valid change in B_, the cache data for B will be
+        # invalidated and updated, but the old cache data for A will
+        # remain unchanged. As before, build.process_graphs() will
+        # ignore A's (old) cache data.
+        error_paths = self.find_error_paths(a)
+        modules = self.find_module_files()
+        modules.update({module_name: path for module_name, path, text in module_data})
+        missing_paths = self.find_missing_cache_files(modules, manager)
+        if not missing_paths.issubset(error_paths):
+            raise AssertionFailure("cache data discrepancy %s != %s" %
+                                   (missing_paths, error_paths))
+
+    def find_error_paths(self, a: List[str]) -> Set[str]:
+        hits = set()
+        for line in a:
+            m = re.match(r'([^\s:]+):\d+: error:', line)
+            if m:
+                # Normalize to Linux paths.
+                p = m.group(1).replace(os.path.sep, '/')
+                hits.add(p)
+        return hits
+
+    def find_module_files(self) -> Dict[str, str]:
+        modules = {}
+        for dn, dirs, files in os.walk(test_temp_dir):
+            dnparts = dn.split(os.sep)
+            assert dnparts[0] == test_temp_dir
+            del dnparts[0]
+            for file in files:
+                if file.endswith('.py'):
+                    if file == "__init__.py":
+                        # If the file path is `a/b/__init__.py`, exclude the file name
+                        # and make sure the module id is just `a.b`, not `a.b.__init__`.
+                        id = '.'.join(dnparts)
+                    else:
+                        base, ext = os.path.splitext(file)
+                        id = '.'.join(dnparts + [base])
+                    modules[id] = os.path.join(dn, file)
+        return modules
+
+    def find_missing_cache_files(self, modules: Dict[str, str],
+                                 manager: build.BuildManager) -> Set[str]:
+        missing = {}
+        for id, path in modules.items():
+            meta = build.find_cache_meta(id, path, manager)
+            if not build.validate_meta(meta, id, path, manager):
+                missing[id] = path
+        return set(missing.values())
+
+    def parse_module(self,
+                     program_text: str,
+                     incremental_step: int = 0) -> List[Tuple[str, str, str]]:
+        """Return the module and program names for a test case.
+
+        Normally, the unit tests will parse the default ('__main__')
+        module and follow all the imports listed there. You can override
+        this behavior and instruct the tests to check multiple modules
+        by using a comment like this in the test case input:
+
+          # cmd: mypy -m foo.bar foo.baz
+
+        You can also use `# cmdN:` to have a different cmd for incremental
+        step N (2, 3, ...).
+
+        Return a list of tuples (module name, file name, program text).
+        """
+        m = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE)
+        regex = '# cmd{}: mypy -m ([a-zA-Z0-9_. ]+)$'.format(incremental_step)
+        alt_m = re.search(regex, program_text, flags=re.MULTILINE)
+        if alt_m is not None and incremental_step > 1:
+            # Optionally return a different command if in a later step
+            # of incremental mode, otherwise default to reusing the
+            # original cmd.
+            m = alt_m
+
+        if m:
+            # The test case wants to use a non-default main
+            # module. Look up the module and give it as the thing to
+            # analyze.
+            module_names = m.group(1)
+            out = []
+            for module_name in module_names.split(' '):
+                path = build.find_module(module_name, [test_temp_dir])
+                with open(path) as f:
+                    program_text = f.read()
+                out.append((module_name, path, program_text))
+            return out
+        else:
+            return [('__main__', 'main', program_text)]
+
+    def parse_options(self, program_text: str, testcase: DataDrivenTestCase,
+                      incremental_step: int) -> Options:
+        options = Options()
+        flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE)
+        if incremental_step > 1:
+            flags2 = re.search('# flags{}: (.*)$'.format(incremental_step), program_text,
+                               flags=re.MULTILINE)
+            if flags2:
+                flags = flags2
+
+        flag_list = None
+        if flags:
+            flag_list = flags.group(1).split()
+            targets, options = process_options(flag_list, require_targets=False)
+            if targets:
+                # TODO: support specifying targets via the flags pragma
+                raise RuntimeError('Specifying targets via the flags pragma is not supported.')
+        else:
+            options = Options()
+
+        # Allow custom python version to override testcase_pyversion
+        if (not flag_list or
+                all(flag not in flag_list for flag in ['--python-version', '-2', '--py2'])):
+            options.python_version = testcase_pyversion(testcase.file, testcase.name)
+
+        return options
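The '# cmd: mypy -m ...' convention parsed by parse_module above can be
exercised in isolation; a small sketch of the regex step (the module names are
made up):

    import re

    program_text = '# cmd: mypy -m foo.bar foo.baz\nimport foo.bar\n'
    m = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text,
                  flags=re.MULTILINE)
    # One BuildSource is created per listed module; '__main__' is only used
    # when no cmd comment is present.
    assert m is not None and m.group(1).split(' ') == ['foo.bar', 'foo.baz']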
diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py
new file mode 100644
index 0000000..3452db6
--- /dev/null
+++ b/mypy/test/testcmdline.py
@@ -0,0 +1,112 @@
+"""Test cases for the command line.
+
+To begin we test that "mypy <directory>[/]" always recurses down the
+whole tree.
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+from typing import Tuple, List, Dict, Set
+
+from mypy.myunit import Suite, SkipTestCaseException, AssertionFailure
+from mypy.test.config import test_data_prefix, test_temp_dir
+from mypy.test.data import fix_cobertura_filename
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages
+from mypy.version import __version__, base_version
+
+# Path to Python 3 interpreter
+python3_path = sys.executable
+
+# Files containing test case descriptions.
+cmdline_files = [
+    'cmdline.test',
+    'reports.test',
+]
+
+
+class PythonEvaluationSuite(Suite):
+
+    def cases(self) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for f in cmdline_files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  test_python_evaluation,
+                                  base_path=test_temp_dir,
+                                  optional_out=True,
+                                  native_sep=True)
+        return c
+
+
+def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
+    # Write the program to a file.
+    program = '_program.py'
+    program_path = os.path.join(test_temp_dir, program)
+    with open(program_path, 'w') as file:
+        for s in testcase.input:
+            file.write('{}\n'.format(s))
+    args = parse_args(testcase.input[0])
+    args.append('--show-traceback')
+    # Type check the program.
+    fixed = [python3_path,
+             os.path.join(testcase.old_cwd, 'scripts', 'mypy')]
+    process = subprocess.Popen(fixed + args,
+                               stdout=subprocess.PIPE,
+                               stderr=subprocess.STDOUT,
+                               cwd=test_temp_dir)
+    outb = process.stdout.read()
+    # Split output into lines.
+    out = [s.rstrip('\n\r') for s in str(outb, 'utf8').splitlines()]
+    # Remove temp file.
+    os.remove(program_path)
+    # Compare actual output to expected.
+    if testcase.output_files:
+        for path, expected_content in testcase.output_files:
+            if not os.path.exists(path):
+                raise AssertionFailure(
+                    'Expected file {} was not produced by test case'.format(path))
+            with open(path, 'r') as output_file:
+                actual_output_content = output_file.read().splitlines()
+            normalized_output = normalize_file_output(actual_output_content,
+                                                      os.path.abspath(test_temp_dir))
+            if testcase.native_sep and os.path.sep == '\\':
+                normalized_output = [fix_cobertura_filename(line) for line in normalized_output]
+            normalized_output = normalize_error_messages(normalized_output)
+            assert_string_arrays_equal(expected_content.splitlines(), normalized_output,
+                                       'Output file {} did not match its expected output'.format(
+                                           path))
+    else:
+        out = normalize_error_messages(out)
+        assert_string_arrays_equal(testcase.output, out,
+                                   'Invalid output ({}, line {})'.format(
+                                       testcase.file, testcase.line))
+
+
+def parse_args(line: str) -> List[str]:
+    """Parse the first line of the program for the command line.
+
+    This should have the form
+
+      # cmd: mypy <options>
+
+    For example:
+
+      # cmd: mypy pkg/
+    """
+    m = re.match('# cmd: mypy (.*)$', line)
+    if not m:
+        return []  # No args; mypy will spit out an error.
+    return m.group(1).split()
+
+
+def normalize_file_output(content: List[str], current_abs_path: str) -> List[str]:
+    """Normalize file output for comparison."""
+    timestamp_regex = re.compile(r'\d{10}')
+    result = [x.replace(current_abs_path, '$PWD') for x in content]
+    result = [re.sub(r'\b' + re.escape(__version__) + r'\b', '$VERSION', x) for x in result]
+    result = [re.sub(r'\b' + re.escape(base_version) + r'\b', '$VERSION', x) for x in result]
+    result = [timestamp_regex.sub('$TIMESTAMP', x) for x in result]
+    return result
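A quick illustration of the placeholder substitution done by
normalize_file_output (sketch only; the path and timestamp are invented):

    >>> normalize_file_output(
    ...     ['/tmp/test-dir/cobertura.xml written at 1500000000'],
    ...     '/tmp/test-dir')
    ['$PWD/cobertura.xml written at $TIMESTAMP']

Version strings matching mypy.version.__version__ or base_version are replaced
with '$VERSION' in the same pass.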
diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py
new file mode 100644
index 0000000..a648782
--- /dev/null
+++ b/mypy/test/testdeps.py
@@ -0,0 +1,67 @@
+"""Test cases for generating node-level dependencies (for fine-grained incremental checking)"""
+
+import os
+from typing import List, Tuple, Dict, Optional
+
+from mypy import build
+from mypy.build import BuildSource
+from mypy.errors import CompileError
+from mypy.nodes import MypyFile, Expression
+from mypy.options import Options
+from mypy.server.deps import get_dependencies
+from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.types import Type
+
+files = [
+    'deps.test'
+]
+
+
+class GetDependenciesSuite(DataSuite):
+    def __init__(self, *, update_data: bool) -> None:
+        pass
+
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for f in files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  None, test_temp_dir, True)
+        return c
+
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        src = '\n'.join(testcase.input)
+        messages, files, type_map = self.build(src)
+        a = messages
+        assert files is not None and type_map is not None, ('cases where CompileError'
+                                                            ' occurred should not be run')
+        deps = get_dependencies('__main__', files['__main__'], type_map)
+
+        for source, targets in sorted(deps.items()):
+            line = '%s -> %s' % (source, ', '.join(sorted(targets)))
+            # Clean up output a bit
+            line = line.replace('__main__', 'm')
+            a.append(line)
+
+        assert_string_arrays_equal(
+            testcase.output, a,
+            'Invalid output ({}, line {})'.format(testcase.file,
+                                                  testcase.line))
+
+    def build(self, source: str) -> Tuple[List[str],
+                                          Optional[Dict[str, MypyFile]],
+                                          Optional[Dict[Expression, Type]]]:
+        options = Options()
+        options.use_builtins_fixtures = True
+        options.show_traceback = True
+        options.cache_dir = os.devnull
+        try:
+            result = build.build(sources=[BuildSource('main', None, source)],
+                                 options=options,
+                                 alt_lib_path=test_temp_dir)
+        except CompileError as e:
+            # TODO: Should perhaps not return None here.
+            return e.messages, None, None
+        return result.errors, result.files, result.types
diff --git a/mypy/test/testdiff.py b/mypy/test/testdiff.py
new file mode 100644
index 0000000..84e5389
--- /dev/null
+++ b/mypy/test/testdiff.py
@@ -0,0 +1,75 @@
+"""Test cases for AST diff (used for fine-grained incremental checking)"""
+
+import os
+from typing import List, Tuple, Dict, Optional
+
+from mypy import build
+from mypy.build import BuildSource
+from mypy.errors import CompileError
+from mypy.nodes import MypyFile
+from mypy.options import Options
+from mypy.server.astdiff import compare_symbol_tables
+from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.helpers import assert_string_arrays_equal
+
+
+files = [
+    'diff.test'
+]
+
+
+class ASTDiffSuite(DataSuite):
+    def __init__(self, *, update_data: bool) -> None:
+        pass
+
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for f in files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  None, test_temp_dir, True)
+        return c
+
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        first_src = '\n'.join(testcase.input)
+        files_dict = dict(testcase.files)
+        second_src = files_dict['tmp/next.py']
+
+        messages1, files1 = self.build(first_src)
+        messages2, files2 = self.build(second_src)
+
+        a = []
+        if messages1:
+            a.extend(messages1)
+        if messages2:
+            a.append('== next ==')
+            a.extend(messages2)
+
+        assert files1 is not None and files2 is not None, ('cases where CompileError'
+                                                           ' occurred should not be run')
+        diff = compare_symbol_tables(
+            '__main__',
+            files1['__main__'].names,
+            files2['__main__'].names)
+        for trigger in sorted(diff):
+            a.append(trigger)
+
+        assert_string_arrays_equal(
+            testcase.output, a,
+            'Invalid output ({}, line {})'.format(testcase.file,
+                                                  testcase.line))
+
+    def build(self, source: str) -> Tuple[List[str], Optional[Dict[str, MypyFile]]]:
+        options = Options()
+        options.use_builtins_fixtures = True
+        options.show_traceback = True
+        options.cache_dir = os.devnull
+        try:
+            result = build.build(sources=[BuildSource('main', None, source)],
+                                 options=options,
+                                 alt_lib_path=test_temp_dir)
+        except CompileError as e:
+            # TODO: Is it okay to return None?
+            return e.messages, None
+        return result.errors, result.files
diff --git a/mypy/test/testextensions.py b/mypy/test/testextensions.py
new file mode 100644
index 0000000..cf0c99c
--- /dev/null
+++ b/mypy/test/testextensions.py
@@ -0,0 +1,143 @@
+import sys
+import pickle
+import typing
+try:
+    import collections.abc as collections_abc
+except ImportError:
+    import collections as collections_abc  # type: ignore # PY32 and earlier
+from unittest import TestCase, main, skipUnless
+sys.path[0:0] = ['extensions']
+from mypy_extensions import TypedDict
+
+
+class BaseTestCase(TestCase):
+
+    def assertIsSubclass(self, cls, class_or_tuple, msg=None):
+        if not issubclass(cls, class_or_tuple):
+            message = '%r is not a subclass of %r' % (cls, class_or_tuple)
+            if msg is not None:
+                message += ' : %s' % msg
+            raise self.failureException(message)
+
+    def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
+        if issubclass(cls, class_or_tuple):
+            message = '%r is a subclass of %r' % (cls, class_or_tuple)
+            if msg is not None:
+                message += ' : %s' % msg
+            raise self.failureException(message)
+
+
+PY36 = sys.version_info[:2] >= (3, 6)
+
+PY36_TESTS = """
+Label = TypedDict('Label', [('label', str)])
+
+class Point2D(TypedDict):
+    x: int
+    y: int
+
+class LabelPoint2D(Point2D, Label): ...
+
+class Options(TypedDict, total=False):
+    log_level: int
+    log_path: str
+"""
+
+if PY36:
+    exec(PY36_TESTS)
+
+
+class TypedDictTests(BaseTestCase):
+
+    def test_basics_iterable_syntax(self):
+        Emp = TypedDict('Emp', {'name': str, 'id': int})
+        self.assertIsSubclass(Emp, dict)
+        self.assertIsSubclass(Emp, typing.MutableMapping)
+        self.assertNotIsSubclass(Emp, collections_abc.Sequence)
+        jim = Emp(name='Jim', id=1)
+        self.assertIs(type(jim), dict)
+        self.assertEqual(jim['name'], 'Jim')
+        self.assertEqual(jim['id'], 1)
+        self.assertEqual(Emp.__name__, 'Emp')
+        self.assertEqual(Emp.__module__, 'mypy.test.testextensions')
+        self.assertEqual(Emp.__bases__, (dict,))
+        self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})
+        self.assertEqual(Emp.__total__, True)
+
+    def test_basics_keywords_syntax(self):
+        Emp = TypedDict('Emp', name=str, id=int)
+        self.assertIsSubclass(Emp, dict)
+        self.assertIsSubclass(Emp, typing.MutableMapping)
+        self.assertNotIsSubclass(Emp, collections_abc.Sequence)
+        jim = Emp(name='Jim', id=1)  # type: ignore # mypy doesn't support keyword syntax yet
+        self.assertIs(type(jim), dict)
+        self.assertEqual(jim['name'], 'Jim')
+        self.assertEqual(jim['id'], 1)
+        self.assertEqual(Emp.__name__, 'Emp')
+        self.assertEqual(Emp.__module__, 'mypy.test.testextensions')
+        self.assertEqual(Emp.__bases__, (dict,))
+        self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})
+        self.assertEqual(Emp.__total__, True)
+
+    def test_typeddict_errors(self):
+        Emp = TypedDict('Emp', {'name': str, 'id': int})
+        self.assertEqual(TypedDict.__module__, 'mypy_extensions')
+        jim = Emp(name='Jim', id=1)
+        with self.assertRaises(TypeError):
+            isinstance({}, Emp)  # type: ignore
+        with self.assertRaises(TypeError):
+            isinstance(jim, Emp)  # type: ignore
+        with self.assertRaises(TypeError):
+            issubclass(dict, Emp)  # type: ignore
+        with self.assertRaises(TypeError):
+            TypedDict('Hi', x=1)
+        with self.assertRaises(TypeError):
+            TypedDict('Hi', [('x', int), ('y', 1)])
+        with self.assertRaises(TypeError):
+            TypedDict('Hi', [('x', int)], y=int)
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_py36_class_syntax_usage(self):
+        self.assertEqual(LabelPoint2D.__annotations__, {'x': int, 'y': int, 'label': str})  # noqa
+        self.assertEqual(LabelPoint2D.__bases__, (dict,))  # noqa
+        self.assertEqual(LabelPoint2D.__total__, True)  # noqa
+        self.assertNotIsSubclass(LabelPoint2D, typing.Sequence)  # noqa
+        not_origin = Point2D(x=0, y=1)  # noqa
+        self.assertEqual(not_origin['x'], 0)
+        self.assertEqual(not_origin['y'], 1)
+        other = LabelPoint2D(x=0, y=1, label='hi')  # noqa
+        self.assertEqual(other['label'], 'hi')
+
+    def test_pickle(self):
+        global EmpD  # pickle wants to reference the class by name
+        EmpD = TypedDict('EmpD', name=str, id=int)
+        jane = EmpD({'name': 'jane', 'id': 37})
+        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+            z = pickle.dumps(jane, proto)
+            jane2 = pickle.loads(z)
+            self.assertEqual(jane2, jane)
+            self.assertEqual(jane2, {'name': 'jane', 'id': 37})
+            ZZ = pickle.dumps(EmpD, proto)
+            EmpDnew = pickle.loads(ZZ)
+            self.assertEqual(EmpDnew({'name': 'jane', 'id': 37}), jane)
+
+    def test_optional(self):
+        EmpD = TypedDict('EmpD', name=str, id=int)
+
+        self.assertEqual(typing.Optional[EmpD], typing.Union[None, EmpD])
+        self.assertNotEqual(typing.List[EmpD], typing.Tuple[EmpD])
+
+    def test_total(self):
+        D = TypedDict('D', {'x': int}, total=False)
+        self.assertEqual(D(), {})
+        self.assertEqual(D(x=1), {'x': 1})
+        self.assertEqual(D.__total__, False)
+
+        if PY36:
+            self.assertEqual(Options(), {})  # noqa
+            self.assertEqual(Options(log_level=2), {'log_level': 2})  # noqa
+            self.assertEqual(Options.__total__, False)  # noqa
+
+
+if __name__ == '__main__':
+    main()
diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py
new file mode 100644
index 0000000..fff2e18
--- /dev/null
+++ b/mypy/test/testfinegrained.py
@@ -0,0 +1,120 @@
+"""Test cases for fine-grained incremental checking.
+
+Each test case runs a batch build followed by one or more fine-grained
+incremental steps. We verify that each step produces the expected output.
+
+See the comment at the top of test-data/unit/fine-grained.test for more
+information.
+"""
+
+import os
+import re
+import shutil
+from typing import List, Tuple, Dict
+
+from mypy import build
+from mypy.build import BuildManager, BuildSource, Graph
+from mypy.errors import Errors, CompileError
+from mypy.nodes import Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression
+from mypy.options import Options
+from mypy.server.astmerge import merge_asts
+from mypy.server.subexpr import get_subexpressions
+from mypy.server.update import FineGrainedBuildManager
+from mypy.strconv import StrConv, indent
+from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.test.testtypegen import ignore_node
+from mypy.types import TypeStrVisitor, Type
+from mypy.util import short_type
+
+
+files = [
+    'fine-grained.test'
+]
+
+
+class FineGrainedSuite(DataSuite):
+    def __init__(self, *, update_data: bool) -> None:
+        pass
+
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for f in files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  None, test_temp_dir, True)
+        return c
+
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        main_src = '\n'.join(testcase.input)
+        messages, manager, graph = self.build(main_src)
+
+        a = []
+        if messages:
+            a.extend(messages)
+
+        fine_grained_manager = FineGrainedBuildManager(manager, graph)
+
+        steps = find_steps()
+        for changed_paths in steps:
+            modules = []
+            for module, path in changed_paths:
+                new_path = re.sub(r'\.[0-9]+$', '', path)
+                shutil.copy(path, new_path)
+                modules.append(module)
+
+            new_messages = fine_grained_manager.update(modules)
+            new_messages = [re.sub('^tmp' + re.escape(os.sep), '', message)
+                            for message in new_messages]
+
+            a.append('==')
+            a.extend(new_messages)
+
+        # Normalize paths in test output (for Windows).
+        a = [line.replace('\\', '/') for line in a]
+
+        assert_string_arrays_equal(
+            testcase.output, a,
+            'Invalid output ({}, line {})'.format(testcase.file,
+                                                  testcase.line))
+
+    def build(self, source: str) -> Tuple[List[str], BuildManager, Graph]:
+        options = Options()
+        options.use_builtins_fixtures = True
+        options.show_traceback = True
+        options.cache_dir = os.devnull
+        try:
+            result = build.build(sources=[BuildSource('main', None, source)],
+                                 options=options,
+                                 alt_lib_path=test_temp_dir)
+        except CompileError as e:
+            # TODO: We need a manager and a graph in this case as well
+            assert False, '\n'.join(e.messages)
+            return e.messages, None, None
+        return result.errors, result.manager, result.graph
+
+
+def find_steps() -> List[List[Tuple[str, str]]]:
+    """Return a list of build step representations.
+
+    Each build step is a list of (module id, path) tuples, and each
+    path is of form 'dir/mod.py.2' (where 2 is the step number).
+    """
+    steps = {}  # type: Dict[int, List[Tuple[str, str]]]
+    for dn, dirs, files in os.walk(test_temp_dir):
+        dnparts = dn.split(os.sep)
+        assert dnparts[0] == test_temp_dir
+        del dnparts[0]
+        for filename in files:
+            m = re.match(r'.*\.([0-9]+)$', filename)
+            if m:
+                num = int(m.group(1))
+                assert num >= 2
+                name = re.sub(r'\.py.*', '', filename)
+                module = '.'.join(dnparts + [name])
+                module = re.sub(r'\.__init__$', '', module)
+                path = os.path.join(dn, filename)
+                steps.setdefault(num, []).append((module, path))
+    max_step = max(steps)
+    return [steps[num] for num in range(2, max_step + 1)]
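The numeric suffix convention used by find_steps can be checked with the same
regexes in isolation (a sketch with an invented file name):

    import re

    filename = 'mod.py.3'  # step-3 variant of mod.py
    num = int(re.match(r'.*\.([0-9]+)$', filename).group(1))
    name = re.sub(r'\.py.*', '', filename)
    assert (num, name) == (3, 'mod')
    # find_steps groups such files by step number, so this file would land in
    # the second returned batch (steps start at 2).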
diff --git a/mypy/test/testgraph.py b/mypy/test/testgraph.py
new file mode 100644
index 0000000..dbbe487
--- /dev/null
+++ b/mypy/test/testgraph.py
@@ -0,0 +1,76 @@
+"""Test cases for graph processing code in build.py."""
+
+from typing import AbstractSet, Dict, Set, List
+
+from mypy.myunit import Suite, assert_equal
+from mypy.build import BuildManager, State, BuildSourceSet
+from mypy.build import topsort, strongly_connected_components, sorted_components, order_ascc
+from mypy.version import __version__
+from mypy.options import Options
+from mypy.report import Reports
+from mypy.plugin import Plugin
+from mypy import defaults
+from mypy.errors import Errors
+
+
+class GraphSuite(Suite):
+
+    def test_topsort(self) -> None:
+        a = frozenset({'A'})
+        b = frozenset({'B'})
+        c = frozenset({'C'})
+        d = frozenset({'D'})
+        data = {a: {b, c}, b: {d}, c: {d}}  # type: Dict[AbstractSet[str], Set[AbstractSet[str]]]
+        res = list(topsort(data))
+        assert_equal(res, [{d}, {b, c}, {a}])
+
+    def test_scc(self) -> None:
+        vertices = {'A', 'B', 'C', 'D'}
+        edges = {'A': ['B', 'C'],
+                 'B': ['C'],
+                 'C': ['B', 'D'],
+                 'D': []}  # type: Dict[str, List[str]]
+        sccs = set(frozenset(x) for x in strongly_connected_components(vertices, edges))
+        assert_equal(sccs,
+                     {frozenset({'A'}),
+                      frozenset({'B', 'C'}),
+                      frozenset({'D'})})
+
+    def _make_manager(self) -> BuildManager:
+        errors = Errors()
+        options = Options()
+        manager = BuildManager(
+            data_dir='',
+            lib_path=[],
+            ignore_prefix='',
+            source_set=BuildSourceSet([]),
+            reports=Reports('', {}),
+            options=options,
+            version_id=__version__,
+            plugin=Plugin(options),
+            errors=errors,
+        )
+        return manager
+
+    def test_sorted_components(self) -> None:
+        manager = self._make_manager()
+        graph = {'a': State('a', None, 'import b, c', manager),
+                 'd': State('d', None, 'pass', manager),
+                 'b': State('b', None, 'import c', manager),
+                 'c': State('c', None, 'import b, d', manager),
+                 }
+        res = sorted_components(graph)
+        assert_equal(res, [frozenset({'d'}), frozenset({'c', 'b'}), frozenset({'a'})])
+
+    def test_order_ascc(self) -> None:
+        manager = self._make_manager()
+        graph = {'a': State('a', None, 'import b, c', manager),
+                 'd': State('d', None, 'def f(): import a', manager),
+                 'b': State('b', None, 'import c', manager),
+                 'c': State('c', None, 'import b, d', manager),
+                 }
+        res = sorted_components(graph)
+        assert_equal(res, [frozenset({'a', 'd', 'c', 'b'})])
+        ascc = res[0]
+        scc = order_ascc(graph, ascc)
+        assert_equal(scc, ['d', 'c', 'b', 'a'])
diff --git a/mypy/test/testinfer.py b/mypy/test/testinfer.py
new file mode 100644
index 0000000..2142456
--- /dev/null
+++ b/mypy/test/testinfer.py
@@ -0,0 +1,223 @@
+"""Test cases for type inference helper functions."""
+
+from typing import List, Optional, Tuple, Union
+
+from mypy.myunit import Suite, assert_equal, assert_true
+from mypy.checkexpr import map_actuals_to_formals
+from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED
+from mypy.types import AnyType, TupleType, Type
+
+
+class MapActualsToFormalsSuite(Suite):
+    """Test cases for checkexpr.map_actuals_to_formals."""
+
+    def test_basic(self) -> None:
+        self.assert_map([], [], [])
+
+    def test_positional_only(self) -> None:
+        self.assert_map([ARG_POS],
+                        [ARG_POS],
+                        [[0]])
+        self.assert_map([ARG_POS, ARG_POS],
+                        [ARG_POS, ARG_POS],
+                        [[0], [1]])
+
+    def test_optional(self) -> None:
+        self.assert_map([],
+                        [ARG_OPT],
+                        [[]])
+        self.assert_map([ARG_POS],
+                        [ARG_OPT],
+                        [[0]])
+        self.assert_map([ARG_POS],
+                        [ARG_OPT, ARG_OPT],
+                        [[0], []])
+
+    def test_callee_star(self) -> None:
+        self.assert_map([],
+                        [ARG_STAR],
+                        [[]])
+        self.assert_map([ARG_POS],
+                        [ARG_STAR],
+                        [[0]])
+        self.assert_map([ARG_POS, ARG_POS],
+                        [ARG_STAR],
+                        [[0, 1]])
+
+    def test_caller_star(self) -> None:
+        self.assert_map([ARG_STAR],
+                        [ARG_STAR],
+                        [[0]])
+        self.assert_map([ARG_POS, ARG_STAR],
+                        [ARG_STAR],
+                        [[0, 1]])
+        self.assert_map([ARG_STAR],
+                        [ARG_POS, ARG_STAR],
+                        [[0], [0]])
+        self.assert_map([ARG_STAR],
+                        [ARG_OPT, ARG_STAR],
+                        [[0], [0]])
+
+    def test_too_many_caller_args(self) -> None:
+        self.assert_map([ARG_POS],
+                        [],
+                        [])
+        self.assert_map([ARG_STAR],
+                        [],
+                        [])
+        self.assert_map([ARG_STAR],
+                        [ARG_POS],
+                        [[0]])
+
+    def test_tuple_star(self) -> None:
+        self.assert_vararg_map(
+            [ARG_STAR],
+            [ARG_POS],
+            [[0]],
+            self.tuple(AnyType()))
+        self.assert_vararg_map(
+            [ARG_STAR],
+            [ARG_POS, ARG_POS],
+            [[0], [0]],
+            self.tuple(AnyType(), AnyType()))
+        self.assert_vararg_map(
+            [ARG_STAR],
+            [ARG_POS, ARG_OPT, ARG_OPT],
+            [[0], [0], []],
+            self.tuple(AnyType(), AnyType()))
+
+    def tuple(self, *args: Type) -> TupleType:
+        return TupleType(list(args), None)
+
+    def test_named_args(self) -> None:
+        self.assert_map(
+            ['x'],
+            [(ARG_POS, 'x')],
+            [[0]])
+        self.assert_map(
+            ['y', 'x'],
+            [(ARG_POS, 'x'), (ARG_POS, 'y')],
+            [[1], [0]])
+
+    def test_some_named_args(self) -> None:
+        self.assert_map(
+            ['y'],
+            [(ARG_OPT, 'x'), (ARG_OPT, 'y'), (ARG_OPT, 'z')],
+            [[], [0], []])
+
+    def test_missing_named_arg(self) -> None:
+        self.assert_map(
+            ['y'],
+            [(ARG_OPT, 'x')],
+            [[]])
+
+    def test_duplicate_named_arg(self) -> None:
+        self.assert_map(
+            ['x', 'x'],
+            [(ARG_OPT, 'x')],
+            [[0, 1]])
+
+    def test_varargs_and_bare_asterisk(self) -> None:
+        self.assert_map(
+            [ARG_STAR],
+            [ARG_STAR, (ARG_NAMED, 'x')],
+            [[0], []])
+        self.assert_map(
+            [ARG_STAR, 'x'],
+            [ARG_STAR, (ARG_NAMED, 'x')],
+            [[0], [1]])
+
+    def test_keyword_varargs(self) -> None:
+        self.assert_map(
+            ['x'],
+            [ARG_STAR2],
+            [[0]])
+        self.assert_map(
+            ['x', ARG_STAR2],
+            [ARG_STAR2],
+            [[0, 1]])
+        self.assert_map(
+            ['x', ARG_STAR2],
+            [(ARG_POS, 'x'), ARG_STAR2],
+            [[0], [1]])
+        self.assert_map(
+            [ARG_POS, ARG_STAR2],
+            [(ARG_POS, 'x'), ARG_STAR2],
+            [[0], [1]])
+
+    def test_both_kinds_of_varargs(self) -> None:
+        self.assert_map(
+            [ARG_STAR, ARG_STAR2],
+            [(ARG_POS, 'x'), (ARG_POS, 'y')],
+            [[0, 1], [0, 1]])
+
+    def test_special_cases(self) -> None:
+        self.assert_map([ARG_STAR],
+                        [ARG_STAR, ARG_STAR2],
+                        [[0], []])
+        self.assert_map([ARG_STAR, ARG_STAR2],
+                        [ARG_STAR, ARG_STAR2],
+                        [[0], [1]])
+        self.assert_map([ARG_STAR2],
+                        [(ARG_POS, 'x'), ARG_STAR2],
+                        [[0], [0]])
+        self.assert_map([ARG_STAR2],
+                        [ARG_STAR2],
+                        [[0]])
+
+    def assert_map(self,
+                   caller_kinds_: List[Union[int, str]],
+                   callee_kinds_: List[Union[int, Tuple[int, str]]],
+                   expected: List[List[int]],
+                   ) -> None:
+        caller_kinds, caller_names = expand_caller_kinds(caller_kinds_)
+        callee_kinds, callee_names = expand_callee_kinds(callee_kinds_)
+        result = map_actuals_to_formals(
+            caller_kinds,
+            caller_names,
+            callee_kinds,
+            callee_names,
+            lambda i: AnyType())
+        assert_equal(result, expected)
+
+    def assert_vararg_map(self,
+                          caller_kinds: List[int],
+                          callee_kinds: List[int],
+                          expected: List[List[int]],
+                          vararg_type: Type,
+                          ) -> None:
+        result = map_actuals_to_formals(
+            caller_kinds,
+            [],
+            callee_kinds,
+            [],
+            lambda i: vararg_type)
+        assert_equal(result, expected)
+
+
+def expand_caller_kinds(kinds_or_names: List[Union[int, str]]
+                        ) -> Tuple[List[int], List[Optional[str]]]:
+    kinds = []
+    names = []
+    for k in kinds_or_names:
+        if isinstance(k, str):
+            kinds.append(ARG_NAMED)
+            names.append(k)
+        else:
+            kinds.append(k)
+            names.append(None)
+    return kinds, names
+
+
+def expand_callee_kinds(kinds_and_names: List[Union[int, Tuple[int, str]]]
+                        ) -> Tuple[List[int], List[Optional[str]]]:
+    kinds = []
+    names = []
+    for v in kinds_and_names:
+        if isinstance(v, tuple):
+            kinds.append(v[0])
+            names.append(v[1])
+        else:
+            kinds.append(v)
+            names.append(None)
+    return kinds, names
diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py
new file mode 100644
index 0000000..9807098
--- /dev/null
+++ b/mypy/test/testmerge.py
@@ -0,0 +1,206 @@
+"""Test cases for AST merge (used for fine-grained incremental checking)"""
+
+import os
+import shutil
+from typing import List, Tuple, Dict
+
+from mypy import build
+from mypy.build import BuildManager, BuildSource, State
+from mypy.errors import Errors, CompileError
+from mypy.nodes import (
+    Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression
+)
+from mypy.options import Options
+from mypy.server.astmerge import merge_asts
+from mypy.server.subexpr import get_subexpressions
+from mypy.server.update import build_incremental_step, replace_modules_with_new_variants
+from mypy.strconv import StrConv, indent
+from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.test.testtypegen import ignore_node
+from mypy.types import TypeStrVisitor, Type
+from mypy.util import short_type
+
+
+files = [
+    'merge.test'
+]
+
+
+# Which data structures to dump in a test case?
+SYMTABLE = 'SYMTABLE'
+TYPEINFO = ' TYPEINFO'
+TYPES = 'TYPES'
+AST = 'AST'
+
+
+class ASTMergeSuite(DataSuite):
+    def __init__(self, *, update_data: bool) -> None:
+        self.str_conv = StrConv(show_ids=True)
+        self.id_mapper = self.str_conv.id_mapper
+        self.type_str_conv = TypeStrVisitor(self.id_mapper)
+
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for f in files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  None, test_temp_dir, True)
+        return c
+
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        name = testcase.name
+        # We use the test case name to decide which data structures to dump.
+        # Dumping everything would result in very verbose test cases.
+        if name.endswith('_symtable'):
+            kind = SYMTABLE
+        elif name.endswith('_typeinfo'):
+            kind = TYPEINFO
+        elif name.endswith('_types'):
+            kind = TYPES
+        else:
+            kind = AST
+
+        main_src = '\n'.join(testcase.input)
+        messages, manager, graph = self.build(main_src)
+
+        a = []
+        if messages:
+            a.extend(messages)
+
+        shutil.copy(os.path.join(test_temp_dir, 'target.py.next'),
+                    os.path.join(test_temp_dir, 'target.py'))
+
+        a.extend(self.dump(manager.modules, graph, kind))
+
+        old_modules = dict(manager.modules)
+        old_subexpr = get_subexpressions(old_modules['target'])
+
+        new_file, new_types = self.build_increment(manager, 'target')
+        replace_modules_with_new_variants(manager,
+                                          graph,
+                                          old_modules,
+                                          {'target': new_file},
+                                          {'target': new_types})
+
+        a.append('==>')
+        a.extend(self.dump(manager.modules, graph, kind))
+
+        for expr in old_subexpr:
+            # Verify that old AST nodes are removed from the expression type map.
+            assert expr not in new_types
+
+        assert_string_arrays_equal(
+            testcase.output, a,
+            'Invalid output ({}, line {})'.format(testcase.file,
+                                                  testcase.line))
+
+    def build(self, source: str) -> Tuple[List[str], BuildManager, Dict[str, State]]:
+        options = Options()
+        options.use_builtins_fixtures = True
+        options.show_traceback = True
+        options.cache_dir = os.devnull
+        try:
+            result = build.build(sources=[BuildSource('main', None, source)],
+                                 options=options,
+                                 alt_lib_path=test_temp_dir)
+        except CompileError as e:
+            # TODO: Is it okay to return None?
+            return e.messages, None, {}
+        return result.errors, result.manager, result.graph
+
+    def build_increment(self, manager: BuildManager,
+                        module_id: str) -> Tuple[MypyFile,
+                                                 Dict[Expression, Type]]:
+        module_dict, type_maps = build_incremental_step(manager, [module_id])
+        return module_dict[module_id], type_maps[module_id]
+
+    def dump(self,
+             modules: Dict[str, MypyFile],
+             graph: Dict[str, State],
+             kind: str) -> List[str]:
+        if kind == AST:
+            return self.dump_asts(modules)
+        elif kind == TYPEINFO:
+            return self.dump_typeinfos(modules)
+        elif kind == SYMTABLE:
+            return self.dump_symbol_tables(modules)
+        elif kind == TYPES:
+            return self.dump_types(graph)
+        assert False, 'Invalid kind %s' % kind
+
+    def dump_asts(self, modules: Dict[str, MypyFile]) -> List[str]:
+        a = []
+        for m in sorted(modules):
+            if m == 'builtins':
+                # We don't support incremental checking of changes to builtins.
+                continue
+            s = modules[m].accept(self.str_conv)
+            a.extend(s.splitlines())
+        return a
+
+    def dump_symbol_tables(self, modules: Dict[str, MypyFile]) -> List[str]:
+        a = []
+        for id in sorted(modules):
+            if id == 'builtins':
+                # We don't support incremental checking of changes to builtins.
+                continue
+            a.extend(self.dump_symbol_table(id, modules[id].names))
+        return a
+
+    def dump_symbol_table(self, module_id: str, symtable: SymbolTable) -> List[str]:
+        a = ['{}:'.format(module_id)]
+        for name in sorted(symtable):
+            if name.startswith('__'):
+                continue
+            a.append('    {}: {}'.format(name, self.format_symbol_table_node(symtable[name])))
+        return a
+
+    def format_symbol_table_node(self, node: SymbolTableNode) -> str:
+        if node is None:
+            return 'None'
+        if isinstance(node.node, Node):
+            return '{}<{}>'.format(str(type(node.node).__name__),
+                                   self.id_mapper.id(node.node))
+        # TODO: type_override?
+        return '?'
+
+    def dump_typeinfos(self, modules: Dict[str, MypyFile]) -> List[str]:
+        a = []
+        for id in sorted(modules):
+            if id == 'builtins':
+                continue
+            a.extend(self.dump_typeinfos_recursive(modules[id].names))
+        return a
+
+    def dump_typeinfos_recursive(self, names: SymbolTable) -> List[str]:
+        a = []
+        for name, node in sorted(names.items(), key=lambda x: x[0]):
+            if isinstance(node.node, TypeInfo):
+                a.extend(self.dump_typeinfo(node.node))
+                a.extend(self.dump_typeinfos_recursive(node.node.names))
+        return a
+
+    def dump_typeinfo(self, info: TypeInfo) -> List[str]:
+        s = info.dump(str_conv=self.str_conv,
+                      type_str_conv=self.type_str_conv)
+        return s.splitlines()
+
+    def dump_types(self, graph: Dict[str, State]) -> List[str]:
+        a = []
+        # To make the results repeatable, we try to generate unique and
+        # deterministic sort keys.
+        for module_id in sorted(graph):
+            if module_id == 'builtins':
+                continue
+            type_map = graph[module_id].type_checker.type_map
+            if type_map:
+                a.append('## {}'.format(module_id))
+                for expr in sorted(type_map, key=lambda n: (n.line, short_type(n),
+                                                            str(n) + str(type_map[n]))):
+                    typ = type_map[expr]
+                    a.append('{}:{}: {}'.format(short_type(expr),
+                                                expr.line,
+                                                typ.accept(self.type_str_conv)))
+        return a
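+
+# Illustrative note (added for clarity): dump_types() emits a '## <module>'
+# header per module, followed by one line per typed expression, e.g.
+#
+#     ## target
+#     NameExpr:3: builtins.int
+#
+# i.e. '<short node type>:<line>: <rendered type>'.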
diff --git a/mypy/test/testmoduleinfo.py b/mypy/test/testmoduleinfo.py
new file mode 100644
index 0000000..5818479
--- /dev/null
+++ b/mypy/test/testmoduleinfo.py
@@ -0,0 +1,14 @@
+from mypy import moduleinfo
+from mypy.myunit import (
+    Suite, assert_equal, assert_true, assert_false
+)
+
+
+class ModuleInfoSuite(Suite):
+    def test_is_in_module_collection(self) -> None:
+        assert_true(moduleinfo.is_in_module_collection({'foo'}, 'foo'))
+        assert_true(moduleinfo.is_in_module_collection({'foo'}, 'foo.bar'))
+        assert_false(moduleinfo.is_in_module_collection({'foo'}, 'fo'))
+        assert_true(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo.bar'))
+        assert_true(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo.bar.zar'))
+        assert_false(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo'))
diff --git a/mypy/test/testparse.py b/mypy/test/testparse.py
new file mode 100644
index 0000000..d6789c0
--- /dev/null
+++ b/mypy/test/testparse.py
@@ -0,0 +1,79 @@
+"""Tests for the mypy parser."""
+
+import os.path
+
+from typing import List
+
+from mypy import defaults
+from mypy.myunit import Suite, AssertionFailure
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test import config
+from mypy.parse import parse
+from mypy.errors import CompileError
+from mypy.options import Options
+
+
+class ParserSuite(Suite):
+    parse_files = ['parse.test',
+                   'parse-python2.test']
+
+    def cases(self) -> List[DataDrivenTestCase]:
+        # The test case descriptions are stored in data files.
+        c = []  # type: List[DataDrivenTestCase]
+        for f in self.parse_files:
+            c += parse_test_cases(
+                os.path.join(config.test_data_prefix, f), test_parser)
+        return c
+
+
+def test_parser(testcase: DataDrivenTestCase) -> None:
+    """Perform a single parser test case.
+
+    The argument contains the description of the test case.
+    """
+    options = Options()
+
+    if testcase.file.endswith('python2.test'):
+        options.python_version = defaults.PYTHON2_VERSION
+    else:
+        options.python_version = defaults.PYTHON3_VERSION
+
+    try:
+        n = parse(bytes('\n'.join(testcase.input), 'ascii'),
+                  fnam='main',
+                  errors=None,
+                  options=options)
+        a = str(n).split('\n')
+    except CompileError as e:
+        a = e.messages
+    assert_string_arrays_equal(testcase.output, a,
+                               'Invalid parser output ({}, line {})'.format(
+                                   testcase.file, testcase.line))
+
+
+# The file name shown in test case output. This is displayed in error
+# messages, and must match the file name in the test case descriptions.
+INPUT_FILE_NAME = 'file'
+
+
+class ParseErrorSuite(Suite):
+    def cases(self) -> List[DataDrivenTestCase]:
+        # Test case descriptions are in an external file.
+        return parse_test_cases(os.path.join(config.test_data_prefix,
+                                             'parse-errors.test'),
+                                test_parse_error)
+
+
+def test_parse_error(testcase: DataDrivenTestCase) -> None:
+    try:
+        # Compile temporary file. The test file contains non-ASCII characters.
+        parse(bytes('\n'.join(testcase.input), 'utf-8'), INPUT_FILE_NAME, None, Options())
+        raise AssertionFailure('No errors reported')
+    except CompileError as e:
+        # Verify that there was a compile error and that the error messages
+        # are equivalent.
+        assert_string_arrays_equal(
+            testcase.output, e.messages,
+            'Invalid compiler output ({}, line {})'.format(testcase.file,
+                                                           testcase.line))
diff --git a/mypy/test/testpythoneval.py b/mypy/test/testpythoneval.py
new file mode 100644
index 0000000..602692e
--- /dev/null
+++ b/mypy/test/testpythoneval.py
@@ -0,0 +1,128 @@
+"""Test cases for running mypy programs using a Python interpreter.
+
+Each test case type checks a program then runs it using Python. The
+output (stdout) of the program is compared to expected output. Type checking
+uses full builtins and other stubs.
+
+Note: Currently Python interpreter paths are hard coded.
+
+Note: These test cases are *not* included in the main test suite, as including
+      this suite would slow down the main suite too much.
+"""
+
+from contextlib import contextmanager
+import errno
+import os
+import os.path
+import re
+import subprocess
+import sys
+
+import typing
+from typing import Dict, List, Tuple
+
+from mypy.myunit import Suite, SkipTestCaseException
+from mypy.test.config import test_data_prefix, test_temp_dir
+from mypy.test.data import DataDrivenTestCase, parse_test_cases
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.util import try_find_python2_interpreter
+
+
+# Files which contain test case descriptions.
+python_eval_files = ['pythoneval.test',
+                     'python2eval.test']
+
+python_34_eval_files = ['pythoneval-asyncio.test']
+
+# Path to Python 3 interpreter
+python3_path = sys.executable
+program_re = re.compile(r'\b_program.py\b')
+
+
+class PythonEvaluationSuite(Suite):
+    def cases(self) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for f in python_eval_files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  test_python_evaluation, test_temp_dir, True)
+        if sys.version_info.major == 3 and sys.version_info.minor >= 4:
+            for f in python_34_eval_files:
+                c += parse_test_cases(os.path.join(test_data_prefix, f),
+                    test_python_evaluation, test_temp_dir, True)
+        return c
+
+
+def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
+    """Runs Mypy in a subprocess.
+
+    If this passes without errors, executes the script again with a given Python
+    version.
+    """
+    mypy_cmdline = [
+        python3_path,
+        os.path.join(testcase.old_cwd, 'scripts', 'mypy'),
+        '--show-traceback',
+    ]
+    py2 = testcase.name.lower().endswith('python2')
+    if py2:
+        mypy_cmdline.append('--py2')
+        interpreter = try_find_python2_interpreter()
+        if not interpreter:
+            # Skip, can't find a Python 2 interpreter.
+            raise SkipTestCaseException()
+    else:
+        interpreter = python3_path
+
+    # Write the program to a file.
+    program = '_' + testcase.name + '.py'
+    mypy_cmdline.append(program)
+    program_path = os.path.join(test_temp_dir, program)
+    with open(program_path, 'w') as file:
+        for s in testcase.input:
+            file.write('{}\n'.format(s))
+    # Type check the program.
+    # This uses the same PYTHONPATH as the current process.
+    returncode, out = run(mypy_cmdline)
+    if returncode == 0:
+        # Execute the program.
+        returncode, interp_out = run([interpreter, program])
+        out += interp_out
+    # Remove temp file.
+    os.remove(program_path)
+    assert_string_arrays_equal(adapt_output(testcase), out,
+                               'Invalid output ({}, line {})'.format(
+                                   testcase.file, testcase.line))
+
+
+def split_lines(*streams: bytes) -> List[str]:
+    """Returns a single list of string lines from the byte streams in args."""
+    return [
+        s.rstrip('\n\r')
+        for stream in streams
+        for s in str(stream, 'utf8').splitlines()
+    ]
+
+
+def adapt_output(testcase: DataDrivenTestCase) -> List[str]:
+    """Translates the generic _program.py into the actual filename."""
+    program = '_' + testcase.name + '.py'
+    return [program_re.sub(program, line) for line in testcase.output]
+
+
+def run(
+    cmdline: List[str], *, env: Dict[str, str] = None, timeout: int = 30
+) -> Tuple[int, List[str]]:
+    """A poor man's subprocess.run() for 3.3 and 3.4 compatibility."""
+    process = subprocess.Popen(
+        cmdline,
+        env=env,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        cwd=test_temp_dir,
+    )
+    try:
+        out, err = process.communicate(timeout=timeout)
+    except subprocess.TimeoutExpired:
+        out = err = b''
+        process.kill()
+    return process.returncode, split_lines(out, err)
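+
+# Illustrative usage of run() (not part of the suite):
+#
+#     returncode, lines = run([sys.executable, '-c', 'print("hello")'])
+#     # -> returncode == 0, lines == ['hello']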
diff --git a/mypy/test/testreports.py b/mypy/test/testreports.py
new file mode 100644
index 0000000..80e6980
--- /dev/null
+++ b/mypy/test/testreports.py
@@ -0,0 +1,40 @@
+"""Test cases for reports generated by mypy."""
+import textwrap
+
+from mypy.myunit import Suite, assert_equal
+from mypy.report import CoberturaPackage, get_line_rate
+
+import lxml.etree as etree
+
+
+class CoberturaReportSuite(Suite):
+    def test_get_line_rate(self) -> None:
+        assert_equal('1.0', get_line_rate(0, 0))
+        assert_equal('0.3333', get_line_rate(1, 3))
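+        # (Per these cases, get_line_rate() renders covered/total with four
+        # decimal places and reports '1.0' when there are no lines at all.)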
+
+    def test_as_xml(self) -> None:
+        cobertura_package = CoberturaPackage('foobar')
+        cobertura_package.covered_lines = 21
+        cobertura_package.total_lines = 42
+
+        child_package = CoberturaPackage('raz')
+        child_package.covered_lines = 10
+        child_package.total_lines = 10
+        child_package.classes['class'] = etree.Element('class')
+
+        cobertura_package.packages['raz'] = child_package
+
+        expected_output = textwrap.dedent('''\
+            <package complexity="1.0" name="foobar" branch-rate="0" line-rate="0.5000">
+              <classes/>
+              <packages>
+                <package complexity="1.0" name="raz" branch-rate="0" line-rate="1.0000">
+                  <classes>
+                    <class/>
+                  </classes>
+                </package>
+              </packages>
+            </package>
+        ''').encode('ascii')
+        assert_equal(expected_output,
+                     etree.tostring(cobertura_package.as_xml(), pretty_print=True))
diff --git a/mypy/test/testsemanal.py b/mypy/test/testsemanal.py
new file mode 100644
index 0000000..d339a83
--- /dev/null
+++ b/mypy/test/testsemanal.py
@@ -0,0 +1,227 @@
+"""Semantic analyzer test cases"""
+
+import os.path
+
+from typing import Dict, List
+
+from mypy import build
+from mypy.build import BuildSource
+from mypy.myunit import Suite
+from mypy.test.helpers import (
+    assert_string_arrays_equal, normalize_error_messages, testfile_pyversion,
+)
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.config import test_data_prefix, test_temp_dir
+from mypy.errors import CompileError
+from mypy.nodes import TypeInfo
+from mypy.options import Options
+
+
+# Semantic analyzer test cases: dump parse tree
+
+# Semantic analysis test case description files.
+semanal_files = ['semanal-basic.test',
+                 'semanal-expressions.test',
+                 'semanal-classes.test',
+                 'semanal-types.test',
+                 'semanal-typealiases.test',
+                 'semanal-modules.test',
+                 'semanal-statements.test',
+                 'semanal-abstractclasses.test',
+                 'semanal-namedtuple.test',
+                 'semanal-typeddict.test',
+                 'semanal-classvar.test',
+                 'semanal-python2.test']
+
+
+def get_semanal_options() -> Options:
+    options = Options()
+    options.use_builtins_fixtures = True
+    options.semantic_analysis_only = True
+    options.show_traceback = True
+    return options
+
+
+class SemAnalSuite(Suite):
+    def cases(self) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for f in semanal_files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  test_semanal,
+                                  base_path=test_temp_dir,
+                                  optional_out=True,
+                                  native_sep=True)
+        return c
+
+
+def test_semanal(testcase: DataDrivenTestCase) -> None:
+    """Perform a semantic analysis test case.
+
+    The testcase argument contains a description of the test case
+    (inputs and output).
+    """
+
+    try:
+        src = '\n'.join(testcase.input)
+        options = get_semanal_options()
+        options.python_version = testfile_pyversion(testcase.file)
+        result = build.build(sources=[BuildSource('main', None, src)],
+                             options=options,
+                             alt_lib_path=test_temp_dir)
+        a = result.errors
+        if a:
+            raise CompileError(a)
+        # Include string representations of the source files in the actual
+        # output.
+        for fnam in sorted(result.files.keys()):
+            f = result.files[fnam]
+            # Omit the builtins module and files with a special marker in the
+            # path.
+            # TODO the test is not reliable
+            if (not f.path.endswith((os.sep + 'builtins.pyi',
+                                     'typing.pyi',
+                                     'mypy_extensions.pyi',
+                                     'abc.pyi',
+                                     'collections.pyi'))
+                    and not os.path.basename(f.path).startswith('_')
+                    and not os.path.splitext(
+                        os.path.basename(f.path))[0].endswith('_')):
+                a += str(f).split('\n')
+    except CompileError as e:
+        a = e.messages
+    a = normalize_error_messages(a)
+    assert_string_arrays_equal(
+        testcase.output, a,
+        'Invalid semantic analyzer output ({}, line {})'.format(testcase.file,
+                                                                testcase.line))
+
+
+# Semantic analyzer error test cases
+
+# Paths to files containing test case descriptions.
+semanal_error_files = ['semanal-errors.test']
+
+
+class SemAnalErrorSuite(Suite):
+    def cases(self) -> List[DataDrivenTestCase]:
+        # Read test cases from test case description files.
+        c = []  # type: List[DataDrivenTestCase]
+        for f in semanal_error_files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  test_semanal_error, test_temp_dir, optional_out=True)
+        return c
+
+
+def test_semanal_error(testcase: DataDrivenTestCase) -> None:
+    """Perform a test case."""
+
+    try:
+        src = '\n'.join(testcase.input)
+        res = build.build(sources=[BuildSource('main', None, src)],
+                          options=get_semanal_options(),
+                          alt_lib_path=test_temp_dir)
+        a = res.errors
+        assert a, 'No errors reported in {}, line {}'.format(testcase.file, testcase.line)
+    except CompileError as e:
+        # Verify that there was a compile error and that the error messages
+        # are equivalent.
+        a = e.messages
+    assert_string_arrays_equal(
+        testcase.output, normalize_error_messages(a),
+        'Invalid compiler output ({}, line {})'.format(testcase.file, testcase.line))
+
+
+# SymbolNode table export test cases
+
+# Test case descriptions
+semanal_symtable_files = ['semanal-symtable.test']
+
+
+class SemAnalSymtableSuite(Suite):
+    def cases(self) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for f in semanal_symtable_files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  self.run_test, test_temp_dir)
+        return c
+
+    def run_test(self, testcase: DataDrivenTestCase) -> None:
+        """Perform a test case."""
+        try:
+            # Build test case input.
+            src = '\n'.join(testcase.input)
+            result = build.build(sources=[BuildSource('main', None, src)],
+                                 options=get_semanal_options(),
+                                 alt_lib_path=test_temp_dir)
+            # The output is the symbol table converted into a string.
+            a = result.errors
+            if a:
+                raise CompileError(a)
+            for f in sorted(result.files.keys()):
+                if f not in ('builtins', 'typing', 'abc'):
+                    a.append('{}:'.format(f))
+                    for s in str(result.files[f].names).split('\n'):
+                        a.append('  ' + s)
+        except CompileError as e:
+            a = e.messages
+        assert_string_arrays_equal(
+            testcase.output, a,
+            'Invalid semantic analyzer output ({}, line {})'.format(
+                testcase.file, testcase.line))
+
+
+# Type info export test cases
+
+semanal_typeinfo_files = ['semanal-typeinfo.test']
+
+
+class SemAnalTypeInfoSuite(Suite):
+    def cases(self) -> List[DataDrivenTestCase]:
+        """Test case descriptions"""
+        c = []  # type: List[DataDrivenTestCase]
+        for f in semanal_typeinfo_files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  self.run_test, test_temp_dir)
+        return c
+
+    def run_test(self, testcase: DataDrivenTestCase) -> None:
+        """Perform a test case."""
+        try:
+            # Build test case input.
+            src = '\n'.join(testcase.input)
+            result = build.build(sources=[BuildSource('main', None, src)],
+                                 options=get_semanal_options(),
+                                 alt_lib_path=test_temp_dir)
+            a = result.errors
+            if a:
+                raise CompileError(a)
+
+            # Collect all TypeInfos in top-level modules.
+            typeinfos = TypeInfoMap()
+            for f in result.files.values():
+                for n in f.names.values():
+                    if isinstance(n.node, TypeInfo):
+                        assert n.fullname is not None
+                        typeinfos[n.fullname] = n.node
+
+            # The output is the symbol table converted into a string.
+            a = str(typeinfos).split('\n')
+        except CompileError as e:
+            a = e.messages
+        assert_string_arrays_equal(
+            testcase.output, a,
+            'Invalid semantic analyzer output ({}, line {})'.format(
+                testcase.file, testcase.line))
+
+
+class TypeInfoMap(Dict[str, TypeInfo]):
+    def __str__(self) -> str:
+        a = ['TypeInfoMap(']  # type: List[str]
+        for x, y in sorted(self.items()):
+            if isinstance(x, str) and (not x.startswith('builtins.') and
+                                       not x.startswith('typing.') and
+                                       not x.startswith('abc.')):
+                ti = ('\n' + '  ').join(str(y).split('\n'))
+                a.append('  {} : {}'.format(x, ti))
+        a[-1] += ')'
+        return '\n'.join(a)
diff --git a/mypy/test/testsolve.py b/mypy/test/testsolve.py
new file mode 100644
index 0000000..1b8dc83
--- /dev/null
+++ b/mypy/test/testsolve.py
@@ -0,0 +1,132 @@
+"""Test cases for the constraint solver used in type inference."""
+
+from typing import List, Union, Tuple, Optional
+
+from mypy.myunit import Suite, assert_equal
+from mypy.constraints import SUPERTYPE_OF, SUBTYPE_OF, Constraint
+from mypy.solve import solve_constraints
+from mypy.typefixture import TypeFixture
+from mypy.types import Type, TypeVarType, TypeVarId
+
+
+class SolveSuite(Suite):
+    def __init__(self) -> None:
+        super().__init__()
+        self.fx = TypeFixture()
+
+    def test_empty_input(self) -> None:
+        self.assert_solve([], [], [])
+
+    def test_simple_supertype_constraints(self) -> None:
+        self.assert_solve([self.fx.t.id],
+                          [self.supc(self.fx.t, self.fx.a)],
+                          [(self.fx.a, self.fx.o)])
+        self.assert_solve([self.fx.t.id],
+                          [self.supc(self.fx.t, self.fx.a),
+                           self.supc(self.fx.t, self.fx.b)],
+                          [(self.fx.a, self.fx.o)])
+
+    def test_simple_subtype_constraints(self) -> None:
+        self.assert_solve([self.fx.t.id],
+                          [self.subc(self.fx.t, self.fx.a)],
+                          [self.fx.a])
+        self.assert_solve([self.fx.t.id],
+                          [self.subc(self.fx.t, self.fx.a),
+                           self.subc(self.fx.t, self.fx.b)],
+                          [self.fx.b])
+
+    def test_both_kinds_of_constraints(self) -> None:
+        self.assert_solve([self.fx.t.id],
+                          [self.supc(self.fx.t, self.fx.b),
+                           self.subc(self.fx.t, self.fx.a)],
+                          [(self.fx.b, self.fx.a)])
+
+    def test_unsatisfiable_constraints(self) -> None:
+        # The constraints are impossible to satisfy.
+        self.assert_solve([self.fx.t.id],
+                          [self.supc(self.fx.t, self.fx.a),
+                           self.subc(self.fx.t, self.fx.b)],
+                          [None])
+
+    def test_exactly_specified_result(self) -> None:
+        self.assert_solve([self.fx.t.id],
+                          [self.supc(self.fx.t, self.fx.b),
+                           self.subc(self.fx.t, self.fx.b)],
+                          [(self.fx.b, self.fx.b)])
+
+    def test_multiple_variables(self) -> None:
+        self.assert_solve([self.fx.t.id, self.fx.s.id],
+                          [self.supc(self.fx.t, self.fx.b),
+                           self.supc(self.fx.s, self.fx.c),
+                           self.subc(self.fx.t, self.fx.a)],
+                          [(self.fx.b, self.fx.a), (self.fx.c, self.fx.o)])
+
+    def test_no_constraints_for_var(self) -> None:
+        self.assert_solve([self.fx.t.id],
+                          [],
+                          [self.fx.uninhabited])
+        self.assert_solve([self.fx.t.id, self.fx.s.id],
+                          [],
+                          [self.fx.uninhabited, self.fx.uninhabited])
+        self.assert_solve([self.fx.t.id, self.fx.s.id],
+                          [self.supc(self.fx.s, self.fx.a)],
+                          [self.fx.uninhabited, (self.fx.a, self.fx.o)])
+
+    def test_simple_constraints_with_dynamic_type(self) -> None:
+        self.assert_solve([self.fx.t.id],
+                          [self.supc(self.fx.t, self.fx.anyt)],
+                          [(self.fx.anyt, self.fx.anyt)])
+        self.assert_solve([self.fx.t.id],
+                          [self.supc(self.fx.t, self.fx.anyt),
+                           self.supc(self.fx.t, self.fx.anyt)],
+                          [(self.fx.anyt, self.fx.anyt)])
+        self.assert_solve([self.fx.t.id],
+                          [self.supc(self.fx.t, self.fx.anyt),
+                           self.supc(self.fx.t, self.fx.a)],
+                          [(self.fx.anyt, self.fx.anyt)])
+
+        self.assert_solve([self.fx.t.id],
+                          [self.subc(self.fx.t, self.fx.anyt)],
+                          [(self.fx.anyt, self.fx.anyt)])
+        self.assert_solve([self.fx.t.id],
+                          [self.subc(self.fx.t, self.fx.anyt),
+                           self.subc(self.fx.t, self.fx.anyt)],
+                          [(self.fx.anyt, self.fx.anyt)])
+        # self.assert_solve([self.fx.t.id],
+        #                   [self.subc(self.fx.t, self.fx.anyt),
+        #                    self.subc(self.fx.t, self.fx.a)],
+        #                   [(self.fx.anyt, self.fx.anyt)])
+        # TODO: figure out what this should be after changes to meet(any, X)
+
+    def test_both_normal_and_any_types_in_results(self) -> None:
+        # If one of the bounds is any, we promote the other bound to
+        # any as well, since otherwise the type range does not make sense.
+        self.assert_solve([self.fx.t.id],
+                          [self.supc(self.fx.t, self.fx.a),
+                           self.subc(self.fx.t, self.fx.anyt)],
+                          [(self.fx.anyt, self.fx.anyt)])
+
+        self.assert_solve([self.fx.t.id],
+                          [self.supc(self.fx.t, self.fx.anyt),
+                           self.subc(self.fx.t, self.fx.a)],
+                          [(self.fx.anyt, self.fx.anyt)])
+
+    def assert_solve(self,
+                     vars: List[TypeVarId],
+                     constraints: List[Constraint],
+                     results: List[Union[None, Type, Tuple[Type, Type]]],
+                     ) -> None:
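+        # Descriptive note (added): an expected result may be a plain type or
+        # a (lower bound, upper bound) pair; for pairs only the first element
+        # is compared against the solver's output.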
+        res = []  # type: List[Optional[Type]]
+        for r in results:
+            if isinstance(r, tuple):
+                res.append(r[0])
+            else:
+                res.append(r)
+        actual = solve_constraints(vars, constraints)
+        assert_equal(str(actual), str(res))
+
+    def supc(self, type_var: TypeVarType, bound: Type) -> Constraint:
+        return Constraint(type_var.id, SUPERTYPE_OF, bound)
+
+    def subc(self, type_var: TypeVarType, bound: Type) -> Constraint:
+        return Constraint(type_var.id, SUBTYPE_OF, bound)
diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py
new file mode 100644
index 0000000..9f7ecfb
--- /dev/null
+++ b/mypy/test/teststubgen.py
@@ -0,0 +1,204 @@
+import glob
+import importlib
+import os.path
+import random
+import shutil
+import sys
+import tempfile
+import time
+import re
+from types import ModuleType
+
+from typing import List, Tuple
+
+from mypy.myunit import Suite, AssertionFailure, assert_equal
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test import config
+from mypy.parse import parse
+from mypy.errors import CompileError
+from mypy.stubgen import generate_stub, generate_stub_for_module, parse_options, Options
+from mypy.stubgenc import generate_c_type_stub, infer_method_sig
+from mypy.stubutil import (
+    parse_signature, parse_all_signatures, build_signature, find_unique_signatures,
+    infer_sig_from_docstring
+)
+
+
+class StubgenUtilSuite(Suite):
+    def test_parse_signature(self) -> None:
+        self.assert_parse_signature('func()', ('func', [], []))
+
+    def test_parse_signature_with_args(self) -> None:
+        self.assert_parse_signature('func(arg)', ('func', ['arg'], []))
+        self.assert_parse_signature('do(arg, arg2)', ('do', ['arg', 'arg2'], []))
+
+    def test_parse_signature_with_optional_args(self) -> None:
+        self.assert_parse_signature('func([arg])', ('func', [], ['arg']))
+        self.assert_parse_signature('func(arg[, arg2])', ('func', ['arg'], ['arg2']))
+        self.assert_parse_signature('func([arg[, arg2]])', ('func', [], ['arg', 'arg2']))
+
+    def test_parse_signature_with_default_arg(self) -> None:
+        self.assert_parse_signature('func(arg=None)', ('func', [], ['arg']))
+        self.assert_parse_signature('func(arg, arg2=None)', ('func', ['arg'], ['arg2']))
+        self.assert_parse_signature('func(arg=1, arg2="")', ('func', [], ['arg', 'arg2']))
+
+    def test_parse_signature_with_qualified_function(self) -> None:
+        self.assert_parse_signature('ClassName.func(arg)', ('func', ['arg'], []))
+
+    def test_parse_signature_with_kw_only_arg(self) -> None:
+        self.assert_parse_signature('ClassName.func(arg, *, arg2=1)',
+                                    ('func', ['arg', '*'], ['arg2']))
+
+    def test_parse_signature_with_star_arg(self) -> None:
+        self.assert_parse_signature('ClassName.func(arg, *args)',
+                                    ('func', ['arg', '*args'], []))
+
+    def test_parse_signature_with_star_star_arg(self) -> None:
+        self.assert_parse_signature('ClassName.func(arg, **args)',
+                                    ('func', ['arg', '**args'], []))
+
+    def assert_parse_signature(self, sig: str, result: Tuple[str, List[str], List[str]]) -> None:
+        assert_equal(parse_signature(sig), result)
+
+    def test_build_signature(self) -> None:
+        assert_equal(build_signature([], []), '()')
+        assert_equal(build_signature(['arg'], []), '(arg)')
+        assert_equal(build_signature(['arg', 'arg2'], []), '(arg, arg2)')
+        assert_equal(build_signature(['arg'], ['arg2']), '(arg, arg2=...)')
+        assert_equal(build_signature(['arg'], ['arg2', '**x']), '(arg, arg2=..., **x)')
+
+    def test_parse_all_signatures(self) -> None:
+        assert_equal(parse_all_signatures(['random text',
+                                           '.. function:: fn(arg',
+                                           '.. function:: fn()',
+                                           '  .. method:: fn2(arg)']),
+                     ([('fn', '()'),
+                       ('fn2', '(arg)')], []))
+
+    def test_find_unique_signatures(self) -> None:
+        assert_equal(find_unique_signatures(
+            [('func', '()'),
+             ('func', '()'),
+             ('func2', '()'),
+             ('func2', '(arg)'),
+             ('func3', '(arg, arg2)')]),
+            [('func', '()'),
+             ('func3', '(arg, arg2)')])
+
+    def test_infer_sig_from_docstring(self) -> None:
+        assert_equal(infer_sig_from_docstring('\nfunc(x) - y', 'func'), '(x)')
+        assert_equal(infer_sig_from_docstring('\nfunc(x, Y_a=None)', 'func'), '(x, Y_a=None)')
+        assert_equal(infer_sig_from_docstring('\nafunc(x) - y', 'func'), None)
+        assert_equal(infer_sig_from_docstring('\nfunc(x, y', 'func'), None)
+        assert_equal(infer_sig_from_docstring('\nfunc(x=z(y))', 'func'), None)
+        assert_equal(infer_sig_from_docstring('\nfunc x', 'func'), None)
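+        # (Per the cases above, a signature is inferred only when the
+        # docstring starts with '<name>(...)' using balanced, non-nested
+        # parentheses; otherwise None is returned.)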
+
+
+class StubgenPythonSuite(Suite):
+    test_data_files = ['stubgen.test']
+
+    def cases(self) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for path in self.test_data_files:
+            c += parse_test_cases(os.path.join(config.test_data_prefix, path), test_stubgen)
+        return c
+
+
+def parse_flags(program_text: str) -> Options:
+    flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE)
+    if flags:
+        flag_list = flags.group(1).split()
+    else:
+        flag_list = []
+    return parse_options(flag_list + ['dummy.py'])
+
+
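+# A test program may select stubgen options on a line such as
+# '# flags: --include-private' (illustrative); parse_flags() above extracts
+# and parses the flags.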
+def test_stubgen(testcase: DataDrivenTestCase) -> None:
+    if 'stubgen-test-path' not in sys.path:
+        sys.path.insert(0, 'stubgen-test-path')
+    os.mkdir('stubgen-test-path')
+    source = '\n'.join(testcase.input)
+    options = parse_flags(source)
+    handle = tempfile.NamedTemporaryFile(prefix='prog_', suffix='.py', dir='stubgen-test-path',
+                                         delete=False)
+    assert os.path.isabs(handle.name)
+    path = os.path.basename(handle.name)
+    name = path[:-3]
+    path = os.path.join('stubgen-test-path', path)
+    out_dir = '_out'
+    os.mkdir(out_dir)
+    try:
+        handle.write(bytes(source, 'ascii'))
+        handle.close()
+        # Without this we may sometimes be unable to import the module below, as importlib
+        # caches os.listdir() results in Python 3.3+ (Guido explained this to me).
+        reset_importlib_caches()
+        try:
+            if testcase.name.endswith('_import'):
+                generate_stub_for_module(name, out_dir, quiet=True,
+                                         no_import=options.no_import,
+                                         include_private=options.include_private)
+            else:
+                generate_stub(path, out_dir, include_private=options.include_private)
+            a = load_output(out_dir)
+        except CompileError as e:
+            a = e.messages
+        assert_string_arrays_equal(testcase.output, a,
+                                   'Invalid output ({}, line {})'.format(
+                                       testcase.file, testcase.line))
+    finally:
+        handle.close()
+        os.unlink(handle.name)
+        shutil.rmtree(out_dir)
+
+
+def reset_importlib_caches() -> None:
+    try:
+        importlib.invalidate_caches()
+    except (ImportError, AttributeError):
+        pass
+
+
+def load_output(dirname: str) -> List[str]:
+    result = []  # type: List[str]
+    entries = glob.glob('%s/*' % dirname)
+    assert entries, 'No files generated'
+    if len(entries) == 1:
+        add_file(entries[0], result)
+    else:
+        for entry in entries:
+            result.append('## %s ##' % entry)
+            add_file(entry, result)
+    return result
+
+
+def add_file(path: str, result: List[str]) -> None:
+    with open(path) as file:
+        result.extend(file.read().splitlines())
+
+
+class StubgencSuite(Suite):
+    def test_infer_hash_sig(self) -> None:
+        assert_equal(infer_method_sig('__hash__'), '()')
+
+    def test_infer_getitem_sig(self) -> None:
+        assert_equal(infer_method_sig('__getitem__'), '(index)')
+
+    def test_infer_setitem_sig(self) -> None:
+        assert_equal(infer_method_sig('__setitem__'), '(index, object)')
+
+    def test_infer_binary_op_sig(self) -> None:
+        for op in ('eq', 'ne', 'lt', 'le', 'gt', 'ge',
+                   'add', 'radd', 'sub', 'rsub', 'mul', 'rmul'):
+            assert_equal(infer_method_sig('__%s__' % op), '(other)')
+
+    def test_infer_unary_op_sig(self) -> None:
+        for op in ('neg', 'pos'):
+            assert_equal(infer_method_sig('__%s__' % op), '()')
+
+    def test_generate_c_type_stub_no_crash_for_object(self) -> None:
+        output = []  # type: List[str]
+        mod = ModuleType('module', '')  # any module is fine
+        generate_c_type_stub(mod, 'alias', object, output)
+        assert_equal(output[0], 'class alias:')
diff --git a/mypy/test/testsubtypes.py b/mypy/test/testsubtypes.py
new file mode 100644
index 0000000..2d03945
--- /dev/null
+++ b/mypy/test/testsubtypes.py
@@ -0,0 +1,207 @@
+from mypy.myunit import Suite, assert_true
+from mypy.nodes import CONTRAVARIANT, INVARIANT, COVARIANT
+from mypy.subtypes import is_subtype
+from mypy.typefixture import TypeFixture, InterfaceTypeFixture
+from mypy.types import Type
+
+
+class SubtypingSuite(Suite):
+    def set_up(self) -> None:
+        self.fx = TypeFixture(INVARIANT)
+        self.fx_contra = TypeFixture(CONTRAVARIANT)
+        self.fx_co = TypeFixture(COVARIANT)
+
+    def test_trivial_cases(self) -> None:
+        for simple in self.fx_co.a, self.fx_co.o, self.fx_co.b:
+            self.assert_subtype(simple, simple)
+
+    def test_instance_subtyping(self) -> None:
+        self.assert_strict_subtype(self.fx.a, self.fx.o)
+        self.assert_strict_subtype(self.fx.b, self.fx.o)
+        self.assert_strict_subtype(self.fx.b, self.fx.a)
+
+        self.assert_not_subtype(self.fx.a, self.fx.d)
+        self.assert_not_subtype(self.fx.b, self.fx.c)
+
+    def test_simple_generic_instance_subtyping_invariant(self) -> None:
+        self.assert_subtype(self.fx.ga, self.fx.ga)
+        self.assert_subtype(self.fx.hab, self.fx.hab)
+
+        self.assert_not_subtype(self.fx.ga, self.fx.g2a)
+        self.assert_not_subtype(self.fx.ga, self.fx.gb)
+        self.assert_not_subtype(self.fx.gb, self.fx.ga)
+
+    def test_simple_generic_instance_subtyping_covariant(self) -> None:
+        self.assert_subtype(self.fx_co.ga, self.fx_co.ga)
+        self.assert_subtype(self.fx_co.hab, self.fx_co.hab)
+
+        self.assert_not_subtype(self.fx_co.ga, self.fx_co.g2a)
+        self.assert_not_subtype(self.fx_co.ga, self.fx_co.gb)
+        self.assert_subtype(self.fx_co.gb, self.fx_co.ga)
+
+    def test_simple_generic_instance_subtyping_contravariant(self) -> None:
+        self.assert_subtype(self.fx_contra.ga, self.fx_contra.ga)
+        self.assert_subtype(self.fx_contra.hab, self.fx_contra.hab)
+
+        self.assert_not_subtype(self.fx_contra.ga, self.fx_contra.g2a)
+        self.assert_subtype(self.fx_contra.ga, self.fx_contra.gb)
+        self.assert_not_subtype(self.fx_contra.gb, self.fx_contra.ga)
+
+    def test_generic_subtyping_with_inheritance_invariant(self) -> None:
+        self.assert_subtype(self.fx.gsab, self.fx.gb)
+        self.assert_not_subtype(self.fx.gsab, self.fx.ga)
+        self.assert_not_subtype(self.fx.gsaa, self.fx.gb)
+
+    def test_generic_subtyping_with_inheritance_covariant(self) -> None:
+        self.assert_subtype(self.fx_co.gsab, self.fx_co.gb)
+        self.assert_subtype(self.fx_co.gsab, self.fx_co.ga)
+        self.assert_not_subtype(self.fx_co.gsaa, self.fx_co.gb)
+
+    def test_generic_subtyping_with_inheritance_contravariant(self) -> None:
+        self.assert_subtype(self.fx_contra.gsab, self.fx_contra.gb)
+        self.assert_not_subtype(self.fx_contra.gsab, self.fx_contra.ga)
+        self.assert_subtype(self.fx_contra.gsaa, self.fx_contra.gb)
+
+    def test_interface_subtyping(self) -> None:
+        self.assert_subtype(self.fx.e, self.fx.f)
+        self.assert_equivalent(self.fx.f, self.fx.f)
+        self.assert_not_subtype(self.fx.a, self.fx.f)
+
+    def test_generic_interface_subtyping(self) -> None:
+        # TODO make this work
+        self.skip()
+
+        fx2 = InterfaceTypeFixture()
+
+        self.assert_subtype(fx2.m1, fx2.gfa)
+        self.assert_not_subtype(fx2.m1, fx2.gfb)
+
+        self.assert_equivalent(fx2.gfa, fx2.gfa)
+
+    def test_basic_callable_subtyping(self) -> None:
+        self.assert_strict_subtype(self.fx.callable(self.fx.o, self.fx.d),
+                                   self.fx.callable(self.fx.a, self.fx.d))
+        self.assert_strict_subtype(self.fx.callable(self.fx.d, self.fx.b),
+                                   self.fx.callable(self.fx.d, self.fx.a))
+
+        self.assert_strict_subtype(self.fx.callable(self.fx.a, self.fx.nonet),
+                                   self.fx.callable(self.fx.a, self.fx.a))
+
+        self.assert_unrelated(
+            self.fx.callable(self.fx.a, self.fx.a, self.fx.a),
+            self.fx.callable(self.fx.a, self.fx.a))
+
+    def test_default_arg_callable_subtyping(self) -> None:
+        self.assert_strict_subtype(
+            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
+            self.fx.callable(self.fx.a, self.fx.d, self.fx.a))
+
+        self.assert_strict_subtype(
+            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
+            self.fx.callable(self.fx.a, self.fx.a))
+
+        self.assert_strict_subtype(
+            self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a),
+            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a))
+
+        self.assert_unrelated(
+            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
+            self.fx.callable(self.fx.d, self.fx.d, self.fx.a))
+
+        self.assert_unrelated(
+            self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a),
+            self.fx.callable_default(1, self.fx.a, self.fx.a, self.fx.a))
+
+        self.assert_unrelated(
+            self.fx.callable_default(1, self.fx.a, self.fx.a),
+            self.fx.callable(self.fx.a, self.fx.a, self.fx.a))
+
+    def test_var_arg_callable_subtyping_1(self) -> None:
+        self.assert_strict_subtype(
+            self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
+            self.fx.callable_var_arg(0, self.fx.b, self.fx.a))
+
+    def test_var_arg_callable_subtyping_2(self) -> None:
+        self.assert_strict_subtype(
+            self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
+            self.fx.callable(self.fx.b, self.fx.a))
+
+    def test_var_arg_callable_subtyping_3(self) -> None:
+        self.assert_strict_subtype(
+            self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
+            self.fx.callable(self.fx.a))
+
+    def test_var_arg_callable_subtyping_4(self) -> None:
+        self.assert_strict_subtype(
+            self.fx.callable_var_arg(1, self.fx.a, self.fx.d, self.fx.a),
+            self.fx.callable(self.fx.b, self.fx.a))
+
+    def test_var_arg_callable_subtyping_5(self) -> None:
+        self.assert_strict_subtype(
+            self.fx.callable_var_arg(0, self.fx.a, self.fx.d, self.fx.a),
+            self.fx.callable(self.fx.b, self.fx.a))
+
+    def test_var_arg_callable_subtyping_6(self) -> None:
+        self.assert_strict_subtype(
+            self.fx.callable_var_arg(0, self.fx.a, self.fx.f, self.fx.d),
+            self.fx.callable_var_arg(0, self.fx.b, self.fx.e, self.fx.d))
+
+    def test_var_arg_callable_subtyping_7(self) -> None:
+        self.assert_not_subtype(
+            self.fx.callable_var_arg(0, self.fx.b, self.fx.d),
+            self.fx.callable(self.fx.a, self.fx.d))
+
+    def test_var_arg_callable_subtyping_8(self) -> None:
+        self.assert_not_subtype(
+            self.fx.callable_var_arg(0, self.fx.b, self.fx.d),
+            self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d))
+        self.assert_subtype(
+            self.fx.callable_var_arg(0, self.fx.a, self.fx.d),
+            self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d))
+
+    def test_var_arg_callable_subtyping_9(self) -> None:
+        self.assert_not_subtype(
+            self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d),
+            self.fx.callable_var_arg(0, self.fx.a, self.fx.d))
+        self.assert_subtype(
+            self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d),
+            self.fx.callable_var_arg(0, self.fx.b, self.fx.d))
+
+    def test_type_callable_subtyping(self) -> None:
+        self.assert_subtype(
+            self.fx.callable_type(self.fx.d, self.fx.a), self.fx.type_type)
+
+        self.assert_strict_subtype(
+            self.fx.callable_type(self.fx.d, self.fx.b),
+            self.fx.callable(self.fx.d, self.fx.a))
+
+        self.assert_strict_subtype(self.fx.callable_type(self.fx.a, self.fx.b),
+                                   self.fx.callable(self.fx.a, self.fx.b))
+
+    # IDEA: Maybe add these test cases (they are tested pretty well in type
+    #       checker tests already):
+    #  * more interface subtyping test cases
+    #  * more generic interface subtyping test cases
+    #  * type variables
+    #  * tuple types
+    #  * None type
+    #  * any type
+    #  * generic function types
+
+    def assert_subtype(self, s: Type, t: Type) -> None:
+        assert_true(is_subtype(s, t), '{} not subtype of {}'.format(s, t))
+
+    def assert_not_subtype(self, s: Type, t: Type) -> None:
+        assert_true(not is_subtype(s, t), '{} subtype of {}'.format(s, t))
+
+    def assert_strict_subtype(self, s: Type, t: Type) -> None:
+        self.assert_subtype(s, t)
+        self.assert_not_subtype(t, s)
+
+    def assert_equivalent(self, s: Type, t: Type) -> None:
+        self.assert_subtype(s, t)
+        self.assert_subtype(t, s)
+
+    def assert_unrelated(self, s: Type, t: Type) -> None:
+        self.assert_not_subtype(s, t)
+        self.assert_not_subtype(t, s)
diff --git a/mypy/test/testtransform.py b/mypy/test/testtransform.py
new file mode 100644
index 0000000..0dcdd1d
--- /dev/null
+++ b/mypy/test/testtransform.py
@@ -0,0 +1,88 @@
+"""Identity AST transform test cases"""
+
+import os.path
+
+from typing import Dict, List
+
+from mypy import build
+from mypy.build import BuildSource
+from mypy.myunit import Suite
+from mypy.test.helpers import (
+    assert_string_arrays_equal, testfile_pyversion, normalize_error_messages
+)
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.config import test_data_prefix, test_temp_dir
+from mypy.errors import CompileError
+from mypy.nodes import TypeInfo
+from mypy.treetransform import TransformVisitor
+from mypy.types import Type
+from mypy.options import Options
+
+
+class TransformSuite(Suite):
+    # Reuse semantic analysis test cases.
+    transform_files = ['semanal-basic.test',
+                       'semanal-expressions.test',
+                       'semanal-classes.test',
+                       'semanal-types.test',
+                       'semanal-modules.test',
+                       'semanal-statements.test',
+                       'semanal-abstractclasses.test',
+                       'semanal-python2.test']
+
+    def cases(self) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for f in self.transform_files:
+            c += parse_test_cases(os.path.join(test_data_prefix, f),
+                                  test_transform,
+                                  base_path=test_temp_dir,
+                                  native_sep=True)
+        return c
+
+
+def test_transform(testcase: DataDrivenTestCase) -> None:
+    """Perform an identity transform test case."""
+
+    try:
+        src = '\n'.join(testcase.input)
+        options = Options()
+        options.use_builtins_fixtures = True
+        options.semantic_analysis_only = True
+        options.show_traceback = True
+        options.python_version = testfile_pyversion(testcase.file)
+        result = build.build(sources=[BuildSource('main', None, src)],
+                             options=options,
+                             alt_lib_path=test_temp_dir)
+        a = result.errors
+        if a:
+            raise CompileError(a)
+        # Include string representations of the source files in the actual
+        # output.
+        for fnam in sorted(result.files.keys()):
+            f = result.files[fnam]
+
+            # Omit the builtins module and files with a special marker in the
+            # path.
+            # TODO the test is not reliable
+            if (not f.path.endswith((os.sep + 'builtins.pyi',
+                                     'typing.pyi',
+                                     'abc.pyi'))
+                    and not os.path.basename(f.path).startswith('_')
+                    and not os.path.splitext(
+                        os.path.basename(f.path))[0].endswith('_')):
+                t = TestTransformVisitor()
+                f = t.mypyfile(f)
+                a += str(f).split('\n')
+    except CompileError as e:
+        a = e.messages
+    a = normalize_error_messages(a)
+    assert_string_arrays_equal(
+        testcase.output, a,
+        'Invalid semantic analyzer output ({}, line {})'.format(testcase.file,
+                                                                testcase.line))
+
+
+class TestTransformVisitor(TransformVisitor):
+    def type(self, type: Type) -> Type:
+        assert type is not None
+        return type
diff --git a/mypy/test/testtypegen.py b/mypy/test/testtypegen.py
new file mode 100644
index 0000000..e60d2e7
--- /dev/null
+++ b/mypy/test/testtypegen.py
@@ -0,0 +1,128 @@
+"""Test cases for the type checker: exporting inferred types"""
+
+import os.path
+import re
+
+from typing import Set, List
+
+from mypy import build
+from mypy.build import BuildSource
+from mypy.myunit import Suite
+from mypy.test import config
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.util import short_type
+from mypy.nodes import (
+    NameExpr, TypeVarExpr, CallExpr, Expression, MypyFile, AssignmentStmt, IntExpr
+)
+from mypy.traverser import TraverserVisitor
+from mypy.errors import CompileError
+from mypy.options import Options
+
+
+class TypeExportSuite(Suite):
+    # List of files that contain test case descriptions.
+    files = ['typexport-basic.test']
+
+    def cases(self) -> List[DataDrivenTestCase]:
+        c = []  # type: List[DataDrivenTestCase]
+        for f in self.files:
+            c += parse_test_cases(os.path.join(config.test_data_prefix, f),
+                                  self.run_test, config.test_temp_dir)
+        return c
+
+    def run_test(self, testcase: DataDrivenTestCase) -> None:
+        try:
+            line = testcase.input[0]
+            mask = ''
+            if line.startswith('##'):
+                mask = '(' + line[2:].strip() + ')$'
+
+            src = '\n'.join(testcase.input)
+            options = Options()
+            options.use_builtins_fixtures = True
+            options.show_traceback = True
+            result = build.build(sources=[BuildSource('main', None, src)],
+                                 options=options,
+                                 alt_lib_path=config.test_temp_dir)
+            a = result.errors
+            map = result.types
+            nodes = map.keys()
+
+            # Ignore NameExpr nodes of variables with explicit (trivial) types
+            # to simplify output.
+            searcher = SkippedNodeSearcher()
+            for file in result.files.values():
+                file.accept(searcher)
+            ignored = searcher.nodes
+
+            # Filter nodes that should be included in the output.
+            keys = []
+            for node in nodes:
+                if node.line is not None and node.line != -1 and map[node]:
+                    if ignore_node(node) or node in ignored:
+                        continue
+                    if (re.match(mask, short_type(node))
+                            or (isinstance(node, NameExpr)
+                                and re.match(mask, node.name))):
+                        # Include node in output.
+                        keys.append(node)
+
+            for key in sorted(keys,
+                              key=lambda n: (n.line, short_type(n),
+                                             str(n) + str(map[n]))):
+                ts = str(map[key]).replace('*', '')  # Remove erased tags
+                ts = ts.replace('__main__.', '')
+                a.append('{}({}) : {}'.format(short_type(key), key.line, ts))
+        except CompileError as e:
+            a = e.messages
+        assert_string_arrays_equal(
+            testcase.output, a,
+            'Invalid type checker output ({}, line {})'.format(testcase.file,
+                                                               testcase.line))
+
+
+class SkippedNodeSearcher(TraverserVisitor):
+    def __init__(self) -> None:
+        self.nodes = set()  # type: Set[Expression]
+        self.is_typing = False
+
+    def visit_mypy_file(self, f: MypyFile) -> None:
+        self.is_typing = f.fullname() == 'typing' or f.fullname() == 'builtins'
+        super().visit_mypy_file(f)
+
+    def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
+        if s.type or ignore_node(s.rvalue):
+            for lvalue in s.lvalues:
+                if isinstance(lvalue, NameExpr):
+                    self.nodes.add(lvalue)
+        super().visit_assignment_stmt(s)
+
+    def visit_name_expr(self, n: NameExpr) -> None:
+        self.skip_if_typing(n)
+
+    def visit_int_expr(self, n: IntExpr) -> None:
+        self.skip_if_typing(n)
+
+    def skip_if_typing(self, n: Expression) -> None:
+        if self.is_typing:
+            self.nodes.add(n)
+
+
+def ignore_node(node: Expression) -> bool:
+    """Return True if node is to be omitted from test case output."""
+
+    # We want to get rid of object() expressions in the typing module stub
+    # and also TypeVar(...) expressions. Since detecting whether a node comes
+    # from the typing module is not easy, we just strip them all away.
+    if isinstance(node, TypeVarExpr):
+        return True
+    if isinstance(node, NameExpr) and node.fullname == 'builtins.object':
+        return True
+    if isinstance(node, NameExpr) and node.fullname == 'builtins.None':
+        return True
+    if isinstance(node, CallExpr) and (ignore_node(node.callee) or
+                                       node.analyzed):
+        return True
+
+    return False
diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py
new file mode 100644
index 0000000..78fa2e1
--- /dev/null
+++ b/mypy/test/testtypes.py
@@ -0,0 +1,806 @@
+"""Test cases for mypy types and type operations."""
+
+from typing import List, Tuple
+
+from mypy.myunit import (
+    Suite, assert_equal, assert_true, assert_false, assert_type
+)
+from mypy.erasetype import erase_type
+from mypy.expandtype import expand_type
+from mypy.join import join_types, join_simple
+from mypy.meet import meet_types
+from mypy.types import (
+    UnboundType, AnyType, CallableType, TupleType, TypeVarDef, Type,
+    Instance, NoneTyp, Overloaded, TypeType, UnionType, UninhabitedType,
+    true_only, false_only, TypeVarId
+)
+from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, CONTRAVARIANT, INVARIANT, COVARIANT
+from mypy.subtypes import is_subtype, is_more_precise, is_proper_subtype
+from mypy.typefixture import TypeFixture, InterfaceTypeFixture
+
+
+class TypesSuite(Suite):
+    def __init__(self) -> None:
+        super().__init__()
+        self.x = UnboundType('X')  # Helpers
+        self.y = UnboundType('Y')
+        self.fx = TypeFixture()
+        self.function = self.fx.function
+
+    def test_any(self) -> None:
+        assert_equal(str(AnyType()), 'Any')
+
+    def test_simple_unbound_type(self) -> None:
+        u = UnboundType('Foo')
+        assert_equal(str(u), 'Foo?')
+
+    def test_generic_unbound_type(self) -> None:
+        u = UnboundType('Foo', [UnboundType('T'), AnyType()])
+        assert_equal(str(u), 'Foo?[T?, Any]')
+
+    def test_callable_type(self) -> None:
+        c = CallableType([self.x, self.y],
+                         [ARG_POS, ARG_POS],
+                         [None, None],
+                         AnyType(), self.function)
+        assert_equal(str(c), 'def (X?, Y?) -> Any')
+
+        c2 = CallableType([], [], [], NoneTyp(), None)
+        assert_equal(str(c2), 'def ()')
+
+    def test_callable_type_with_default_args(self) -> None:
+        c = CallableType([self.x, self.y], [ARG_POS, ARG_OPT], [None, None],
+                     AnyType(), self.function)
+        assert_equal(str(c), 'def (X?, Y? =) -> Any')
+
+        c2 = CallableType([self.x, self.y], [ARG_OPT, ARG_OPT], [None, None],
+                      AnyType(), self.function)
+        assert_equal(str(c2), 'def (X? =, Y? =) -> Any')
+
+    def test_callable_type_with_var_args(self) -> None:
+        c = CallableType([self.x], [ARG_STAR], [None], AnyType(), self.function)
+        assert_equal(str(c), 'def (*X?) -> Any')
+
+        c2 = CallableType([self.x, self.y], [ARG_POS, ARG_STAR],
+                      [None, None], AnyType(), self.function)
+        assert_equal(str(c2), 'def (X?, *Y?) -> Any')
+
+        c3 = CallableType([self.x, self.y], [ARG_OPT, ARG_STAR], [None, None],
+                      AnyType(), self.function)
+        assert_equal(str(c3), 'def (X? =, *Y?) -> Any')
+
+    def test_tuple_type(self) -> None:
+        assert_equal(str(TupleType([], None)), 'Tuple[]')
+        assert_equal(str(TupleType([self.x], None)), 'Tuple[X?]')
+        assert_equal(str(TupleType([self.x, AnyType()], None)), 'Tuple[X?, Any]')
+
+    def test_type_variable_binding(self) -> None:
+        assert_equal(str(TypeVarDef('X', 1, [], self.fx.o)), 'X')
+        assert_equal(str(TypeVarDef('X', 1, [self.x, self.y], self.fx.o)),
+                     'X in (X?, Y?)')
+
+    def test_generic_function_type(self) -> None:
+        c = CallableType([self.x, self.y], [ARG_POS, ARG_POS], [None, None],
+                     self.y, self.function, name=None,
+                     variables=[TypeVarDef('X', -1, [], self.fx.o)])
+        assert_equal(str(c), 'def [X] (X?, Y?) -> Y?')
+
+        v = [TypeVarDef('Y', -1, [], self.fx.o),
+             TypeVarDef('X', -2, [], self.fx.o)]
+        c2 = CallableType([], [], [], NoneTyp(), self.function, name=None, variables=v)
+        assert_equal(str(c2), 'def [Y, X] ()')
+
+
+class TypeOpsSuite(Suite):
+    def set_up(self) -> None:
+        self.fx = TypeFixture(INVARIANT)
+        self.fx_co = TypeFixture(COVARIANT)
+        self.fx_contra = TypeFixture(CONTRAVARIANT)
+
+    # expand_type
+
+    def test_trivial_expand(self) -> None:
+        for t in (self.fx.a, self.fx.o, self.fx.t, self.fx.nonet,
+                  self.tuple(self.fx.a),
+                  self.callable([], self.fx.a, self.fx.a), self.fx.anyt):
+            self.assert_expand(t, [], t)
+            self.assert_expand(t, [], t)
+            self.assert_expand(t, [], t)
+
+    def test_expand_naked_type_var(self) -> None:
+        self.assert_expand(self.fx.t, [(self.fx.t.id, self.fx.a)], self.fx.a)
+        self.assert_expand(self.fx.t, [(self.fx.s.id, self.fx.a)], self.fx.t)
+
+    def test_expand_basic_generic_types(self) -> None:
+        self.assert_expand(self.fx.gt, [(self.fx.t.id, self.fx.a)], self.fx.ga)
+
+    # IDEA: Add test cases for
+    #   tuple types
+    #   callable types
+    #   multiple arguments
+
+    def assert_expand(self,
+                      orig: Type,
+                      map_items: List[Tuple[TypeVarId, Type]],
+                      result: Type,
+                      ) -> None:
+        lower_bounds = {}
+
+        for id, t in map_items:
+            lower_bounds[id] = t
+
+        exp = expand_type(orig, lower_bounds)
+        # Remove erased tags (asterisks).
+        assert_equal(str(exp).replace('*', ''), str(result))
+
+    # erase_type
+
+    def test_trivial_erase(self) -> None:
+        for t in (self.fx.a, self.fx.o, self.fx.nonet, self.fx.anyt):
+            self.assert_erase(t, t)
+
+    def test_erase_with_type_variable(self) -> None:
+        self.assert_erase(self.fx.t, self.fx.anyt)
+
+    def test_erase_with_generic_type(self) -> None:
+        self.assert_erase(self.fx.ga, self.fx.gdyn)
+        self.assert_erase(self.fx.hab,
+                          Instance(self.fx.hi, [self.fx.anyt, self.fx.anyt]))
+
+    def test_erase_with_tuple_type(self) -> None:
+        self.assert_erase(self.tuple(self.fx.a), self.fx.std_tuple)
+
+    def test_erase_with_function_type(self) -> None:
+        self.assert_erase(self.fx.callable(self.fx.a, self.fx.b),
+                          self.fx.callable_type(self.fx.nonet))
+
+    def test_erase_with_type_object(self) -> None:
+        self.assert_erase(self.fx.callable_type(self.fx.a, self.fx.b),
+                          self.fx.callable_type(self.fx.nonet))
+
+    def test_erase_with_type_type(self) -> None:
+        self.assert_erase(self.fx.type_a, self.fx.type_a)
+        self.assert_erase(self.fx.type_t, self.fx.type_any)
+
+    def assert_erase(self, orig: Type, result: Type) -> None:
+        assert_equal(str(erase_type(orig)), str(result))
+
+    # is_more_precise
+
+    def test_is_more_precise(self) -> None:
+        fx = self.fx
+        assert_true(is_more_precise(fx.b, fx.a))
+        assert_true(is_more_precise(fx.b, fx.b))
+        assert_true(is_more_precise(fx.b, fx.b))
+        assert_true(is_more_precise(fx.b, fx.anyt))
+        assert_true(is_more_precise(self.tuple(fx.b, fx.a),
+                                    self.tuple(fx.b, fx.a)))
+
+        assert_false(is_more_precise(fx.a, fx.b))
+        assert_false(is_more_precise(fx.anyt, fx.b))
+        assert_false(is_more_precise(self.tuple(fx.b, fx.b),
+                                     self.tuple(fx.b, fx.a)))
+
+    # is_proper_subtype
+
+    def test_is_proper_subtype(self) -> None:
+        fx = self.fx
+
+        assert_true(is_proper_subtype(fx.a, fx.a))
+        assert_true(is_proper_subtype(fx.b, fx.a))
+        assert_true(is_proper_subtype(fx.b, fx.o))
+        assert_true(is_proper_subtype(fx.b, fx.o))
+
+        assert_false(is_proper_subtype(fx.a, fx.b))
+        assert_false(is_proper_subtype(fx.o, fx.b))
+
+        assert_true(is_proper_subtype(fx.anyt, fx.anyt))
+        assert_false(is_proper_subtype(fx.a, fx.anyt))
+        assert_false(is_proper_subtype(fx.anyt, fx.a))
+
+        assert_true(is_proper_subtype(fx.ga, fx.ga))
+        assert_true(is_proper_subtype(fx.gdyn, fx.gdyn))
+        assert_false(is_proper_subtype(fx.ga, fx.gdyn))
+        assert_false(is_proper_subtype(fx.gdyn, fx.ga))
+
+        assert_true(is_proper_subtype(fx.t, fx.t))
+        assert_false(is_proper_subtype(fx.t, fx.s))
+
+        assert_true(is_proper_subtype(fx.a, UnionType([fx.a, fx.b])))
+        assert_true(is_proper_subtype(UnionType([fx.a, fx.b]),
+                                      UnionType([fx.a, fx.b, fx.c])))
+        assert_false(is_proper_subtype(UnionType([fx.a, fx.b]),
+                                       UnionType([fx.b, fx.c])))
+
+    def test_is_proper_subtype_covariance(self) -> None:
+        fx_co = self.fx_co
+
+        assert_true(is_proper_subtype(fx_co.gsab, fx_co.gb))
+        assert_true(is_proper_subtype(fx_co.gsab, fx_co.ga))
+        assert_false(is_proper_subtype(fx_co.gsaa, fx_co.gb))
+        assert_true(is_proper_subtype(fx_co.gb, fx_co.ga))
+        assert_false(is_proper_subtype(fx_co.ga, fx_co.gb))
+
+    def test_is_proper_subtype_contravariance(self) -> None:
+        fx_contra = self.fx_contra
+
+        assert_true(is_proper_subtype(fx_contra.gsab, fx_contra.gb))
+        assert_false(is_proper_subtype(fx_contra.gsab, fx_contra.ga))
+        assert_true(is_proper_subtype(fx_contra.gsaa, fx_contra.gb))
+        assert_false(is_proper_subtype(fx_contra.gb, fx_contra.ga))
+        assert_true(is_proper_subtype(fx_contra.ga, fx_contra.gb))
+
+    def test_is_proper_subtype_invariance(self) -> None:
+        fx = self.fx
+
+        assert_true(is_proper_subtype(fx.gsab, fx.gb))
+        assert_false(is_proper_subtype(fx.gsab, fx.ga))
+        assert_false(is_proper_subtype(fx.gsaa, fx.gb))
+        assert_false(is_proper_subtype(fx.gb, fx.ga))
+        assert_false(is_proper_subtype(fx.ga, fx.gb))
+
+    # can_be_true / can_be_false
+
+    def test_empty_tuple_always_false(self) -> None:
+        tuple_type = self.tuple()
+        assert_true(tuple_type.can_be_false)
+        assert_false(tuple_type.can_be_true)
+
+    def test_nonempty_tuple_always_true(self) -> None:
+        tuple_type = self.tuple(AnyType(), AnyType())
+        assert_true(tuple_type.can_be_true)
+        assert_false(tuple_type.can_be_false)
+
+    def test_union_can_be_true_if_any_true(self) -> None:
+        union_type = UnionType([self.fx.a, self.tuple()])
+        assert_true(union_type.can_be_true)
+
+    def test_union_can_not_be_true_if_none_true(self) -> None:
+        union_type = UnionType([self.tuple(), self.tuple()])
+        assert_false(union_type.can_be_true)
+
+    def test_union_can_be_false_if_any_false(self) -> None:
+        union_type = UnionType([self.fx.a, self.tuple()])
+        assert_true(union_type.can_be_false)
+
+    def test_union_can_not_be_false_if_none_false(self) -> None:
+        union_type = UnionType([self.tuple(self.fx.a), self.tuple(self.fx.d)])
+        assert_false(union_type.can_be_false)
+
+    # true_only / false_only
+
+    def test_true_only_of_false_type_is_uninhabited(self) -> None:
+        to = true_only(NoneTyp())
+        assert_type(UninhabitedType, to)
+
+    def test_true_only_of_true_type_is_idempotent(self) -> None:
+        always_true = self.tuple(AnyType())
+        to = true_only(always_true)
+        assert_true(always_true is to)
+
+    def test_true_only_of_instance(self) -> None:
+        to = true_only(self.fx.a)
+        assert_equal(str(to), "A")
+        assert_true(to.can_be_true)
+        assert_false(to.can_be_false)
+        assert_type(Instance, to)
+        # The original class still can be false
+        assert_true(self.fx.a.can_be_false)
+
+    def test_true_only_of_union(self) -> None:
+        tup_type = self.tuple(AnyType())
+        # Union of something that is unknown, something that is always true, something
+        # that is always false
+        union_type = UnionType([self.fx.a, tup_type, self.tuple()])
+        to = true_only(union_type)
+        assert isinstance(to, UnionType)
+        assert_equal(len(to.items), 2)
+        assert_true(to.items[0].can_be_true)
+        assert_false(to.items[0].can_be_false)
+        assert_true(to.items[1] is tup_type)
+
+    def test_false_only_of_true_type_is_uninhabited(self) -> None:
+        fo = false_only(self.tuple(AnyType()))
+        assert_type(UninhabitedType, fo)
+
+    def test_false_only_of_false_type_is_idempotent(self) -> None:
+        always_false = NoneTyp()
+        fo = false_only(always_false)
+        assert_true(always_false is fo)
+
+    def test_false_only_of_instance(self) -> None:
+        fo = false_only(self.fx.a)
+        assert_equal(str(fo), "A")
+        assert_false(fo.can_be_true)
+        assert_true(fo.can_be_false)
+        assert_type(Instance, fo)
+        # The original class still can be true
+        assert_true(self.fx.a.can_be_true)
+
+    def test_false_only_of_union(self) -> None:
+        tup_type = self.tuple()
+        # Union of something that is unknown, something that is always true, something
+        # that is always false
+        union_type = UnionType([self.fx.a, self.tuple(AnyType()), tup_type])
+        assert_equal(len(union_type.items), 3)
+        fo = false_only(union_type)
+        assert isinstance(fo, UnionType)
+        assert_equal(len(fo.items), 2)
+        assert_false(fo.items[0].can_be_true)
+        assert_true(fo.items[0].can_be_false)
+        assert_true(fo.items[1] is tup_type)
+
+    # Helpers
+
+    def tuple(self, *a: Type) -> TupleType:
+        return TupleType(list(a), self.fx.std_tuple)
+
+    def callable(self, vars: List[str], *a: Type) -> CallableType:
+        """callable(args, a1, ..., an, r) constructs a callable with
+        argument types a1, ... an and return type r and type arguments
+        vars.
+        """
+        tv = []  # type: List[TypeVarDef]
+        n = -1
+        for v in vars:
+            tv.append(TypeVarDef(v, n, None, self.fx.o))
+            n -= 1
+        return CallableType(list(a[:-1]),
+                            [ARG_POS] * (len(a) - 1),
+                            [None] * (len(a) - 1),
+                            a[-1],
+                            self.fx.function,
+                            name=None,
+                            variables=tv)
+
+
+class JoinSuite(Suite):
+    def set_up(self) -> None:
+        self.fx = TypeFixture()
+
+    def test_trivial_cases(self) -> None:
+        for simple in self.fx.a, self.fx.o, self.fx.b:
+            self.assert_join(simple, simple, simple)
+
+    def test_class_subtyping(self) -> None:
+        self.assert_join(self.fx.a, self.fx.o, self.fx.o)
+        self.assert_join(self.fx.b, self.fx.o, self.fx.o)
+        self.assert_join(self.fx.a, self.fx.d, self.fx.o)
+        self.assert_join(self.fx.b, self.fx.c, self.fx.a)
+        self.assert_join(self.fx.b, self.fx.d, self.fx.o)
+
+    def test_tuples(self) -> None:
+        self.assert_join(self.tuple(), self.tuple(), self.tuple())
+        self.assert_join(self.tuple(self.fx.a),
+                         self.tuple(self.fx.a),
+                         self.tuple(self.fx.a))
+        self.assert_join(self.tuple(self.fx.b, self.fx.c),
+                         self.tuple(self.fx.a, self.fx.d),
+                         self.tuple(self.fx.a, self.fx.o))
+
+        self.assert_join(self.tuple(self.fx.a, self.fx.a),
+                         self.fx.std_tuple,
+                         self.fx.o)
+        self.assert_join(self.tuple(self.fx.a),
+                         self.tuple(self.fx.a, self.fx.a),
+                         self.fx.o)
+
+    def test_function_types(self) -> None:
+        self.assert_join(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.b))
+
+        self.assert_join(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.b, self.fx.b),
+                         self.callable(self.fx.b, self.fx.b))
+        self.assert_join(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.a),
+                         self.callable(self.fx.a, self.fx.a))
+        self.assert_join(self.callable(self.fx.a, self.fx.b),
+                         self.fx.function,
+                         self.fx.function)
+        self.assert_join(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.d, self.fx.b),
+                         self.fx.function)
+
+    def test_type_vars(self) -> None:
+        self.assert_join(self.fx.t, self.fx.t, self.fx.t)
+        self.assert_join(self.fx.s, self.fx.s, self.fx.s)
+        self.assert_join(self.fx.t, self.fx.s, self.fx.o)
+
+    def test_none(self) -> None:
+        # Any type t joined with None results in t.
+        for t in [NoneTyp(), self.fx.a, self.fx.o, UnboundType('x'),
+                  self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b), self.fx.anyt]:
+            self.assert_join(t, NoneTyp(), t)
+
+    def test_unbound_type(self) -> None:
+        self.assert_join(UnboundType('x'), UnboundType('x'), self.fx.anyt)
+        self.assert_join(UnboundType('x'), UnboundType('y'), self.fx.anyt)
+
+        # Any type t joined with an unbound type results in dynamic. Unbound
+        # type means that there is an error somewhere in the program, so this
+        # does not affect type safety (whatever the result).
+        for t in [self.fx.a, self.fx.o, self.fx.ga, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_join(t, UnboundType('X'), self.fx.anyt)
+
+    def test_any_type(self) -> None:
+        # Join against 'Any' type always results in 'Any'.
+        for t in [self.fx.anyt, self.fx.a, self.fx.o, NoneTyp(),
+                  UnboundType('x'), self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_join(t, self.fx.anyt, self.fx.anyt)
+
+    def test_mixed_truth_restricted_type_simple(self) -> None:
+        # join_simple against differently restricted truthiness types drops restrictions.
+        true_a = true_only(self.fx.a)
+        false_o = false_only(self.fx.o)
+        j = join_simple(self.fx.o, true_a, false_o)
+        assert_true(j.can_be_true)
+        assert_true(j.can_be_false)
+
+    def test_mixed_truth_restricted_type(self) -> None:
+        # join_types against differently restricted truthiness types drops restrictions.
+        true_any = true_only(AnyType())
+        false_o = false_only(self.fx.o)
+        j = join_types(true_any, false_o)
+        assert_true(j.can_be_true)
+        assert_true(j.can_be_false)
+
+    def test_other_mixed_types(self) -> None:
+        # In general, joining unrelated types produces object.
+        for t1 in [self.fx.a, self.fx.t, self.tuple(),
+                   self.callable(self.fx.a, self.fx.b)]:
+            for t2 in [self.fx.a, self.fx.t, self.tuple(),
+                       self.callable(self.fx.a, self.fx.b)]:
+                if str(t1) != str(t2):
+                    self.assert_join(t1, t2, self.fx.o)
+
+    def test_simple_generics(self) -> None:
+        self.assert_join(self.fx.ga, self.fx.ga, self.fx.ga)
+        self.assert_join(self.fx.ga, self.fx.gb, self.fx.ga)
+        self.assert_join(self.fx.ga, self.fx.gd, self.fx.o)
+        self.assert_join(self.fx.ga, self.fx.g2a, self.fx.o)
+
+        self.assert_join(self.fx.ga, self.fx.nonet, self.fx.ga)
+        self.assert_join(self.fx.ga, self.fx.anyt, self.fx.anyt)
+
+        for t in [self.fx.a, self.fx.o, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_join(t, self.fx.ga, self.fx.o)
+
+    def test_generics_with_multiple_args(self) -> None:
+        self.assert_join(self.fx.hab, self.fx.hab, self.fx.hab)
+        self.assert_join(self.fx.hab, self.fx.hbb, self.fx.hab)
+        self.assert_join(self.fx.had, self.fx.haa, self.fx.o)
+
+    def test_generics_with_inheritance(self) -> None:
+        self.assert_join(self.fx.gsab, self.fx.gb, self.fx.gb)
+        self.assert_join(self.fx.gsba, self.fx.gb, self.fx.ga)
+        self.assert_join(self.fx.gsab, self.fx.gd, self.fx.o)
+
+    def test_generics_with_inheritance_and_shared_supertype(self) -> None:
+        self.assert_join(self.fx.gsba, self.fx.gs2a, self.fx.ga)
+        self.assert_join(self.fx.gsab, self.fx.gs2a, self.fx.ga)
+        self.assert_join(self.fx.gsab, self.fx.gs2d, self.fx.o)
+
+    def test_generic_types_and_any(self) -> None:
+        self.assert_join(self.fx.gdyn, self.fx.ga, self.fx.gdyn)
+
+    def test_callables_with_any(self) -> None:
+        self.assert_join(self.callable(self.fx.a, self.fx.a, self.fx.anyt,
+                                       self.fx.a),
+                         self.callable(self.fx.a, self.fx.anyt, self.fx.a,
+                                       self.fx.anyt),
+                         self.callable(self.fx.a, self.fx.anyt, self.fx.anyt,
+                                       self.fx.anyt))
+
+    def test_overloaded(self) -> None:
+        c = self.callable
+
+        def ov(*items: CallableType) -> Overloaded:
+            return Overloaded(list(items))
+
+        fx = self.fx
+        func = fx.function
+        c1 = c(fx.a, fx.a)
+        c2 = c(fx.b, fx.b)
+        c3 = c(fx.c, fx.c)
+        self.assert_join(ov(c1, c2), c1, c1)
+        self.assert_join(ov(c1, c2), c2, c2)
+        self.assert_join(ov(c1, c2), ov(c1, c2), ov(c1, c2))
+        self.assert_join(ov(c1, c2), ov(c1, c3), c1)
+        self.assert_join(ov(c2, c1), ov(c3, c1), c1)
+        self.assert_join(ov(c1, c2), c3, func)
+
+    def test_overloaded_with_any(self) -> None:
+        c = self.callable
+
+        def ov(*items: CallableType) -> Overloaded:
+            return Overloaded(list(items))
+
+        fx = self.fx
+        any = fx.anyt
+        self.assert_join(ov(c(fx.a, fx.a), c(fx.b, fx.b)), c(any, fx.b), c(any, fx.b))
+        self.assert_join(ov(c(fx.a, fx.a), c(any, fx.b)), c(fx.b, fx.b), c(any, fx.b))
+
+    def test_join_interface_types(self) -> None:
+        self.skip()  # FIX
+        self.assert_join(self.fx.f, self.fx.f, self.fx.f)
+        self.assert_join(self.fx.f, self.fx.f2, self.fx.o)
+        self.assert_join(self.fx.f, self.fx.f3, self.fx.f)
+
+    def test_join_interface_and_class_types(self) -> None:
+        self.skip()  # FIX
+
+        self.assert_join(self.fx.o, self.fx.f, self.fx.o)
+        self.assert_join(self.fx.a, self.fx.f, self.fx.o)
+
+        self.assert_join(self.fx.e, self.fx.f, self.fx.f)
+
+    def test_join_class_types_with_interface_result(self) -> None:
+        self.skip()  # FIX
+        # Unique result
+        self.assert_join(self.fx.e, self.fx.e2, self.fx.f)
+
+        # Ambiguous result
+        self.assert_join(self.fx.e2, self.fx.e3, self.fx.anyt)
+
+    def test_generic_interfaces(self) -> None:
+        self.skip()  # FIX
+
+        fx = InterfaceTypeFixture()
+
+        self.assert_join(fx.gfa, fx.gfa, fx.gfa)
+        self.assert_join(fx.gfa, fx.gfb, fx.o)
+
+        self.assert_join(fx.m1, fx.gfa, fx.gfa)
+
+        self.assert_join(fx.m1, fx.gfb, fx.o)
+
+    def test_simple_type_objects(self) -> None:
+        t1 = self.type_callable(self.fx.a, self.fx.a)
+        t2 = self.type_callable(self.fx.b, self.fx.b)
+        tr = self.type_callable(self.fx.b, self.fx.a)
+
+        self.assert_join(t1, t1, t1)
+        j = join_types(t1, t1)
+        assert isinstance(j, CallableType)
+        assert_true(j.is_type_obj())
+
+        self.assert_join(t1, t2, tr)
+        self.assert_join(t1, self.fx.type_type, self.fx.type_type)
+        self.assert_join(self.fx.type_type, self.fx.type_type,
+                         self.fx.type_type)
+
+    def test_type_type(self) -> None:
+        self.assert_join(self.fx.type_a, self.fx.type_b, self.fx.type_a)
+        self.assert_join(self.fx.type_b, self.fx.type_any, self.fx.type_any)
+        self.assert_join(self.fx.type_b, self.fx.type_type, self.fx.type_type)
+        self.assert_join(self.fx.type_b, self.fx.type_c, self.fx.type_a)
+        self.assert_join(self.fx.type_c, self.fx.type_d, TypeType.make_normalized(self.fx.o))
+        self.assert_join(self.fx.type_type, self.fx.type_any, self.fx.type_type)
+        self.assert_join(self.fx.type_b, self.fx.anyt, self.fx.anyt)
+
+    # There are additional test cases in check-inference.test.
+
+    # TODO: Function types + varargs and default args.
+
+    def assert_join(self, s: Type, t: Type, join: Type) -> None:
+        self.assert_simple_join(s, t, join)
+        self.assert_simple_join(t, s, join)
+
+    def assert_simple_join(self, s: Type, t: Type, join: Type) -> None:
+        result = join_types(s, t)
+        actual = str(result)
+        expected = str(join)
+        assert_equal(actual, expected,
+                     'join({}, {}) == {{}} ({{}} expected)'.format(s, t))
+        assert_true(is_subtype(s, result),
+                    '{} not subtype of {}'.format(s, result))
+        assert_true(is_subtype(t, result),
+                    '{} not subtype of {}'.format(t, result))
+
+    def tuple(self, *a: Type) -> TupleType:
+        return TupleType(list(a), self.fx.std_tuple)
+
+    def callable(self, *a: Type) -> CallableType:
+        """callable(a1, ..., an, r) constructs a callable with argument types
+        a1, ... an and return type r.
+        """
+        n = len(a) - 1
+        return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n,
+                        a[-1], self.fx.function)
+
+    def type_callable(self, *a: Type) -> CallableType:
+        """type_callable(a1, ..., an, r) constructs a callable with
+        argument types a1, ... an and return type r, and which
+        represents a type.
+        """
+        n = len(a) - 1
+        return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n,
+                        a[-1], self.fx.type_type)
+
+
+class MeetSuite(Suite):
+    def set_up(self) -> None:
+        self.fx = TypeFixture()
+
+    def test_trivial_cases(self) -> None:
+        for simple in self.fx.a, self.fx.o, self.fx.b:
+            self.assert_meet(simple, simple, simple)
+
+    def test_class_subtyping(self) -> None:
+        self.assert_meet(self.fx.a, self.fx.o, self.fx.a)
+        self.assert_meet(self.fx.a, self.fx.b, self.fx.b)
+        self.assert_meet(self.fx.b, self.fx.o, self.fx.b)
+        self.assert_meet(self.fx.a, self.fx.d, NoneTyp())
+        self.assert_meet(self.fx.b, self.fx.c, NoneTyp())
+
+    def test_tuples(self) -> None:
+        self.assert_meet(self.tuple(), self.tuple(), self.tuple())
+        self.assert_meet(self.tuple(self.fx.a),
+                         self.tuple(self.fx.a),
+                         self.tuple(self.fx.a))
+        self.assert_meet(self.tuple(self.fx.b, self.fx.c),
+                         self.tuple(self.fx.a, self.fx.d),
+                         self.tuple(self.fx.b, NoneTyp()))
+
+        self.assert_meet(self.tuple(self.fx.a, self.fx.a),
+                         self.fx.std_tuple,
+                         self.tuple(self.fx.a, self.fx.a))
+        self.assert_meet(self.tuple(self.fx.a),
+                         self.tuple(self.fx.a, self.fx.a),
+                         NoneTyp())
+
+    def test_function_types(self) -> None:
+        self.assert_meet(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.b))
+
+        self.assert_meet(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.b, self.fx.b),
+                         self.callable(self.fx.a, self.fx.b))
+        self.assert_meet(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.a),
+                         self.callable(self.fx.a, self.fx.b))
+
+    def test_type_vars(self) -> None:
+        self.assert_meet(self.fx.t, self.fx.t, self.fx.t)
+        self.assert_meet(self.fx.s, self.fx.s, self.fx.s)
+        self.assert_meet(self.fx.t, self.fx.s, NoneTyp())
+
+    def test_none(self) -> None:
+        self.assert_meet(NoneTyp(), NoneTyp(), NoneTyp())
+
+        self.assert_meet(NoneTyp(), self.fx.anyt, NoneTyp())
+
+        # The meet of any type t with None results in None, unless t is Any.
+        for t in [self.fx.a, self.fx.o, UnboundType('x'), self.fx.t,
+                  self.tuple(), self.callable(self.fx.a, self.fx.b)]:
+            self.assert_meet(t, NoneTyp(), NoneTyp())
+
+    def test_unbound_type(self) -> None:
+        self.assert_meet(UnboundType('x'), UnboundType('x'), self.fx.anyt)
+        self.assert_meet(UnboundType('x'), UnboundType('y'), self.fx.anyt)
+
+        self.assert_meet(UnboundType('x'), self.fx.anyt, UnboundType('x'))
+
+        # The meet of any type t with an unbound type results in dynamic.
+        # Unbound type means that there is an error somewhere in the program,
+        # so this does not affect type safety.
+        for t in [self.fx.a, self.fx.o, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_meet(t, UnboundType('X'), self.fx.anyt)
+
+    def test_dynamic_type(self) -> None:
+        # Meet against dynamic type always results in dynamic.
+        for t in [self.fx.anyt, self.fx.a, self.fx.o, NoneTyp(),
+                  UnboundType('x'), self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_meet(t, self.fx.anyt, t)
+
+    def test_simple_generics(self) -> None:
+        self.assert_meet(self.fx.ga, self.fx.ga, self.fx.ga)
+        self.assert_meet(self.fx.ga, self.fx.o, self.fx.ga)
+        self.assert_meet(self.fx.ga, self.fx.gb, self.fx.gb)
+        self.assert_meet(self.fx.ga, self.fx.gd, self.fx.nonet)
+        self.assert_meet(self.fx.ga, self.fx.g2a, self.fx.nonet)
+
+        self.assert_meet(self.fx.ga, self.fx.nonet, self.fx.nonet)
+        self.assert_meet(self.fx.ga, self.fx.anyt, self.fx.ga)
+
+        for t in [self.fx.a, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_meet(t, self.fx.ga, self.fx.nonet)
+
+    def test_generics_with_multiple_args(self) -> None:
+        self.assert_meet(self.fx.hab, self.fx.hab, self.fx.hab)
+        self.assert_meet(self.fx.hab, self.fx.haa, self.fx.hab)
+        self.assert_meet(self.fx.hab, self.fx.had, self.fx.nonet)
+        self.assert_meet(self.fx.hab, self.fx.hbb, self.fx.hbb)
+
+    def test_generics_with_inheritance(self) -> None:
+        self.assert_meet(self.fx.gsab, self.fx.gb, self.fx.gsab)
+        self.assert_meet(self.fx.gsba, self.fx.gb, self.fx.nonet)
+
+    def test_generics_with_inheritance_and_shared_supertype(self) -> None:
+        self.assert_meet(self.fx.gsba, self.fx.gs2a, self.fx.nonet)
+        self.assert_meet(self.fx.gsab, self.fx.gs2a, self.fx.nonet)
+
+    def test_generic_types_and_dynamic(self) -> None:
+        self.assert_meet(self.fx.gdyn, self.fx.ga, self.fx.ga)
+
+    def test_callables_with_dynamic(self) -> None:
+        self.assert_meet(self.callable(self.fx.a, self.fx.a, self.fx.anyt,
+                                       self.fx.a),
+                         self.callable(self.fx.a, self.fx.anyt, self.fx.a,
+                                       self.fx.anyt),
+                         self.callable(self.fx.a, self.fx.anyt, self.fx.anyt,
+                                       self.fx.anyt))
+
+    def test_meet_interface_types(self) -> None:
+        self.assert_meet(self.fx.f, self.fx.f, self.fx.f)
+        self.assert_meet(self.fx.f, self.fx.f2, self.fx.nonet)
+        self.assert_meet(self.fx.f, self.fx.f3, self.fx.f3)
+
+    def test_meet_interface_and_class_types(self) -> None:
+        self.assert_meet(self.fx.o, self.fx.f, self.fx.f)
+        self.assert_meet(self.fx.a, self.fx.f, self.fx.nonet)
+
+        self.assert_meet(self.fx.e, self.fx.f, self.fx.e)
+
+    def test_meet_class_types_with_shared_interfaces(self) -> None:
+        # These have nothing special with respect to meets, unlike joins. These
+        # are for completeness only.
+        self.assert_meet(self.fx.e, self.fx.e2, self.fx.nonet)
+        self.assert_meet(self.fx.e2, self.fx.e3, self.fx.nonet)
+
+    def test_meet_with_generic_interfaces(self) -> None:
+        # TODO fix
+        self.skip()
+
+        fx = InterfaceTypeFixture()
+        self.assert_meet(fx.gfa, fx.m1, fx.m1)
+        self.assert_meet(fx.gfa, fx.gfa, fx.gfa)
+        self.assert_meet(fx.gfb, fx.m1, fx.nonet)
+
+    def test_type_type(self) -> None:
+        self.assert_meet(self.fx.type_a, self.fx.type_b, self.fx.type_b)
+        self.assert_meet(self.fx.type_b, self.fx.type_any, self.fx.type_b)
+        self.assert_meet(self.fx.type_b, self.fx.type_type, self.fx.type_b)
+        self.assert_meet(self.fx.type_b, self.fx.type_c, self.fx.nonet)
+        self.assert_meet(self.fx.type_c, self.fx.type_d, self.fx.nonet)
+        self.assert_meet(self.fx.type_type, self.fx.type_any, self.fx.type_any)
+        self.assert_meet(self.fx.type_b, self.fx.anyt, self.fx.type_b)
+
+    # FIX generic interfaces + ranges
+
+    def assert_meet(self, s: Type, t: Type, meet: Type) -> None:
+        self.assert_simple_meet(s, t, meet)
+        self.assert_simple_meet(t, s, meet)
+
+    def assert_simple_meet(self, s: Type, t: Type, meet: Type) -> None:
+        result = meet_types(s, t)
+        actual = str(result)
+        expected = str(meet)
+        assert_equal(actual, expected,
+                     'meet({}, {}) == {{}} ({{}} expected)'.format(s, t))
+        assert_true(is_subtype(result, s),
+                    '{} not subtype of {}'.format(result, s))
+        assert_true(is_subtype(result, t),
+                    '{} not subtype of {}'.format(result, t))
+
+    def tuple(self, *a: Type) -> TupleType:
+        return TupleType(list(a), self.fx.std_tuple)
+
+    def callable(self, *a: Type) -> CallableType:
+        """callable(a1, ..., an, r) constructs a callable with argument types
+        a1, ... an and return type r.
+        """
+        n = len(a) - 1
+        return CallableType(list(a[:-1]),
+                            [ARG_POS] * n, [None] * n,
+                            a[-1], self.fx.function)
diff --git a/mypy/test/update.py b/mypy/test/update.py
new file mode 100644
index 0000000..e69de29
diff --git a/mypy/version.py b/mypy/version.py
index 2dff1c6..7fe2620 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -1,7 +1,7 @@
 import os
 from mypy import git
 
-__version__ = '0.520'
+__version__ = '0.521'
 base_version = __version__
 
 mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
diff --git a/mypy_self_check.ini b/mypy_self_check.ini
new file mode 100644
index 0000000..de99649
--- /dev/null
+++ b/mypy_self_check.ini
@@ -0,0 +1,8 @@
+[mypy]
+disallow_untyped_defs = True
+disallow_subclassing_any = True
+warn_no_return = True
+
+; historical exceptions
+[mypy-mypy.test.testextensions]
+disallow_untyped_defs = False
diff --git a/pinfer/.gitignore b/pinfer/.gitignore
new file mode 100644
index 0000000..e1dace5
--- /dev/null
+++ b/pinfer/.gitignore
@@ -0,0 +1,3 @@
+__pycache__
+*~
+*.pyc
diff --git a/pinfer/LICENSE b/pinfer/LICENSE
new file mode 100644
index 0000000..ecdce98
--- /dev/null
+++ b/pinfer/LICENSE
@@ -0,0 +1,27 @@
+pinfer is licensed under the terms of the MIT license, reproduced below.
+
+= = = = =
+
+The MIT License
+
+Copyright (c) 2013, 2014 Jukka Lehtosalo
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+= = = = =
diff --git a/pinfer/README b/pinfer/README
new file mode 100644
index 0000000..1f4fe4c
--- /dev/null
+++ b/pinfer/README
@@ -0,0 +1,47 @@
+ReadMe for pinfer
+=================
+
+Introduction
+------------
+
+Pinfer is a tool for runtime type inference of variable types and
+function signatures in Python programs. The inferred types are mostly
+compatible with mypy types. It is intended for coming up with draft
+types when migrating Python code to static typing, but it can also be
+used as a code understanding or analysis tool.
+
+Pinfer is very experimental!
+
+Requirements
+------------
+
+ * Python 3.2 or later
+
+Basic usage
+-----------
+
+To infer types of all functions and methods in a module:
+
+  import foo   # target module
+  import pinfer
+
+  # set up type inference and dumping
+  pinfer.infer_module(foo)
+  pinfer.dump_at_exit()
+
+  # now do something with the module, e.g. run tests
+
+For inferring a Python module, add the above lines to the test suite.
+
+Handy wrapper
+-------------
+
+The p.py script provides a handy wrapper for the above.
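+
+For example (this mirrors the example invocation documented in p.py; the
+module and test file names are purely illustrative):
+
+  python3 p.py textwrap test/test_textwrap.py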
+
+
+Copyright
+---------
+
+This project includes files from the open source CPython project.  Those
+files are Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008,
+2009, 2010, 2011, 2012, 2013, 2014 Python Software Foundation; All Rights
+Reserved.  The license can be found at
+https://github.com/python/cpython/blob/master/LICENSE.
+
diff --git a/pinfer/__init__.py b/pinfer/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pinfer/inspect3.py b/pinfer/inspect3.py
new file mode 100644
index 0000000..4d74be1
--- /dev/null
+++ b/pinfer/inspect3.py
@@ -0,0 +1,122 @@
+# from Python 3's inspect.py
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
+'''
+provide getfullargspec() and getcallargs() for Python 2
+'''
+
+import sys
+import inspect
+
+if sys.version_info.major == 2:
+
+    def getfullargspec(func):
+        (args, varargs, keywords, defaults) = inspect.getargspec(func)
+        return (args, varargs, keywords, defaults, [], [], {})
+
+
+    def getcallargs(*func_and_positional, **named):
+        """Get the mapping of arguments to values.
+
+        A dict is returned, with keys the function argument names (including the
+        names of the * and ** arguments, if any), and values the respective bound
+        values from 'positional' and 'named'."""
+        func = func_and_positional[0]
+        positional = func_and_positional[1:]
+        spec = getfullargspec(func)
+        args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, ann = spec
+        f_name = func.__name__
+        arg2value = {}
+
+
+        if inspect.ismethod(func) and func.__self__ is not None:
+            # implicit 'self' (or 'cls' for classmethods) argument
+            positional = (func.__self__,) + positional
+        num_pos = len(positional)
+        num_args = len(args)
+        num_defaults = len(defaults) if defaults else 0
+
+        n = min(num_pos, num_args)
+        for i in range(n):
+            arg2value[args[i]] = positional[i]
+        if varargs:
+            arg2value[varargs] = tuple(positional[n:])
+        possible_kwargs = set(args + kwonlyargs)
+        if varkw:
+            arg2value[varkw] = {}
+        for kw, value in named.items():
+            if kw not in possible_kwargs:
+                if not varkw:
+                    raise TypeError("%s() got an unexpected keyword argument %r" %
+                                    (f_name, kw))
+                arg2value[varkw][kw] = value
+                continue
+            if kw in arg2value:
+                raise TypeError("%s() got multiple values for argument %r" %
+                                (f_name, kw))
+            arg2value[kw] = value
+        if num_pos > num_args and not varargs:
+            _too_many(f_name, args, kwonlyargs, varargs, num_defaults,
+                       num_pos, arg2value)
+        if num_pos < num_args:
+            req = args[:num_args - num_defaults]
+            for arg in req:
+                if arg not in arg2value:
+                    _missing_arguments(f_name, req, True, arg2value)
+            for i, arg in enumerate(args[num_args - num_defaults:]):
+                if arg not in arg2value:
+                    arg2value[arg] = defaults[i]
+        missing = 0
+        for kwarg in kwonlyargs:
+            if kwarg not in arg2value:
+                if kwonlydefaults and kwarg in kwonlydefaults:
+                    arg2value[kwarg] = kwonlydefaults[kwarg]
+                else:
+                    missing += 1
+        if missing:
+            _missing_arguments(f_name, kwonlyargs, False, arg2value)
+        return arg2value
+
+
+    def _too_many(f_name, args, kwonly, varargs, defcount, given, values):
+        atleast = len(args) - defcount
+        kwonly_given = len([arg for arg in kwonly if arg in values])
+        if varargs:
+            plural = atleast != 1
+            sig = "at least %d" % (atleast,)
+        elif defcount:
+            plural = True
+            sig = "from %d to %d" % (atleast, len(args))
+        else:
+            plural = len(args) != 1
+            sig = str(len(args))
+        kwonly_sig = ""
+        if kwonly_given:
+            msg = " positional argument%s (and %d keyword-only argument%s)"
+            kwonly_sig = (msg % ("s" if given != 1 else "", kwonly_given,
+                                 "s" if kwonly_given != 1 else ""))
+        raise TypeError("%s() takes %s positional argument%s but %d%s %s given" %
+                (f_name, sig, "s" if plural else "", given, kwonly_sig,
+                 "was" if given == 1 and not kwonly_given else "were"))
+
+
+    def _missing_arguments(f_name, argnames, pos, values):
+        names = [repr(name) for name in argnames if name not in values]
+        missing = len(names)
+        if missing == 1:
+            s = names[0]
+        elif missing == 2:
+            s = "{} and {}".format(*names)
+        else:
+            tail = ", {} and {}".format(*names[-2:])
+            del names[-2:]
+            s = ", ".join(names) + tail
+        raise TypeError("%s() missing %i required %s argument%s: %s" %
+                        (f_name, missing,
+                          "positional" if pos else "keyword-only",
+                          "" if missing == 1 else "s", s))
+
+
+else:
+    getfullargspec = inspect.getfullargspec
+    getcallargs = inspect.getcallargs
diff --git a/pinfer/p.py b/pinfer/p.py
new file mode 100644
index 0000000..451038d
--- /dev/null
+++ b/pinfer/p.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python3
+"""Stub to run pinfer on a module.
+
+Usage:
+
+  p.py targetmod testfile [outfile] [ -- testargs]
+
+Where:
+
+  targetmod:  the full target module (e.g. textwrap)
+  testfile: the full test module file (e.g. test/test_textwrap.py)
+  outfile:  where to write the annotated module.  If unspecified, stubs
+            will be written at the end of stdout.
+
+Example invocation:
+
+  python3 p.py textwrap test/test_textwrap.py
+"""
+
+
+import sys
+import imp
+import pinfer
+import os
+import atexit
+import inspect
+
+iport = __builtins__.__import__
+watched = set()
+
+
+def inferring_import(*args, **kwargs):
+    module = iport(*args, **kwargs)
+    if module not in watched:
+        watched.add(module)
+        pinfer.infer_module(module)
+    return module
+
+
+def main():
+    if '--' in sys.argv:
+        argslen = sys.argv.index('--')
+    else:
+        argslen = len(sys.argv)
+    args = sys.argv[1:argslen]
+    del sys.argv[1:argslen + 1]
+
+    if len(args) == 2:
+        targetpackage, testfile = args
+        outfile = None
+    elif len(args) == 3:
+        targetpackage, testfile, outfile = args
+    else:
+        sys.stderr.write('Usage: %s targetmodule testfile [outfile] [ -- testargs]\n' %
+                         sys.argv[0])
+        sys.exit(2)
+
+    # help us with local imports
+    filemodule = os.path.dirname(os.path.abspath(testfile))
+    sys.path.append(filemodule)
+
+    targetmod = __import__(targetpackage)
+    targetfile = inspect.getfile(targetmod)
+    pinfer.infer_module(targetmod)
+
+    if outfile:
+        @atexit.register
+        def rewrite_file(targetfile=targetfile, outfile=outfile, pinfer=pinfer):
+            if targetfile.endswith(".pyc"):
+                targetfile = targetfile[0:-1]
+            annotated = pinfer.annotate_file(targetfile)
+            open(outfile, "w").write(annotated)
+    else:
+        pinfer.dump_at_exit()
+
+    pinfer.ignore_files.add(os.path.abspath(testfile))
+
+    # run testfile as main
+    del sys.modules['__main__']
+    imp.load_source('__main__', testfile)
+
+if __name__ == '__main__':
+    main()
diff --git a/pinfer/pinfer.py b/pinfer/pinfer.py
new file mode 100644
index 0000000..3dd1445
--- /dev/null
+++ b/pinfer/pinfer.py
@@ -0,0 +1,686 @@
+"""Tools for runtime type inference"""
+
+import inspect
+from inspect3 import getfullargspec, getcallargs
+import types
+import codecs
+import os
+import tokenize
+try:
+    from StringIO import StringIO
+    from unparse import Unparser
+except:
+    from io import StringIO
+    from unparse3 import Unparser
+import ast
+
+
+MAX_INFERRED_TUPLE_LENGTH = 10
+PREFERRED_LINE_LENGTH = 79
+
+
+var_db = {}  # (location, variable) -> type
+func_argid_db = {}  # funcid -> argspec
+func_arg_db = {}  # (funcid, name) -> type
+func_return_db = {}  # funcid -> type
+func_source_db = {}  # funcid -> source string
+#func_info_db = {}  # funcid -> (class, name, argspec, file, line, source)
+ignore_files = set()
+
+# The type inferencing wrapper should not be reentrant.  It's not, in theory, calling
+# out to any external code which we would want to infer the types of.  However,
+# sometimes we do something like infer_type(arg.keys()) or infer_type(arg.values()) if
+# the arg is a collection, and we want to know about the types of its elements.  .keys(),
+# .values(), etc. can be overloaded, possibly to a method we've wrapped.  This can become
+# infinitely recursive, particularly because on something like arg.keys(), keys() gets passed
+# arg as the first parameter, so if we've wrapped keys() we'll try to infer_type(arg),
+# which will detect it's a dictionary, call infer_type(arg.keys()), recurse and so on.
+# We ran into this problem with collections.OrderedDict.
+# To prevent reentrancy, we set is_performing_inference = True iff we're in the middle of
+# inferring the types of a function.  If we try to run another function we've wrapped,
+# we skip type inferencing so we can't accidentally infinitely recurse.
+is_performing_inference = False
+
+
+def reset():
+    global var_db, func_argid_db, func_arg_db, func_return_db, func_source_db
+    global ignore_files, is_performing_inference
+    var_db = {}
+    func_arg_db = {}
+    func_return_db = {}
+    # we don't actually want to clear these on reset(), or we'll
+    # lose the functions we've already wrapped forever.
+    #func_source_db = {}
+    #func_argid_db = {}
+    is_performing_inference = False
+    ignore_files = set()
+
+
+def format_state(pretty=False):
+    lines = []
+    for loc, var in sorted(var_db.keys()):
+        lines.append('%s: %s' % (var, var_db[(loc, var)]))
+    funcnames = sorted(set(func_return_db.keys()))
+    prevclass = ''
+    indent = ''
+    for funcid in funcnames:
+        curclass, name, sourcefile, sourceline = funcid
+        if curclass != prevclass:
+            if curclass:
+                lines.append('class %s(...):' % curclass)
+                indent = ' ' * 4
+            else:
+                indent = ''
+            prevclass = curclass
+
+        lines.append(format_sig(funcid, name, indent, pretty))
+    return '\n'.join(lines)
+
+
+def unparse_ast(node):
+    buf = StringIO()
+    Unparser(node, buf)
+    return buf.getvalue().strip()
+
+
+def format_sig(funcid, fname, indent, pretty, defaults=[]):
+    (argnames, varargs, varkw, _, kwonlyargs, _, _) = func_argid_db[funcid]
+
+    # to get defaults, parse the function, get the nodes for the
+    # defaults, then unparse them
+    try:
+        fn_ast = ast.parse(func_source_db[funcid].strip()).body[0]
+
+        # override fname if we parsed a different one
+        fname = fn_ast.name
+
+        defaults = [unparse_ast(dn) for dn in fn_ast.args.defaults]
+
+        if hasattr(fn_ast.args, 'kw_defaults'):
+            kwonly_defaults = [unparse_ast(dn) for dn in fn_ast.args.kw_defaults]
+        else:
+            kwonly_defaults = []
+    except:
+        defaults, kwonly_defaults = [], []
+    finally:
+        # pad defaults to match the length of args
+        defaults = ([None] * (len(argnames) - len(defaults))) + defaults
+        kwonly_defaults = ([None] * (len(kwonlyargs) - len(kwonly_defaults))) + kwonly_defaults
+
+    args = [('', arg, default) for (arg, default) in zip(argnames, defaults)]
+
+    if varargs:
+        args += [('*', varargs, None)]
+    elif len(kwonlyargs) > 0:
+        args += [('*', '', None)]
+    if len(kwonlyargs) > 0:
+        args += [('', arg, default) for (arg, default) in zip(kwonlyargs, kwonly_defaults)]
+    if varkw:
+        args += [('**', varkw, None)]
+
+    argstrs = []
+    for i, (prefix, arg, default) in enumerate(args):
+        argstr = prefix + arg
+
+        # Omit type of self argument.
+        if (funcid, arg) in func_arg_db and not (i == 0 and arg == 'self'):
+            argstr += ': %s' % func_arg_db[(funcid, arg)]
+
+        if default:
+            argstr += ' = %s' % default
+
+        argstrs.append(argstr)
+
+    ret = str(func_return_db.get(funcid, Unknown()))
+
+    sig = 'def %s(%s) -> %s' % (fname, ', '.join(argstrs), ret)
+    if not pretty or len(sig) <= PREFERRED_LINE_LENGTH or not args:
+        return indent + sig
+
+    else:
+        # Format into multiple lines to conserve horizontal space.
+        first = indent + 'def %s(' % fname
+        extra_indent = first.index('(') + 1
+
+        decl = indent + first
+        decl += (',\n' + indent + ' ' * extra_indent).join(argstrs)
+        decl += ')\n%s -> %s' % (indent + ' ' * (extra_indent - 4), ret)
+        return decl
+
+
+def annotate_file(path):
+    # this should be documented somewhere...
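+    # (5 is the value of token.INDENT in the standard library's token module)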
+    INDENT_TOKEN = 5
+
+    with open(path, 'r') as targetfile:
+        source = targetfile.read()
+
+    line_offsets = []
+    source_length = 0
+    for line in source.split('\n'):
+        line_offsets.append(source_length)
+        source_length = source_length + len(line) + 1
+
+    funcids = set(funcid for funcid, arg in func_arg_db)
+
+    # list of (oldstart, oldend, replacement)
+    replacements = []  # type: List[Tuple[int, int, str]]
+
+    for funcid in funcids:
+        class_name, name, sourcefile, def_start_line = funcid
+        if sourcefile != path:
+            continue
+
+        func_source = func_source_db[funcid]
+        tokens = list(tokenize.generate_tokens(StringIO(func_source).readline))
+        assert len(tokens) > 0
+
+        # we're making the assumption that the def at least gets to start on
+    # its own line, which is fine for non-lambdas
+
+        if tokens[0][0] == INDENT_TOKEN:
+            indent = tokens[0][1]
+            del tokens[0]
+        else:
+            indent = ''
+
+        # Find the first indent, which should be between the end of the def
+        # and before the start of the body.  Then find the preceding colon,
+        # which should be at the end of the def.
+
+        for indent_loc in range(len(tokens)):
+            if tokens[indent_loc][0] == INDENT_TOKEN:
+                function_is_one_line = False
+                break
+            else:
+                function_is_one_line = True
+
+        if function_is_one_line:
+            # we're also making the assumption that the def has an indent on the
+            # line following the signature, which is true almost all of the time.
+            # If this is not the case, we should just leave a comment above the
+            # function, although I might not have time to do that now.
+            continue
+
+        for def_end_loc in range(indent_loc, -1, -1):
+            if tokens[def_end_loc][1] == ':':
+                break
+
+        assert def_end_loc > 0
+
+        def_end_line, def_end_col = tokens[def_end_loc][2]
+        def_end_line -= 1  # the tokenizer apparently 1-indexes lines
+        def_end_line += def_start_line
+
+        def_start_offset = line_offsets[def_start_line]
+        def_end_offset = line_offsets[def_end_line] + def_end_col
+
+        annotated_def = format_sig(funcid, name, indent, True)
+
+        replacements.append((def_start_offset, def_end_offset, annotated_def))
+
+    # ideally, we'd put this after the docstring
+    replacements.append((0, 0, "from typing import List, Dict, Set, Tuple, Callable, Pattern, Match, Union, Optional\n"))
+
+    # absurdly inefficient algorithm: replace with O(n) writer
+
+    for (start, end, replacement) in sorted(replacements, key=lambda r: r[0], reverse=True):
+        source = source[0:start] + replacement + source[end:]
+
+    return source
+
+
+def dump():
+    s = format_state(pretty=True)
+    if s:
+        print()
+        print('INFERRED TYPES:')
+        print(s)
+    reset()
+
+
+def dump_at_exit():
+    import atexit
+    atexit.register(dump)
+
+
+def get_defining_file(obj):
+    try:
+        path = os.path.abspath(inspect.getfile(obj))
+        if path.endswith('.pyc'):
+            path = path[0:-1]
+        return path
+    except:
+        return None
+
+
+def infer_var(name, value):
+    key = (None, name)
+    update_var_db(key, value)
+
+
+def infer_attrs(x):
+    if hasattr(x, '__class__'):
+        t = x.__class__
+    else:
+        t = type(x)
+    cls = t.__name__
+    typedict = t.__dict__
+    for dict in x.__dict__, typedict:
+        for attr, value in dict.items():
+            if attr in ('__dict__', '__doc__', '__module__', '__weakref__'):
+                continue
+            if type(value) is type(infer_attrs) and dict is typedict:
+                # Skip methods.
+                continue
+            key = (None, '%s.%s' % (cls, attr))
+            update_var_db(key, value)
+
+
+def infer_method_signature(class_name):
+    def decorator(func):
+        return infer_signature(func, class_name)
+    return decorator
+
+
+def infer_signature(func, class_name=''):
+    """Decorator that infers the signature of a function."""
+
+    # infer_method_signature should be idempotent
+    if hasattr(func, '__is_inferring_sig'):
+        return func
+
+    assert func.__module__ != infer_method_signature.__module__
+
+    try:
+        funcfile = get_defining_file(func)
+        funcsource, sourceline = inspect.getsourcelines(func)
+        sourceline -= 1  # getsourcelines is apparently 1-indexed
+    except:
+        return func
+
+    funcid = (class_name, func.__name__, funcfile, sourceline)
+    func_source_db[funcid] = ''.join(funcsource)
+
+    try:
+        func_argid_db[funcid] = getfullargspec(func)
+        vargs_name, kwargs_name = func_argid_db[funcid][1], func_argid_db[funcid][2]
+    except TypeError:
+        # Not supported.
+        return func
+
+    def wrapper(*args, **kwargs):
+        global is_performing_inference
+        # If we're already doing inference, we should be in our own code, not code we're checking.
+        # Not doing this check sometimes results in infinite recursion.
+
+        if is_performing_inference:
+            return func(*args, **kwargs)
+
+        expecting_type_error, got_type_error, got_exception = False, False, False
+
+        is_performing_inference = True
+        try:
+            callargs = getcallargs(func, *args, **kwargs)
+
+            # we have to handle *args and **kwargs separately
+            if vargs_name:
+                va = callargs.pop(vargs_name)
+            if kwargs_name:
+                kw = callargs.pop(kwargs_name)
+
+            arg_db = {arg: infer_value_type(value) for arg, value in callargs.items()}
+
+            # *args and **kwargs need to merge the types of all their values
+            if vargs_name:
+                arg_db[vargs_name] = union_many_types(*[infer_value_type(v) for v in va])
+            if kwargs_name:
+                arg_db[kwargs_name] = union_many_types(*[infer_value_type(v) for v in kw.values()])
+
+        except TypeError:
+            got_exception = expecting_type_error = True
+        except:
+            got_exception = True
+        finally:
+            is_performing_inference = False
+
+        try:
+            ret = func(*args, **kwargs)
+        except TypeError:
+            got_type_error = got_exception = True
+            raise
+        except:
+            got_exception = True
+            raise
+        finally:
+            if not got_exception:
+                assert not expecting_type_error
+
+                # if we didn't get a TypeError, update the actual database
+                for arg, t in arg_db.items():
+                    update_db(func_arg_db, (funcid, arg), t)
+
+                # if we got an exception, we don't have a ret
+                if not got_exception:
+                    is_performing_inference = True
+                    try:
+                        type = infer_value_type(ret)
+                        update_db(func_return_db, funcid, type)
+                    except:
+                        pass
+                    finally:
+                        is_performing_inference = False
+
+        return ret
+
+    if hasattr(func, '__name__'):
+        wrapper.__name__ = func.__name__
+    wrapper.__is_inferring_sig = True
+    return wrapper
+
+
+def infer_class(cls):
+    """Class decorator for inferring signatures of all methods of the class."""
+    for attr, value in cls.__dict__.items():
+        if type(value) is type(infer_class):
+            setattr(cls, attr, infer_method_signature(cls.__name__)(value))
+    return cls
+
+
+def infer_module(namespace):
+    if hasattr(namespace, '__dict__'):
+        namespace = namespace.__dict__
+    for name, value in list(namespace.items()):
+        if inspect.isfunction(value):
+            namespace[name] = infer_signature(value)
+        elif inspect.isclass(value):
+            namespace[name] = infer_class(value)
+
+
+def update_var_db(key, value):
+    type = infer_value_type(value)
+    update_db(var_db, key, type)
+
+
+def update_db(db, key, type):
+    if key not in db:
+        db[key] = type
+    else:
+        db[key] = combine_types(db[key], type)
+
+
+def merge_db(db, other):
+    assert id(db) != id(other)
+    for key in other.keys():
+        if key not in db:
+            db[key] = other[key]
+        else:
+            db[key] = combine_types(db[key], other[key])
+
+
+def infer_value_type(value, depth=0):
+    # Prevent infinite recursion
+    if depth > 5:
+        return Unknown()
+    depth += 1
+
+    if value is None:
+        return None
+    elif isinstance(value, list):
+        return Generic('List', [infer_value_types(value, depth)])
+    elif isinstance(value, dict):
+        keytype = infer_value_types(value.keys(), depth)
+        valuetype = infer_value_types(value.values(), depth)
+        return Generic('Dict', (keytype, valuetype))
+    elif isinstance(value, tuple):
+        if len(value) <= MAX_INFERRED_TUPLE_LENGTH:
+            return Tuple(infer_value_type(item, depth)
+                         for item in value)
+        else:
+            return Generic('TupleSequence', [infer_value_types(value, depth)])
+    elif isinstance(value, set):
+        return Generic('Set', [infer_value_types(value, depth)])
+    elif isinstance(value, types.MethodType) or isinstance(value, types.FunctionType):
+        return Instance(Callable)
+    else:
+        for t in type(value).mro():
+            if get_defining_file(t) in ignore_files:
+                continue
+            elif t is object:
+                return Any()
+            elif hasattr(types, 'InstanceType') and t is types.InstanceType:
+                return Any()
+            else:
+                return Instance(t)
+        else:
+            return Any()
+
+
+def infer_value_types(values, depth=0):
+    """Infer a single type for an iterable of values.
+
+    >>> infer_value_types((1, 'x'))
+    Union(int, str)
+    >>> infer_value_types([])
+    Unknown
+    """
+    inferred = Unknown()
+    for value in sample(values):
+        type = infer_value_type(value, depth)
+        inferred = combine_types(inferred, type)
+    return inferred
+
+
+def sample(values):
+    # TODO only return a sample of values
+    return list(values)
+
+
+def union_many_types(*types):
+    union = Unknown()
+    for t in types:
+        union = combine_types(union, t)
+    return union
+
+
+def combine_types(x, y):
+    """Perform a union of two types.
+
+    >>> combine_types(Instance(int), None)
+    Optional[int]
+    """
+    if isinstance(x, Unknown):
+        return y
+    if isinstance(y, Unknown):
+        return x
+    if isinstance(x, Any):
+        return x
+    if isinstance(y, Any):
+        return y
+    if isinstance(x, Union):
+        return combine_either(x, y)
+    if isinstance(y, Union):
+        return combine_either(y, x)
+    if x == y:
+        return x
+    return simplify_either([x], [y])
+
+
+def combine_either(either, x):
+    if isinstance(x, Union):
+        xtypes = x.types
+    else:
+        xtypes = [x]
+    return simplify_either(either.types, xtypes)
+
+
+def simplify_either(x, y):
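+    # Ordered from narrowest to widest; the index comparison below widens
+    # mixed numeric types (int -> float -> complex).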
+    numerics = [Instance(int), Instance(float), Instance(complex)]
+
+    # TODO this is O(n**2); use an O(n) algorithm instead
+    result = list(x)
+    for type in y:
+        if isinstance(type, Generic):
+            for i, rt in enumerate(result):
+                if isinstance(rt, Generic) and type.typename == rt.typename:
+                    result[i] = Generic(rt.typename,
+                                        (combine_types(t, s)
+                                         for t, s in zip(type.args, rt.args)))
+                    break
+            else:
+                result.append(type)
+        elif isinstance(type, Tuple):
+            for i, rt in enumerate(result):
+                if isinstance(rt, Tuple) and len(type) == len(rt):
+                    result[i] = Tuple(combine_types(t, s)
+                                      for t, s in zip(type.itemtypes,
+                                                      rt.itemtypes))
+                    break
+            else:
+                result.append(type)
+        elif type in numerics:
+            for i, rt in enumerate(result):
+                if rt in numerics:
+                    result[i] = numerics[max(numerics.index(rt), numerics.index(type))]
+                    break
+            else:
+                result.append(type)
+        elif isinstance(type, Instance):
+            for i, rt in enumerate(result):
+                if isinstance(rt, Instance):
+                    # Union[A, SubclassOfA] -> A
+                    # Union[A, A] -> A, because issubclass(A, A) == True,
+                    if issubclass(type.typeobj, rt.typeobj):
+                        break
+                    elif issubclass(rt.typeobj, type.typeobj):
+                        result[i] = type
+                        break
+            else:
+                result.append(type)
+        elif type not in result:
+            result.append(type)
+
+    if len(result) > 1:
+        return Union(result)
+    else:
+        return result[0]
+
+
+class TypeBase(object):
+    """Abstract base class of all type objects.
+
+    Type objects use isinstance tests liberally -- they don't support duck
+    typing well.
+    """
+
+    def __eq__(self, other):
+        if type(other) is not type(self):
+            return False
+        for attr in self.__dict__:
+            if getattr(other, attr) != getattr(self, attr):
+                return False
+        return True
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __repr__(self):
+        return str(self)
+
+
+class Instance(TypeBase):
+    def __init__(self, typeobj):
+        assert not inspect.isclass(typeobj) or not issubclass(typeobj, TypeBase)
+        self.typeobj = typeobj
+
+    def __str__(self):
+        # cheat on regular expression objects which have weird class names
+        # to be consistent with typing.py
+        if self.typeobj == Pattern:
+            return "Pattern"
+        elif self.typeobj == Match:
+            return "Match"
+        else:
+            return self.typeobj.__name__
+
+    def __repr__(self):
+        return 'Instance(%s)' % self
+
+
+class Generic(TypeBase):
+    def __init__(self, typename, args):
+        self.typename = typename
+        self.args = tuple(args)
+
+    def __str__(self):
+        return '%s[%s]' % (self.typename, ', '.join(str(t)
+                                                    for t in self.args))
+
+
+class Tuple(TypeBase):
+    def __init__(self, itemtypes):
+        self.itemtypes = tuple(itemtypes)
+
+    def __len__(self):
+        return len(self.itemtypes)
+
+    def __str__(self):
+        return 'Tuple[%s]' % (', '.join(str(t) for t in self.itemtypes))
+
+
+class Union(TypeBase):
+    def __init__(self, types):
+        assert len(types) > 1
+        self.types = tuple(types)
+
+    def __eq__(self, other):
+        if type(other) is not Union:
+            return False
+        # TODO this is O(n**2); use an O(n) algorithm instead
+        for t in self.types:
+            if t not in other.types:
+                return False
+        for t in other.types:
+            if t not in self.types:
+                return False
+        return True
+
+    def __str__(self):
+        types = list(self.types)
+        if str != bytes:  # on Python 2 str == bytes
+            if Instance(bytes) in types and Instance(str) in types:
+                # We collapse Union[bytes, str] -> AnyStr as late as possible so we
+                # avoid corner cases like subclasses of bytes or str.
+                types.remove(Instance(bytes))
+                types.remove(Instance(str))
+                types.append(Instance(AnyStr))
+        if len(types) == 1:
+            return str(types[0])
+        elif len(types) == 2 and None in types:
+            type = [t for t in types if t is not None][0]
+            return 'Optional[%s]' % type
+        else:
+            return 'Union[%s]' % (', '.join(sorted(str(t) for t in types)))
+
+
+class Unknown(TypeBase):
+    def __str__(self):
+        return 'Unknown'
+
+    def __repr__(self):
+        return 'Unknown()'
+
+
+class Any(TypeBase):
+    def __str__(self):
+        return 'Any'
+
+    def __repr__(self):
+        return 'Any()'
+
+
+class AnyStr(object): pass
+class Callable(object): pass
+import re
+Pattern = type(re.compile(u''))
+Match = type(re.match(u'', u''))
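
A minimal sketch of how the decorators above might be exercised, using only entry
points that appear in this diff (infer_signature, format_state, reset). The exact
text produced depends on format_state, defined earlier in pinfer.py, so the output
noted in the comments is approximate:

    import pinfer

    @pinfer.infer_signature
    def greet(name, excited=False):
        # Each call records the observed argument and return types.
        return 'Hello, %s%s' % (name, '!' if excited else '')

    greet('world')
    greet('world', excited=True)

    # format_state() renders the accumulated databases as stub-like text,
    # roughly "def greet(name: str, excited: bool) -> str" for the calls above.
    print(pinfer.format_state())
    pinfer.reset()

Because the wrapper only records the calls that actually happen, the inferred
signatures are only as complete as the workload used to drive the program.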
diff --git a/pinfer/test_pinfer.py b/pinfer/test_pinfer.py
new file mode 100644
index 0000000..d6168db
--- /dev/null
+++ b/pinfer/test_pinfer.py
@@ -0,0 +1,302 @@
+"""Test cases for the infer module"""
+
+import unittest
+
+from pinfer import Instance, Generic, Tuple, Union, Unknown
+import pinfer
+
+
+class TestInfer(unittest.TestCase):
+    def setUp(self):
+        self.int = Instance(int)
+        self.float = Instance(float)
+
+    def tearDown(self):
+        pinfer.reset()
+
+    def test_instance(self):
+        i = self.int
+        self.assertEqual(i.typeobj, int)
+        self.assertEqual(str(i), 'int')
+        self.assertEqual(repr(i), 'Instance(int)')
+
+        self.assertTrue(i == Instance(int))
+        self.assertFalse(i != Instance(int))
+        self.assertTrue(i != self.float)
+        self.assertFalse(i == self.float)
+        self.assertNotEqual(i, None)
+
+    def test_generic_with_one_arg(self):
+        g = Generic('List', [self.int])
+        self.assertEqual(g.typename, 'List')
+        self.assertEqual(str(g.args), '(Instance(int),)')
+        self.assertEqual(str(g), 'List[int]')
+        self.assertEqual(repr(g), 'List[int]')
+
+        self.assertEqual(g, Generic('List', [self.int]))
+        self.assertNotEqual(g, Generic('Set', [self.int]))
+        self.assertNotEqual(g, Generic('List', [self.float]))
+        self.assertNotEqual(g, self.int)
+
+    def test_generic_with_two_args(self):
+        g = Generic('Dict', (self.int, self.float))
+        self.assertEqual(g.typename, 'Dict')
+        self.assertEqual(str(g), 'Dict[int, float]')
+
+    def test_tuple(self):
+        t0 = Tuple(())
+        t1 = Tuple([self.int])
+        t2 = Tuple((self.float, self.int))
+        self.assertEqual(t0.itemtypes, ())
+        self.assertEqual(str(t1.itemtypes[0]), 'int')
+        self.assertEqual(str(t2.itemtypes[0]), 'float')
+        self.assertEqual(str(t2.itemtypes[1]), 'int')
+        self.assertEqual(str(t0), 'Tuple[]')
+        self.assertEqual(str(t1), 'Tuple[int]')
+        self.assertEqual(str(t2), 'Tuple[float, int]')
+
+        self.assertEqual(t1, Tuple([self.int]))
+        self.assertNotEqual(t1, Tuple([self.float]))
+        self.assertNotEqual(t1, Tuple([self.int, self.int]))
+        self.assertNotEqual(t1, self.int)
+
+    def test_either(self):
+        i = self.int
+        f = self.float
+        s = Instance(str)
+
+        e2 = Union((i, f))
+        self.assertEqual(len(e2.types), 2)
+        self.assertEqual(str(e2), 'Union[float, int]')
+
+        self.assertEqual(e2, Union((i, f)))
+        self.assertEqual(e2, Union((f, i)))
+        self.assertNotEqual(e2, Union((i, s)))
+        self.assertNotEqual(e2, Union((i, f, s)))
+        self.assertNotEqual(Union((i, f, s)), e2)
+        self.assertNotEqual(e2, i)
+
+    def test_either_as_optional(self):
+        optint = Union((self.int, None))
+        self.assertEqual(str(optint), 'Optional[int]')
+        optfloat = Union((None, self.float))
+        self.assertEqual(str(optfloat), 'Optional[float]')
+        eithernone = Union((self.int, self.float, None))
+        self.assertEqual(str(eithernone), 'Union[None, float, int]')
+
+    def test_unknown(self):
+        unknown = Unknown()
+        self.assertEqual(str(unknown), 'Unknown')
+        self.assertEqual(repr(unknown), 'Unknown()')
+
+        self.assertEqual(unknown, Unknown())
+        self.assertNotEqual(unknown, self.int)
+
+    def test_combine_types(self):
+        i = self.int
+        f = self.float
+        s = Instance(str)
+        c = Instance(complex)
+        class Foo: pass
+        o = Instance(Foo)
+
+        # Simple types
+        self.assert_combine(i, i, i)
+        self.assert_combine(s, s, s)
+        self.assert_combine(i, s, Union((i, s)))
+        self.assert_combine(i, None, Union((i, None)))
+        # Unknowns
+        self.assert_combine(i, Unknown(), i)
+        self.assert_combine(Unknown(), Unknown(), Unknown())
+        # Union types
+        self.assert_combine(o, Union((f, s)), Union((o, f, s)))
+        self.assert_combine(i, Union((i, s)), Union((i, s)))
+        self.assert_combine(Union((o, f)), Union((o, s)), Union((o, f, s)))
+        # Tuple types
+        self.assert_combine(Tuple([i, i]), Tuple([i, i]), Tuple([i, i]))
+        self.assert_combine(Tuple([i, i]), Tuple([o, s]),
+                            Tuple([Union([o, i]), Union([s, i])]))
+        # Numeric types
+        self.assert_combine(i, f, f)
+        self.assert_combine(i, c, c)
+        self.assert_combine(c, f, c)
+        # Unions with numerics
+        self.assert_combine(i, Union((o, f)), Union((o, f)))
+        self.assert_combine(Union((o, f)), i, Union((o, f)))
+        self.assert_combine(Union((o, i)), f, Union((o, f)))
+        # Tuples with numerics
+        self.assert_combine(Tuple([i, i]), Tuple([f, i]), Tuple([f, i]))
+        self.assert_combine(Tuple([i, i]), Tuple([f, o]), Tuple([f, Union((i, o))]))
+        self.assert_combine(Tuple([f, i]), Tuple([i, o]), Tuple([f, Union((i, o))]))
+
+    def test_combine_special_cases(self):
+        i = self.int
+        f = self.float
+        u = Unknown()
+        def list_(x):
+            return Generic('List', [x])
+        # Simplify generic types.
+        self.assert_combine(list_(i), list_(u), list_(i))
+
+    def assert_combine(self, t, s, combined):
+        self.assertEqual(pinfer.combine_types(t, s), combined)
+        self.assertEqual(pinfer.combine_types(s, t), combined)
+
+    def test_sample(self):
+        sample = pinfer.sample
+        self.assertEqual(sample(()), [])
+        self.assertEqual(sample((1, 2)), [1, 2])
+        self.assertEqual(sample([]), [])
+        self.assertEqual(sample([1]), [1])
+        self.assertEqual(sample([1, 2]), [1, 2])
+        # TODO larger collections
+
+    def test_infer_simple_value_type(self):
+        self.assert_infer_type(1, 'int')
+        self.assert_infer_type('', 'str')
+        self.assert_infer_type(None, 'None')
+
+    def test_infer_collection_type(self):
+        # List
+        self.assert_infer_type([], 'List[Unknown]')
+        self.assert_infer_type([1], 'List[int]')
+        self.assert_infer_type([1, None], 'List[Optional[int]]')
+        # Dict
+        self.assert_infer_type({1: 'x', 2: None},
+                               'Dict[int, Optional[str]]')
+        # Set
+        self.assert_infer_type({1, None}, 'Set[Optional[int]]')
+        # Tuple
+        self.assert_infer_type((1, 'x'), 'Tuple[int, str]')
+        self.assert_infer_type((1, None) * 100, 'TupleSequence[Optional[int]]')
+
+    def assert_infer_type(self, value, type):
+        self.assertEqual(str(pinfer.infer_value_type(value)), type)
+
+    def test_infer_variables(self):
+        pinfer.infer_var('x', 1)
+        self.assert_infer_state('x: int')
+        pinfer.infer_var('x', 1)
+        pinfer.infer_var('x', None)
+        pinfer.infer_var('y', 1.1)
+        self.assert_infer_state('x: Optional[int]\n'
+                                'y: float')
+
+    def test_infer_instance_var(self):
+        class A: pass
+        a = A()
+        a.x = 1
+        a.y = 'x'
+        pinfer.infer_attrs(a)
+        self.assert_infer_state('A.x: int\n'
+                                'A.y: str')
+
+    def test_infer_class_var(self):
+        class A:
+            x = 1.1
+        pinfer.infer_attrs(A())
+        self.assert_infer_state('A.x: float')
+
+    def test_infer_function_attr(self):
+        class A:
+            def f(self): pass
+        a = A()
+        a.g = lambda x: 1
+        pinfer.infer_attrs(a)
+        self.assert_infer_state('A.g: Callable')
+
+    def test_infer_simple_function_signature(self):
+        @pinfer.infer_signature
+        def f(a):
+            return 'x'
+        f(1)
+        f(None)
+        self.assertEqual(f.__name__, 'f')
+        self.assert_infer_state('def f(a: Optional[int]) -> str')
+
+    def test_infer_function_with_two_args(self):
+        @pinfer.infer_signature
+        def f(x, y):
+            return x * y
+        f(1, 2)
+        f(1, 'x')
+        self.assert_infer_state(
+            'def f(x: int, y: Union[int, str]) -> Union[int, str]')
+
+    def test_infer_method(self):
+        class A:
+            @pinfer.infer_signature
+            def f(self, x): pass
+        A().f('x')
+        self.assert_infer_state('def f(self, x: str) -> None')
+
+    def test_infer_default_arg_values(self):
+        @pinfer.infer_signature
+        def f(x=1, y=None): pass
+        f()
+        self.assert_infer_state('def f(x: int, y: None) -> None')
+        f('x')
+        f('x', 1.1)
+        f()
+        self.assert_infer_state(
+            'def f(x: Union[int, str], y: Optional[float]) -> None')
+
+    def test_infer_varargs(self):
+        @pinfer.infer_signature
+        def f(x, *y): pass
+        f(1)
+        f(1, 'x', None)
+        self.assert_infer_state('def f(x: int, *y: Optional[str]) -> None')
+        f(1)
+        self.assert_infer_state('def f(x: int, *y: Unknown) -> None')
+
+    def test_infer_keyword_args(self):
+        @pinfer.infer_signature
+        def f(x): pass
+        f(x=1)
+        self.assert_infer_state('def f(x: int) -> None')
+
+        @pinfer.infer_signature
+        def f(x='x'): pass
+        f(x=1)
+        self.assert_infer_state('def f(x: int) -> None')
+
+    def test_infer_keyword_varargs(self):
+        @pinfer.infer_signature
+        def f(a, **kwargs): pass
+        f(None, x=1, y='x')
+        self.assert_infer_state(
+            'def f(a: None, **kwargs: Union[int, str]) -> None')
+
+    def test_infer_class(self):
+        @pinfer.infer_class
+        class A:
+            def f(self, x): return 0
+        A().f('x')
+        self.assert_infer_state('class A(...):\n'
+                                '    def f(self, x: str) -> int')
+
+        @pinfer.infer_class
+        class A:
+            def f(self, x): return 0
+        @pinfer.infer_class
+        class B:
+            def f(self): pass
+            def g(self): pass
+        A().f('')
+        B().f()
+        B().g()
+        self.assert_infer_state('class A(...):\n'
+                                '    def f(self, x: str) -> int\n'
+                                'class B(...):\n'
+                                '    def f(self) -> None\n'
+                                '    def g(self) -> None')
+
+    def assert_infer_state(self, expected):
+        state = pinfer.format_state()
+        self.assertEqual(state, expected)
+        pinfer.reset()
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pinfer/test_pinfer3.py b/pinfer/test_pinfer3.py
new file mode 100644
index 0000000..688e8c0
--- /dev/null
+++ b/pinfer/test_pinfer3.py
@@ -0,0 +1,31 @@
+""" tests cases that require python3 syntax """
+
+import unittest
+import pinfer
+
+# Include all of the shared unit tests
+from test_pinfer import TestInfer
+
+
+class TestInfer3(unittest.TestCase):
+    def test_infer_keyword_only_args(self):
+        # decorators break the parsing
+        def f(x, *, y=0): pass
+        f = pinfer.infer_signature(f)
+        f(1, y='x')
+        self.assert_infer_state(
+            'def f(x: int, *, y: str = 0) -> None')
+
+        def f(*, x=None, y=None): pass
+        f = pinfer.infer_signature(f)
+        f(y='x')
+        self.assert_infer_state(
+            'def f(*, x: None = None, y: str = None) -> None')
+
+    def assert_infer_state(self, expected):
+        state = pinfer.format_state()
+        self.assertEqual(state, expected)
+        pinfer.reset()
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pinfer/unparse.py b/pinfer/unparse.py
new file mode 100644
index 0000000..6e1e493
--- /dev/null
+++ b/pinfer/unparse.py
@@ -0,0 +1,610 @@
+# From Python 2's Demo/parser/unparse.py
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
+
+"Usage: unparse.py <path to source file>"
+import sys
+import ast
+import cStringIO
+import os
+
+# Large float and imaginary literals get turned into infinities in the AST.
+# We unparse those infinities to INFSTR.
+INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
+
+def interleave(inter, f, seq):
+    """Call f on each item in seq, calling inter() in between.
+    """
+    seq = iter(seq)
+    try:
+        f(next(seq))
+    except StopIteration:
+        pass
+    else:
+        for x in seq:
+            inter()
+            f(x)
+
+class Unparser:
+    """Methods in this class recursively traverse an AST and
+    output source code for the abstract syntax; original formatting
+    is disregarded. """
+
+    def __init__(self, tree, file = sys.stdout):
+        """Unparser(tree, file=sys.stdout) -> None.
+         Print the source for tree to file."""
+        self.f = file
+        self.future_imports = []
+        self._indent = 0
+        self.dispatch(tree)
+        self.f.write("")
+        self.f.flush()
+
+    def fill(self, text = ""):
+        "Indent a piece of text, according to the current indentation level"
+        self.f.write("\n"+"    "*self._indent + text)
+
+    def write(self, text):
+        "Append a piece of text to the current line."
+        self.f.write(text)
+
+    def enter(self):
+        "Print ':', and increase the indentation."
+        self.write(":")
+        self._indent += 1
+
+    def leave(self):
+        "Decrease the indentation level."
+        self._indent -= 1
+
+    def dispatch(self, tree):
+        "Dispatcher function, dispatching tree type T to method _T."
+        if isinstance(tree, list):
+            for t in tree:
+                self.dispatch(t)
+            return
+        meth = getattr(self, "_"+tree.__class__.__name__)
+        meth(tree)
+
+
+    ############### Unparsing methods ######################
+    # There should be one method per concrete grammar type #
+    # Constructors should be grouped by sum type. Ideally, #
+    # this would follow the order in the grammar, but      #
+    # currently doesn't.                                   #
+    ########################################################
+
+    def _Module(self, tree):
+        for stmt in tree.body:
+            self.dispatch(stmt)
+
+    # stmt
+    def _Expr(self, tree):
+        self.fill()
+        self.dispatch(tree.value)
+
+    def _Import(self, t):
+        self.fill("import ")
+        interleave(lambda: self.write(", "), self.dispatch, t.names)
+
+    def _ImportFrom(self, t):
+        # A from __future__ import may affect unparsing, so record it.
+        if t.module and t.module == '__future__':
+            self.future_imports.extend(n.name for n in t.names)
+
+        self.fill("from ")
+        self.write("." * t.level)
+        if t.module:
+            self.write(t.module)
+        self.write(" import ")
+        interleave(lambda: self.write(", "), self.dispatch, t.names)
+
+    def _Assign(self, t):
+        self.fill()
+        for target in t.targets:
+            self.dispatch(target)
+            self.write(" = ")
+        self.dispatch(t.value)
+
+    def _AugAssign(self, t):
+        self.fill()
+        self.dispatch(t.target)
+        self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
+        self.dispatch(t.value)
+
+    def _Return(self, t):
+        self.fill("return")
+        if t.value:
+            self.write(" ")
+            self.dispatch(t.value)
+
+    def _Pass(self, t):
+        self.fill("pass")
+
+    def _Break(self, t):
+        self.fill("break")
+
+    def _Continue(self, t):
+        self.fill("continue")
+
+    def _Delete(self, t):
+        self.fill("del ")
+        interleave(lambda: self.write(", "), self.dispatch, t.targets)
+
+    def _Assert(self, t):
+        self.fill("assert ")
+        self.dispatch(t.test)
+        if t.msg:
+            self.write(", ")
+            self.dispatch(t.msg)
+
+    def _Exec(self, t):
+        self.fill("exec ")
+        self.dispatch(t.body)
+        if t.globals:
+            self.write(" in ")
+            self.dispatch(t.globals)
+        if t.locals:
+            self.write(", ")
+            self.dispatch(t.locals)
+
+    def _Print(self, t):
+        self.fill("print ")
+        do_comma = False
+        if t.dest:
+            self.write(">>")
+            self.dispatch(t.dest)
+            do_comma = True
+        for e in t.values:
+            if do_comma:self.write(", ")
+            else:do_comma=True
+            self.dispatch(e)
+        if not t.nl:
+            self.write(",")
+
+    def _Global(self, t):
+        self.fill("global ")
+        interleave(lambda: self.write(", "), self.write, t.names)
+
+    def _Yield(self, t):
+        self.write("(")
+        self.write("yield")
+        if t.value:
+            self.write(" ")
+            self.dispatch(t.value)
+        self.write(")")
+
+    def _Raise(self, t):
+        self.fill('raise ')
+        if t.type:
+            self.dispatch(t.type)
+        if t.inst:
+            self.write(", ")
+            self.dispatch(t.inst)
+        if t.tback:
+            self.write(", ")
+            self.dispatch(t.tback)
+
+    def _TryExcept(self, t):
+        self.fill("try")
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+        for ex in t.handlers:
+            self.dispatch(ex)
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _TryFinally(self, t):
+        if len(t.body) == 1 and isinstance(t.body[0], ast.TryExcept):
+            # try-except-finally
+            self.dispatch(t.body)
+        else:
+            self.fill("try")
+            self.enter()
+            self.dispatch(t.body)
+            self.leave()
+
+        self.fill("finally")
+        self.enter()
+        self.dispatch(t.finalbody)
+        self.leave()
+
+    def _ExceptHandler(self, t):
+        self.fill("except")
+        if t.type:
+            self.write(" ")
+            self.dispatch(t.type)
+        if t.name:
+            self.write(" as ")
+            self.dispatch(t.name)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _ClassDef(self, t):
+        self.write("\n")
+        for deco in t.decorator_list:
+            self.fill("@")
+            self.dispatch(deco)
+        self.fill("class "+t.name)
+        if t.bases:
+            self.write("(")
+            for a in t.bases:
+                self.dispatch(a)
+                self.write(", ")
+            self.write(")")
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _FunctionDef(self, t):
+        self.write("\n")
+        for deco in t.decorator_list:
+            self.fill("@")
+            self.dispatch(deco)
+        self.fill("def "+t.name + "(")
+        self.dispatch(t.args)
+        self.write(")")
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _For(self, t):
+        self.fill("for ")
+        self.dispatch(t.target)
+        self.write(" in ")
+        self.dispatch(t.iter)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _If(self, t):
+        self.fill("if ")
+        self.dispatch(t.test)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        # collapse nested ifs into equivalent elifs.
+        while (t.orelse and len(t.orelse) == 1 and
+               isinstance(t.orelse[0], ast.If)):
+            t = t.orelse[0]
+            self.fill("elif ")
+            self.dispatch(t.test)
+            self.enter()
+            self.dispatch(t.body)
+            self.leave()
+        # final else
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _While(self, t):
+        self.fill("while ")
+        self.dispatch(t.test)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _With(self, t):
+        self.fill("with ")
+        self.dispatch(t.context_expr)
+        if t.optional_vars:
+            self.write(" as ")
+            self.dispatch(t.optional_vars)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    # expr
+    def _Str(self, tree):
+        # if from __future__ import unicode_literals is in effect,
+        # then we want to output string literals using a 'b' prefix
+        # and unicode literals with no prefix.
+        if "unicode_literals" not in self.future_imports:
+            self.write(repr(tree.s))
+        elif isinstance(tree.s, str):
+            self.write("b" + repr(tree.s))
+        elif isinstance(tree.s, unicode):
+            self.write(repr(tree.s).lstrip("u"))
+        else:
+            assert False, "shouldn't get here"
+
+    def _Name(self, t):
+        self.write(t.id)
+
+    def _Repr(self, t):
+        self.write("`")
+        self.dispatch(t.value)
+        self.write("`")
+
+    def _Num(self, t):
+        repr_n = repr(t.n)
+        # Parenthesize negative numbers, to avoid turning (-1)**2 into -1**2.
+        if repr_n.startswith("-"):
+            self.write("(")
+        # Substitute overflowing decimal literal for AST infinities.
+        self.write(repr_n.replace("inf", INFSTR))
+        if repr_n.startswith("-"):
+            self.write(")")
+
+    def _List(self, t):
+        self.write("[")
+        interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write("]")
+
+    def _ListComp(self, t):
+        self.write("[")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("]")
+
+    def _GeneratorExp(self, t):
+        self.write("(")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write(")")
+
+    def _SetComp(self, t):
+        self.write("{")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("}")
+
+    def _DictComp(self, t):
+        self.write("{")
+        self.dispatch(t.key)
+        self.write(": ")
+        self.dispatch(t.value)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("}")
+
+    def _comprehension(self, t):
+        self.write(" for ")
+        self.dispatch(t.target)
+        self.write(" in ")
+        self.dispatch(t.iter)
+        for if_clause in t.ifs:
+            self.write(" if ")
+            self.dispatch(if_clause)
+
+    def _IfExp(self, t):
+        self.write("(")
+        self.dispatch(t.body)
+        self.write(" if ")
+        self.dispatch(t.test)
+        self.write(" else ")
+        self.dispatch(t.orelse)
+        self.write(")")
+
+    def _Set(self, t):
+        assert(t.elts) # should be at least one element
+        self.write("{")
+        interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write("}")
+
+    def _Dict(self, t):
+        self.write("{")
+        def write_pair(pair):
+            (k, v) = pair
+            self.dispatch(k)
+            self.write(": ")
+            self.dispatch(v)
+        interleave(lambda: self.write(", "), write_pair, zip(t.keys, t.values))
+        self.write("}")
+
+    def _Tuple(self, t):
+        self.write("(")
+        if len(t.elts) == 1:
+            (elt,) = t.elts
+            self.dispatch(elt)
+            self.write(",")
+        else:
+            interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write(")")
+
+    unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"}
+    def _UnaryOp(self, t):
+        self.write("(")
+        self.write(self.unop[t.op.__class__.__name__])
+        self.write(" ")
+        # If we're applying unary minus to a number, parenthesize the number.
+        # This is necessary: -2147483648 is different from -(2147483648) on
+        # a 32-bit machine (the first is an int, the second a long), and
+        # -7j is different from -(7j).  (The first has real part 0.0, the second
+        # has real part -0.0.)
+        if isinstance(t.op, ast.USub) and isinstance(t.operand, ast.Num):
+            self.write("(")
+            self.dispatch(t.operand)
+            self.write(")")
+        else:
+            self.dispatch(t.operand)
+        self.write(")")
+
+    binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%",
+                    "LShift":"<<", "RShift":">>", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
+                    "FloorDiv":"//", "Pow": "**"}
+    def _BinOp(self, t):
+        self.write("(")
+        self.dispatch(t.left)
+        self.write(" " + self.binop[t.op.__class__.__name__] + " ")
+        self.dispatch(t.right)
+        self.write(")")
+
+    cmpops = {"Eq":"==", "NotEq":"!=", "Lt":"<", "LtE":"<=", "Gt":">", "GtE":">=",
+                        "Is":"is", "IsNot":"is not", "In":"in", "NotIn":"not in"}
+    def _Compare(self, t):
+        self.write("(")
+        self.dispatch(t.left)
+        for o, e in zip(t.ops, t.comparators):
+            self.write(" " + self.cmpops[o.__class__.__name__] + " ")
+            self.dispatch(e)
+        self.write(")")
+
+    boolops = {ast.And: 'and', ast.Or: 'or'}
+    def _BoolOp(self, t):
+        self.write("(")
+        s = " %s " % self.boolops[t.op.__class__]
+        interleave(lambda: self.write(s), self.dispatch, t.values)
+        self.write(")")
+
+    def _Attribute(self,t):
+        self.dispatch(t.value)
+        # Special case: 3.__abs__() is a syntax error, so if t.value
+        # is an integer literal then we need to either parenthesize
+        # it or add an extra space to get 3 .__abs__().
+        if isinstance(t.value, ast.Num) and isinstance(t.value.n, int):
+            self.write(" ")
+        self.write(".")
+        self.write(t.attr)
+
+    def _Call(self, t):
+        self.dispatch(t.func)
+        self.write("(")
+        comma = False
+        for e in t.args:
+            if comma: self.write(", ")
+            else: comma = True
+            self.dispatch(e)
+        for e in t.keywords:
+            if comma: self.write(", ")
+            else: comma = True
+            self.dispatch(e)
+        if t.starargs:
+            if comma: self.write(", ")
+            else: comma = True
+            self.write("*")
+            self.dispatch(t.starargs)
+        if t.kwargs:
+            if comma: self.write(", ")
+            else: comma = True
+            self.write("**")
+            self.dispatch(t.kwargs)
+        self.write(")")
+
+    def _Subscript(self, t):
+        self.dispatch(t.value)
+        self.write("[")
+        self.dispatch(t.slice)
+        self.write("]")
+
+    # slice
+    def _Ellipsis(self, t):
+        self.write("...")
+
+    def _Index(self, t):
+        self.dispatch(t.value)
+
+    def _Slice(self, t):
+        if t.lower:
+            self.dispatch(t.lower)
+        self.write(":")
+        if t.upper:
+            self.dispatch(t.upper)
+        if t.step:
+            self.write(":")
+            self.dispatch(t.step)
+
+    def _ExtSlice(self, t):
+        interleave(lambda: self.write(', '), self.dispatch, t.dims)
+
+    # others
+    def _arguments(self, t):
+        first = True
+        # normal arguments
+        defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
+        for a,d in zip(t.args, defaults):
+            if first:first = False
+            else: self.write(", ")
+            self.dispatch(a),
+            if d:
+                self.write("=")
+                self.dispatch(d)
+
+        # varargs
+        if t.vararg:
+            if first:first = False
+            else: self.write(", ")
+            self.write("*")
+            self.write(t.vararg)
+
+        # kwargs
+        if t.kwarg:
+            if first:first = False
+            else: self.write(", ")
+            self.write("**"+t.kwarg)
+
+    def _keyword(self, t):
+        self.write(t.arg)
+        self.write("=")
+        self.dispatch(t.value)
+
+    def _Lambda(self, t):
+        self.write("(")
+        self.write("lambda ")
+        self.dispatch(t.args)
+        self.write(": ")
+        self.dispatch(t.body)
+        self.write(")")
+
+    def _alias(self, t):
+        self.write(t.name)
+        if t.asname:
+            self.write(" as "+t.asname)
+
+def roundtrip(filename, output=sys.stdout):
+    with open(filename, "r") as pyfile:
+        source = pyfile.read()
+    tree = compile(source, filename, "exec", ast.PyCF_ONLY_AST)
+    Unparser(tree, output)
+
+
+
+def testdir(a):
+    try:
+        names = [n for n in os.listdir(a) if n.endswith('.py')]
+    except OSError:
+        sys.stderr.write("Directory not readable: %s" % a)
+    else:
+        for n in names:
+            fullname = os.path.join(a, n)
+            if os.path.isfile(fullname):
+                output = cStringIO.StringIO()
+                print 'Testing %s' % fullname
+                try:
+                    roundtrip(fullname, output)
+                except Exception as e:
+                    print '  Failed to compile, exception is %s' % repr(e)
+            elif os.path.isdir(fullname):
+                testdir(fullname)
+
+def main(args):
+    if args[0] == '--testdir':
+        for a in args[1:]:
+            testdir(a)
+    else:
+        for a in args:
+            roundtrip(a)
+
+if __name__=='__main__':
+    main(sys.argv[1:])
diff --git a/pinfer/unparse3.py b/pinfer/unparse3.py
new file mode 100644
index 0000000..0936cb2
--- /dev/null
+++ b/pinfer/unparse3.py
@@ -0,0 +1,610 @@
+# From Python 3's Tools/parser/unparse.py
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
+
+"Usage: unparse.py <path to source file>"
+import sys
+import ast
+import tokenize
+import io
+import os
+
+# Large float and imaginary literals get turned into infinities in the AST.
+# We unparse those infinities to INFSTR.
+INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
+
+def interleave(inter, f, seq):
+    """Call f on each item in seq, calling inter() in between.
+    """
+    seq = iter(seq)
+    try:
+        f(next(seq))
+    except StopIteration:
+        pass
+    else:
+        for x in seq:
+            inter()
+            f(x)
+
+class Unparser:
+    """Methods in this class recursively traverse an AST and
+    output source code for the abstract syntax; original formatting
+    is disregarded. """
+
+    def __init__(self, tree, file = sys.stdout):
+        """Unparser(tree, file=sys.stdout) -> None.
+         Print the source for tree to file."""
+        self.f = file
+        self._indent = 0
+        self.dispatch(tree)
+        print("", file=self.f)
+        self.f.flush()
+
+    def fill(self, text = ""):
+        "Indent a piece of text, according to the current indentation level"
+        self.f.write("\n"+"    "*self._indent + text)
+
+    def write(self, text):
+        "Append a piece of text to the current line."
+        self.f.write(text)
+
+    def enter(self):
+        "Print ':', and increase the indentation."
+        self.write(":")
+        self._indent += 1
+
+    def leave(self):
+        "Decrease the indentation level."
+        self._indent -= 1
+
+    def dispatch(self, tree):
+        "Dispatcher function, dispatching tree type T to method _T."
+        if isinstance(tree, list):
+            for t in tree:
+                self.dispatch(t)
+            return
+        meth = getattr(self, "_"+tree.__class__.__name__)
+        meth(tree)
+
+
+    ############### Unparsing methods ######################
+    # There should be one method per concrete grammar type #
+    # Constructors should be grouped by sum type. Ideally, #
+    # this would follow the order in the grammar, but      #
+    # currently doesn't.                                   #
+    ########################################################
+
+    def _Module(self, tree):
+        for stmt in tree.body:
+            self.dispatch(stmt)
+
+    # stmt
+    def _Expr(self, tree):
+        self.fill()
+        self.dispatch(tree.value)
+
+    def _Import(self, t):
+        self.fill("import ")
+        interleave(lambda: self.write(", "), self.dispatch, t.names)
+
+    def _ImportFrom(self, t):
+        self.fill("from ")
+        self.write("." * t.level)
+        if t.module:
+            self.write(t.module)
+        self.write(" import ")
+        interleave(lambda: self.write(", "), self.dispatch, t.names)
+
+    def _Assign(self, t):
+        self.fill()
+        for target in t.targets:
+            self.dispatch(target)
+            self.write(" = ")
+        self.dispatch(t.value)
+
+    def _AugAssign(self, t):
+        self.fill()
+        self.dispatch(t.target)
+        self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
+        self.dispatch(t.value)
+
+    def _Return(self, t):
+        self.fill("return")
+        if t.value:
+            self.write(" ")
+            self.dispatch(t.value)
+
+    def _Pass(self, t):
+        self.fill("pass")
+
+    def _Break(self, t):
+        self.fill("break")
+
+    def _Continue(self, t):
+        self.fill("continue")
+
+    def _Delete(self, t):
+        self.fill("del ")
+        interleave(lambda: self.write(", "), self.dispatch, t.targets)
+
+    def _Assert(self, t):
+        self.fill("assert ")
+        self.dispatch(t.test)
+        if t.msg:
+            self.write(", ")
+            self.dispatch(t.msg)
+
+    def _Global(self, t):
+        self.fill("global ")
+        interleave(lambda: self.write(", "), self.write, t.names)
+
+    def _Nonlocal(self, t):
+        self.fill("nonlocal ")
+        interleave(lambda: self.write(", "), self.write, t.names)
+
+    def _Yield(self, t):
+        self.write("(")
+        self.write("yield")
+        if t.value:
+            self.write(" ")
+            self.dispatch(t.value)
+        self.write(")")
+
+    def _YieldFrom(self, t):
+        self.write("(")
+        self.write("yield from")
+        if t.value:
+            self.write(" ")
+            self.dispatch(t.value)
+        self.write(")")
+
+    def _Raise(self, t):
+        self.fill("raise")
+        if not t.exc:
+            assert not t.cause
+            return
+        self.write(" ")
+        self.dispatch(t.exc)
+        if t.cause:
+            self.write(" from ")
+            self.dispatch(t.cause)
+
+    def _Try(self, t):
+        self.fill("try")
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        for ex in t.handlers:
+            self.dispatch(ex)
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+        if t.finalbody:
+            self.fill("finally")
+            self.enter()
+            self.dispatch(t.finalbody)
+            self.leave()
+
+    def _ExceptHandler(self, t):
+        self.fill("except")
+        if t.type:
+            self.write(" ")
+            self.dispatch(t.type)
+        if t.name:
+            self.write(" as ")
+            self.write(t.name)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _ClassDef(self, t):
+        self.write("\n")
+        for deco in t.decorator_list:
+            self.fill("@")
+            self.dispatch(deco)
+        self.fill("class "+t.name)
+        self.write("(")
+        comma = False
+        for e in t.bases:
+            if comma: self.write(", ")
+            else: comma = True
+            self.dispatch(e)
+        for e in t.keywords:
+            if comma: self.write(", ")
+            else: comma = True
+            self.dispatch(e)
+        if t.starargs:
+            if comma: self.write(", ")
+            else: comma = True
+            self.write("*")
+            self.dispatch(t.starargs)
+        if t.kwargs:
+            if comma: self.write(", ")
+            else: comma = True
+            self.write("**")
+            self.dispatch(t.kwargs)
+        self.write(")")
+
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _FunctionDef(self, t):
+        self.write("\n")
+        for deco in t.decorator_list:
+            self.fill("@")
+            self.dispatch(deco)
+        self.fill("def "+t.name + "(")
+        self.dispatch(t.args)
+        self.write(")")
+        if t.returns:
+            self.write(" -> ")
+            self.dispatch(t.returns)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _For(self, t):
+        self.fill("for ")
+        self.dispatch(t.target)
+        self.write(" in ")
+        self.dispatch(t.iter)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _If(self, t):
+        self.fill("if ")
+        self.dispatch(t.test)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        # collapse nested ifs into equivalent elifs.
+        while (t.orelse and len(t.orelse) == 1 and
+               isinstance(t.orelse[0], ast.If)):
+            t = t.orelse[0]
+            self.fill("elif ")
+            self.dispatch(t.test)
+            self.enter()
+            self.dispatch(t.body)
+            self.leave()
+        # final else
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _While(self, t):
+        self.fill("while ")
+        self.dispatch(t.test)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _With(self, t):
+        self.fill("with ")
+        interleave(lambda: self.write(", "), self.dispatch, t.items)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    # expr
+    def _Bytes(self, t):
+        self.write(repr(t.s))
+
+    def _Str(self, tree):
+        self.write(repr(tree.s))
+
+    def _Name(self, t):
+        self.write(t.id)
+
+    def _NameConstant(self, t):
+        self.write(repr(t.value))
+
+    def _Num(self, t):
+        # Substitute overflowing decimal literal for AST infinities.
+        self.write(repr(t.n).replace("inf", INFSTR))
+
+    def _List(self, t):
+        self.write("[")
+        interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write("]")
+
+    def _ListComp(self, t):
+        self.write("[")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("]")
+
+    def _GeneratorExp(self, t):
+        self.write("(")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write(")")
+
+    def _SetComp(self, t):
+        self.write("{")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("}")
+
+    def _DictComp(self, t):
+        self.write("{")
+        self.dispatch(t.key)
+        self.write(": ")
+        self.dispatch(t.value)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("}")
+
+    def _comprehension(self, t):
+        self.write(" for ")
+        self.dispatch(t.target)
+        self.write(" in ")
+        self.dispatch(t.iter)
+        for if_clause in t.ifs:
+            self.write(" if ")
+            self.dispatch(if_clause)
+
+    def _IfExp(self, t):
+        self.write("(")
+        self.dispatch(t.body)
+        self.write(" if ")
+        self.dispatch(t.test)
+        self.write(" else ")
+        self.dispatch(t.orelse)
+        self.write(")")
+
+    def _Set(self, t):
+        assert(t.elts) # should be at least one element
+        self.write("{")
+        interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write("}")
+
+    def _Dict(self, t):
+        self.write("{")
+        def write_pair(pair):
+            (k, v) = pair
+            self.dispatch(k)
+            self.write(": ")
+            self.dispatch(v)
+        interleave(lambda: self.write(", "), write_pair, zip(t.keys, t.values))
+        self.write("}")
+
+    def _Tuple(self, t):
+        self.write("(")
+        if len(t.elts) == 1:
+            (elt,) = t.elts
+            self.dispatch(elt)
+            self.write(",")
+        else:
+            interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write(")")
+
+    unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"}
+    def _UnaryOp(self, t):
+        self.write("(")
+        self.write(self.unop[t.op.__class__.__name__])
+        self.write(" ")
+        self.dispatch(t.operand)
+        self.write(")")
+
+    binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%",
+                    "LShift":"<<", "RShift":">>", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
+                    "FloorDiv":"//", "Pow": "**"}
+    def _BinOp(self, t):
+        self.write("(")
+        self.dispatch(t.left)
+        self.write(" " + self.binop[t.op.__class__.__name__] + " ")
+        self.dispatch(t.right)
+        self.write(")")
+
+    cmpops = {"Eq":"==", "NotEq":"!=", "Lt":"<", "LtE":"<=", "Gt":">", "GtE":">=",
+                        "Is":"is", "IsNot":"is not", "In":"in", "NotIn":"not in"}
+    def _Compare(self, t):
+        self.write("(")
+        self.dispatch(t.left)
+        for o, e in zip(t.ops, t.comparators):
+            self.write(" " + self.cmpops[o.__class__.__name__] + " ")
+            self.dispatch(e)
+        self.write(")")
+
+    boolops = {ast.And: 'and', ast.Or: 'or'}
+    def _BoolOp(self, t):
+        self.write("(")
+        s = " %s " % self.boolops[t.op.__class__]
+        interleave(lambda: self.write(s), self.dispatch, t.values)
+        self.write(")")
+
+    def _Attribute(self,t):
+        self.dispatch(t.value)
+        # Special case: 3.__abs__() is a syntax error, so if t.value
+        # is an integer literal then we need to either parenthesize
+        # it or add an extra space to get 3 .__abs__().
+        if isinstance(t.value, ast.Num) and isinstance(t.value.n, int):
+            self.write(" ")
+        self.write(".")
+        self.write(t.attr)
+
+    def _Call(self, t):
+        self.dispatch(t.func)
+        self.write("(")
+        comma = False
+        for e in t.args:
+            if comma: self.write(", ")
+            else: comma = True
+            self.dispatch(e)
+        for e in t.keywords:
+            if comma: self.write(", ")
+            else: comma = True
+            self.dispatch(e)
+        if t.starargs:
+            if comma: self.write(", ")
+            else: comma = True
+            self.write("*")
+            self.dispatch(t.starargs)
+        if t.kwargs:
+            if comma: self.write(", ")
+            else: comma = True
+            self.write("**")
+            self.dispatch(t.kwargs)
+        self.write(")")
+
+    def _Subscript(self, t):
+        self.dispatch(t.value)
+        self.write("[")
+        self.dispatch(t.slice)
+        self.write("]")
+
+    def _Starred(self, t):
+        self.write("*")
+        self.dispatch(t.value)
+
+    # slice
+    def _Ellipsis(self, t):
+        self.write("...")
+
+    def _Index(self, t):
+        self.dispatch(t.value)
+
+    def _Slice(self, t):
+        if t.lower:
+            self.dispatch(t.lower)
+        self.write(":")
+        if t.upper:
+            self.dispatch(t.upper)
+        if t.step:
+            self.write(":")
+            self.dispatch(t.step)
+
+    def _ExtSlice(self, t):
+        interleave(lambda: self.write(', '), self.dispatch, t.dims)
+
+    # argument
+    def _arg(self, t):
+        self.write(t.arg)
+        if t.annotation:
+            self.write(": ")
+            self.dispatch(t.annotation)
+
+    # others
+    def _arguments(self, t):
+        first = True
+        # normal arguments
+        defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
+        for a, d in zip(t.args, defaults):
+            if first:first = False
+            else: self.write(", ")
+            self.dispatch(a)
+            if d:
+                self.write("=")
+                self.dispatch(d)
+
+        # varargs, or bare '*' if no varargs but keyword-only arguments present
+        if t.vararg or t.kwonlyargs:
+            if first:first = False
+            else: self.write(", ")
+            self.write("*")
+            if t.vararg:
+                self.write(t.vararg.arg)
+                if t.vararg.annotation:
+                    self.write(": ")
+                    self.dispatch(t.vararg.annotation)
+
+        # keyword-only arguments
+        if t.kwonlyargs:
+            for a, d in zip(t.kwonlyargs, t.kw_defaults):
+                if first:first = False
+                else: self.write(", ")
+                self.dispatch(a),
+                if d:
+                    self.write("=")
+                    self.dispatch(d)
+
+        # kwargs
+        if t.kwarg:
+            if first:first = False
+            else: self.write(", ")
+            self.write("**"+t.kwarg.arg)
+            if t.kwarg.annotation:
+                self.write(": ")
+                self.dispatch(t.kwarg.annotation)
+
+    def _keyword(self, t):
+        self.write(t.arg)
+        self.write("=")
+        self.dispatch(t.value)
+
+    def _Lambda(self, t):
+        self.write("(")
+        self.write("lambda ")
+        self.dispatch(t.args)
+        self.write(": ")
+        self.dispatch(t.body)
+        self.write(")")
+
+    def _alias(self, t):
+        self.write(t.name)
+        if t.asname:
+            self.write(" as "+t.asname)
+
+    def _withitem(self, t):
+        self.dispatch(t.context_expr)
+        if t.optional_vars:
+            self.write(" as ")
+            self.dispatch(t.optional_vars)
+
+def roundtrip(filename, output=sys.stdout):
+    with open(filename, "rb") as pyfile:
+        encoding = tokenize.detect_encoding(pyfile.readline)[0]
+    with open(filename, "r", encoding=encoding) as pyfile:
+        source = pyfile.read()
+    tree = compile(source, filename, "exec", ast.PyCF_ONLY_AST)
+    Unparser(tree, output)
+
+
+
+def testdir(a):
+    try:
+        names = [n for n in os.listdir(a) if n.endswith('.py')]
+    except OSError:
+        print("Directory not readable: %s" % a, file=sys.stderr)
+    else:
+        for n in names:
+            fullname = os.path.join(a, n)
+            if os.path.isfile(fullname):
+                output = io.StringIO()
+                print('Testing %s' % fullname)
+                try:
+                    roundtrip(fullname, output)
+                except Exception as e:
+                    print('  Failed to compile, exception is %s' % repr(e))
+            elif os.path.isdir(fullname):
+                testdir(fullname)
+
+def main(args):
+    if args[0] == '--testdir':
+        for a in args[1:]:
+            testdir(a)
+    else:
+        for a in args:
+            roundtrip(a)
+
+if __name__=='__main__':
+    main(sys.argv[1:])
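
For orientation, a small sketch of driving Unparser on an in-memory AST instead of
a file; the import path is an assumption, since pinfer/ is not packaged, so
unparse3.py would have to be on sys.path:

    import ast
    import io

    from unparse3 import Unparser  # assumption: pinfer/ is on sys.path

    tree = ast.parse("x = [i * i for i in range(10)]")
    buf = io.StringIO()
    Unparser(tree, buf)       # writes the regenerated source into buf
    print(buf.getvalue())     # roughly: x = [(i * i) for i in range(10)]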
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..cfbac25
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,21 @@
+[pytest]
+# testpaths is new in 2.8
+minversion = 2.8
+
+testpaths = mypy/test
+
+python_files = test*.py
+
+# Where do the test cases come from?  We provide our own collection
+# logic by implementing `pytest_pycollect_makeitem` in mypy.test.data;
+# the test files import that module, and pytest sees the magic name
+# and invokes it at the relevant moment.  See
+# http://doc.pytest.org/en/latest/writing_plugins.html#collection-hooks
+
+# Because we provide our own collection logic, disable the default
+# python collector by giving it empty patterns to search for.
+python_classes =
+python_functions =
+
+# always run in parallel (requires pytest-xdist, see test-requirements.txt)
+addopts = -nauto --cov-append --cov-report=
diff --git a/runtests.py b/runtests.py
new file mode 100755
index 0000000..634b4ce
--- /dev/null
+++ b/runtests.py
@@ -0,0 +1,461 @@
+#!/usr/bin/env python3
+"""Mypy test runner."""
+
+from typing import Dict, List, Optional, Set, Iterable
+
+from mypy.waiter import Waiter, LazySubprocess
+from mypy import util
+from mypy.test.config import test_data_prefix
+from mypy.test.testpythoneval import python_eval_files, python_34_eval_files
+
+import itertools
+import os
+from os.path import join, isdir
+import re
+import sys
+
+
+def get_versions():  # type: () -> List[str]
+    major = sys.version_info[0]
+    minor = sys.version_info[1]
+    if major == 2:
+        return ['2.7']
+    else:
+        # Generate the list of Python versions to use: from the current 3.x
+        # minor version down to 3.0, e.g. [3.6, 3.5, ..., 3.0].
+        # (The Python 2 case was handled above and returns only [2.7].)
+        return ['%d.%d' % (major, i) for i in range(minor, -1, -1)]
+
+
+# Ideally, all tests would be `discover`able so that they can be driven
+# (and parallelized) by an external test driver.
+
+class Driver:
+
+    def __init__(self, *, whitelist: List[str], blacklist: List[str],
+            lf: bool, ff: bool,
+            arglist: List[str], pyt_arglist: List[str],
+            verbosity: int, parallel_limit: int,
+            xfail: List[str], coverage: bool) -> None:
+        self.whitelist = whitelist
+        self.blacklist = blacklist
+        self.arglist = arglist
+        self.pyt_arglist = pyt_arglist
+        self.verbosity = verbosity
+        self.waiter = Waiter(verbosity=verbosity, limit=parallel_limit, xfail=xfail, lf=lf, ff=ff)
+        self.versions = get_versions()
+        self.cwd = os.getcwd()
+        self.mypy = os.path.join(self.cwd, 'scripts', 'mypy')
+        self.env = dict(os.environ)
+        self.coverage = coverage
+
+    def prepend_path(self, name: str, paths: List[str]) -> None:
+        old_val = self.env.get(name)
+        paths = [p for p in paths if isdir(p)]
+        if not paths:
+            return
+        if old_val is not None:
+            new_val = os.pathsep.join(itertools.chain(paths, [old_val]))
+        else:
+            new_val = os.pathsep.join(paths)
+        self.env[name] = new_val
+
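+    # A task is selected when its name contains any whitelist substring and
+    # none of the blacklist substrings; an empty whitelist entry matches all.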
+    def allow(self, name: str) -> bool:
+        if any(f in name for f in self.whitelist):
+            if not any(f in name for f in self.blacklist):
+                if self.verbosity >= 2:
+                    print('SELECT   #%d %s' % (len(self.waiter.queue), name))
+                return True
+        if self.verbosity >= 3:
+            print('OMIT     %s' % name)
+        return False
+
+    def add_mypy_cmd(self, name: str, mypy_args: List[str], cwd: Optional[str] = None) -> None:
+        full_name = 'check %s' % name
+        if not self.allow(full_name):
+            return
+        args = [sys.executable, self.mypy] + mypy_args
+        args.append('--show-traceback')
+        self.waiter.add(LazySubprocess(full_name, args, cwd=cwd, env=self.env))
+
+    def add_mypy(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
+        self.add_mypy_cmd(name, list(args), cwd=cwd)
+
+    def add_mypy_modules(self, name: str, modules: Iterable[str],
+                         cwd: Optional[str] = None) -> None:
+        args = list(itertools.chain(*(['-m', mod] for mod in modules)))
+        self.add_mypy_cmd(name, args, cwd=cwd)
+
+    def add_mypy_package(self, name: str, packagename: str, *flags: str) -> None:
+        self.add_mypy_cmd(name, ['-p', packagename] + list(flags))
+
+    def add_mypy_string(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
+        self.add_mypy_cmd(name, ['-c'] + list(args), cwd=cwd)
+
+    def add_pytest(self, name: str, pytest_args: List[str], coverage: bool = False) -> None:
+        full_name = 'pytest %s' % name
+        if not self.allow(full_name):
+            return
+        if coverage and self.coverage:
+            args = [sys.executable, '-m', 'pytest', '--cov=mypy'] + pytest_args
+        else:
+            args = [sys.executable, '-m', 'pytest'] + pytest_args
+
+        self.waiter.add(LazySubprocess(full_name, args, env=self.env, passthrough=self.verbosity),
+                        sequential=True)
+
+    def add_python(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
+        name = 'run %s' % name
+        if not self.allow(name):
+            return
+        largs = list(args)
+        largs[0:0] = [sys.executable]
+        env = self.env
+        self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))
+
+    def add_python_mod(self, name: str, *args: str, cwd: Optional[str] = None,
+                       coverage: bool = False) -> None:
+        name = 'run %s' % name
+        if not self.allow(name):
+            return
+        largs = list(args)
+        if coverage and self.coverage:
+            largs[0:0] = ['coverage', 'run', '-m']
+        else:
+            largs[0:0] = [sys.executable, '-m']
+        env = self.env
+        self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))
+
+    def add_python_string(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
+        name = 'run %s' % name
+        if not self.allow(name):
+            return
+        largs = list(args)
+        largs[0:0] = [sys.executable, '-c']
+        env = self.env
+        self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))
+
+    def add_python2(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
+        name = 'run2 %s' % name
+        if not self.allow(name):
+            return
+        largs = list(args)
+        python2 = util.try_find_python2_interpreter()
+        assert python2, "Couldn't find a Python 2.7 interpreter"
+        largs[0:0] = [python2]
+        env = self.env
+        self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))
+
+    def add_flake8(self, cwd: Optional[str] = None) -> None:
+        name = 'lint'
+        if not self.allow(name):
+            return
+        largs = ['flake8', '-j0']
+        env = self.env
+        self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))
+
+    def list_tasks(self) -> None:
+        for id, task in enumerate(self.waiter.queue):
+            print('{id}:{task}'.format(id=id, task=task.name))
+
+
+def add_basic(driver: Driver) -> None:
+    if False:
+        driver.add_mypy('file setup.py', 'setup.py')
+    driver.add_mypy('file runtests.py', 'runtests.py')
+    driver.add_mypy('legacy entry script', 'scripts/mypy')
+    driver.add_mypy('legacy myunit script', 'scripts/myunit')
+    # needs typed_ast installed:
+    driver.add_mypy('fast-parse', '--fast-parse', 'test-data/samples/hello.py')
+
+
+def add_selftypecheck(driver: Driver) -> None:
+    driver.add_mypy_package('package mypy nonstrict optional', 'mypy', '--config-file',
+                            'mypy_self_check.ini')
+    driver.add_mypy_package('package mypy', 'mypy', '--config-file', 'mypy_strict_optional.ini')
+
+
+def find_files(base: str, prefix: str = '', suffix: str = '') -> List[str]:
+    return [join(root, f)
+            for root, dirs, files in os.walk(base)
+            for f in files
+            if f.startswith(prefix) and f.endswith(suffix)]
+
+
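+# Map a file path such as 'mypy/test/__init__.py' to the dotted module name
+# used to import it ('mypy.test').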
+def file_to_module(file: str) -> str:
+    rv = os.path.splitext(file)[0].replace(os.sep, '.')
+    if rv.endswith('.__init__'):
+        rv = rv[:-len('.__init__')]
+    return rv
+
+
+def add_imports(driver: Driver) -> None:
+    # Make sure each module can be imported individually.
+    # There is currently a bug in mypy where a module can pass the type check
+    # because of *implicit* imports from other modules.
+    for f in find_files('mypy', suffix='.py'):
+        mod = file_to_module(f)
+        if not mod.endswith('.__main__'):
+            driver.add_python_string('import %s' % mod, 'import %s' % mod)
+
+
+PYTEST_FILES = [os.path.join('mypy', 'test', '{}.py'.format(name)) for name in [
+    'testcheck',
+    'testextensions',
+    'testdeps',
+    'testdiff',
+    'testfinegrained',
+    'testmerge',
+]]
+
+
+def add_pytest(driver: Driver) -> None:
+    driver.add_pytest('pytest', PYTEST_FILES + driver.arglist + driver.pyt_arglist, True)
+
+
+def add_myunit(driver: Driver) -> None:
+    for f in find_files('mypy', prefix='test', suffix='.py'):
+        mod = file_to_module(f)
+        if mod in ('mypy.test.testpythoneval', 'mypy.test.testcmdline'):
+            # Run Python evaluation integration tests and command-line
+            # parsing tests separately since they are much slower than
+            # proper unit tests.
+            pass
+        elif f in PYTEST_FILES:
+            # This module has been converted to pytest; don't try to use myunit.
+            pass
+        else:
+            driver.add_python_mod('unit-test %s' % mod, 'mypy.myunit', '-m', mod,
+                                  *driver.arglist, coverage=True)
+
+
+def add_pythoneval(driver: Driver) -> None:
+    cases = set()
+    case_re = re.compile(r'^\[case ([^\]]+)\]$')
+    for file in python_eval_files + python_34_eval_files:
+        with open(os.path.join(test_data_prefix, file), 'r') as f:
+            for line in f:
+                m = case_re.match(line)
+                if m:
+                    case_name = m.group(1)
+                    assert case_name[:4] == 'test'
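+                    # Shard the slow eval suite by the first character that
+                    # follows 'test' in the case name.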
+                    cases.add(case_name[4:5])
+
+    for prefix in sorted(cases):
+        driver.add_python_mod(
+            'eval-test-' + prefix,
+            'mypy.myunit',
+            '-m',
+            'mypy.test.testpythoneval',
+            'test_testpythoneval_PythonEvaluationSuite.test' + prefix + '*',
+            *driver.arglist,
+            coverage=True
+        )
+
+
+def add_cmdline(driver: Driver) -> None:
+    driver.add_python_mod('cmdline-test', 'mypy.myunit',
+                          '-m', 'mypy.test.testcmdline', *driver.arglist,
+                         coverage=True)
+
+
+def add_stubs(driver: Driver) -> None:
+    # We only test each module in the one version mypy prefers to find.
+    # TODO: test stubs for other versions, especially Python 2 stubs.
+
+    modules = set()  # type: Set[str]
+    modules.add('typing')
+    # TODO: This should also test Python 2, and pass pyversion accordingly.
+    for version in ["2and3", "3", "3.3", "3.4", "3.5"]:
+        for stub_type in ['builtins', 'stdlib', 'third_party']:
+            stubdir = join('typeshed', stub_type, version)
+            for f in find_files(stubdir, suffix='.pyi'):
+                module = file_to_module(f[len(stubdir) + 1:])
+                modules.add(module)
+
+    driver.add_mypy_modules('stubs', sorted(modules))
+
+
+def add_stdlibsamples(driver: Driver) -> None:
+    seen = set()  # type: Set[str]
+    for version in driver.versions:
+        stdlibsamples_dir = join(driver.cwd, 'test-data', 'stdlib-samples', version)
+        modules = []  # type: List[str]
+        for f in find_files(stdlibsamples_dir, prefix='test_', suffix='.py'):
+            module = file_to_module(f[len(stdlibsamples_dir) + 1:])
+            if module not in seen:
+                seen.add(module)
+                modules.append(module)
+        if modules:
+            driver.add_mypy_modules('stdlibsamples (%s)' % (version,), modules,
+                                    cwd=stdlibsamples_dir)
+
+
+def add_samples(driver: Driver) -> None:
+    for f in find_files(os.path.join('test-data', 'samples'), suffix='.py'):
+        driver.add_mypy('file %s' % f, f)
+
+
+def usage(status: int) -> None:
+    print('Usage: %s [-h | -v | -q | --lf | --ff | [-x] FILTER | -a ARG | -p ARG]'
+          '... [-- FILTER ...]'
+          % sys.argv[0])
+    print()
+    print('Run mypy tests. If given no arguments, run all tests.')
+    print()
+    print('Examples:')
+    print('  %s unit-test  (run unit tests only)' % sys.argv[0])
+    print('  %s unit-test -a "*tuple*"' % sys.argv[0])
+    print('       (run all unit tests with "tuple" in test name)')
+    print()
+    print('Options:')
+    print('  -h, --help             show this help')
+    print('  -v, --verbose          increase driver verbosity')
+    print('  --lf                   rerun only the tests that failed at the last run')
+    print('  --ff                   run all tests but run the last failures first')
+    print('  -q, --quiet            decrease driver verbosity')
+    print('  -jN                    run N tasks at once (default: one per CPU)')
+    print('  -a, --argument ARG     pass an argument to myunit tasks')
+    print('  -p, --pytest_arg ARG   pass an argument to pytest tasks')
+    print('                         (-v: verbose; glob pattern: filter by test name)')
+    print('  -l, --list             list included tasks (after filtering) and exit')
+    print('  FILTER                 include tasks matching FILTER')
+    print('  -x, --exclude FILTER   exclude tasks matching FILTER')
+    print('  -c, --coverage         calculate code coverage while running tests')
+    print('  --                     treat all remaining arguments as positional')
+    sys.exit(status)
+
+
+def sanity() -> None:
+    paths = os.getenv('PYTHONPATH')
+    if paths is None:
+        return
+    failed = False
+    for p in paths.split(os.pathsep):
+        if not os.path.isabs(p):
+            print('Relative PYTHONPATH entry %r' % p)
+            failed = True
+    if failed:
+        print('Please use absolute paths so that chdir() tests can work.')
+        print('Cowardly refusing to continue.')
+        sys.exit(1)
+
+
+def main() -> None:
+    import time
+    t0 = time.perf_counter()
+    sanity()
+
+    verbosity = 0
+    parallel_limit = 0
+    whitelist = []  # type: List[str]
+    blacklist = []  # type: List[str]
+    arglist = []  # type: List[str]
+    pyt_arglist = []  # type: List[str]
+    lf = False
+    ff = False
+    list_only = False
+    coverage = False
+
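+    # Hand-rolled option parsing: `curlist` is the list that the next
+    # positional argument will be appended to; it defaults to the whitelist
+    # and is redirected to blacklist/arglist/pyt_arglist by -x, -a and -p.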
+    allow_opts = True
+    curlist = whitelist
+    for a in sys.argv[1:]:
+        if not (curlist is arglist or curlist is pyt_arglist) and allow_opts and a.startswith('-'):
+            if curlist is not whitelist:
+                break
+            if a == '--':
+                allow_opts = False
+            elif a == '-v' or a == '--verbose':
+                verbosity += 1
+            elif a == '-q' or a == '--quiet':
+                verbosity -= 1
+            elif a.startswith('-j'):
+                try:
+                    parallel_limit = int(a[2:])
+                except ValueError:
+                    usage(1)
+            elif a == '-x' or a == '--exclude':
+                curlist = blacklist
+            elif a == '-a' or a == '--argument':
+                curlist = arglist
+            elif a == '-p' or a == '--pytest_arg':
+                curlist = pyt_arglist
+            # will also pass this option to pytest
+            elif a == '--lf':
+                lf = True
+            # will also pass this option to pytest
+            elif a == '--ff':
+                ff = True
+            elif a == '-l' or a == '--list':
+                list_only = True
+            elif a == '-c' or a == '--coverage':
+                coverage = True
+            elif a == '-h' or a == '--help':
+                usage(0)
+            else:
+                usage(1)
+        else:
+            curlist.append(a)
+            curlist = whitelist
+    if curlist is blacklist:
+        sys.exit('-x must be followed by a filter')
+    if curlist is arglist:
+        sys.exit('-a must be followed by an argument')
+    if curlist is pyt_arglist:
+        sys.exit('-p must be followed by an argument')
+    if lf and ff:
+        sys.exit('use either --lf or --ff, not both')
+    # empty string is a substring of all names
+    if not whitelist:
+        whitelist.append('')
+    if lf:
+        pyt_arglist.append('--lf')
+    if ff:
+        pyt_arglist.append('--ff')
+    if verbosity >= 1:
+        pyt_arglist.extend(['-v'] * verbosity)
+    elif verbosity < 0:
+        pyt_arglist.extend(['-q'] * (-verbosity))
+    if parallel_limit:
+        if '-n' not in pyt_arglist:
+            pyt_arglist.append('-n{}'.format(parallel_limit))
+
+    driver = Driver(whitelist=whitelist, blacklist=blacklist, lf=lf, ff=ff,
+                    arglist=arglist, pyt_arglist=pyt_arglist, verbosity=verbosity,
+                    parallel_limit=parallel_limit, xfail=[], coverage=coverage)
+
+    driver.prepend_path('PATH', [join(driver.cwd, 'scripts')])
+    driver.prepend_path('MYPYPATH', [driver.cwd])
+    driver.prepend_path('PYTHONPATH', [driver.cwd])
+
+    driver.add_flake8()
+    add_pytest(driver)
+    add_pythoneval(driver)
+    add_cmdline(driver)
+    add_basic(driver)
+    add_selftypecheck(driver)
+    add_myunit(driver)
+    add_imports(driver)
+    add_stubs(driver)
+    add_stdlibsamples(driver)
+    add_samples(driver)
+
+    if list_only:
+        driver.list_tasks()
+        return
+
+    exit_code = driver.waiter.run()
+    t1 = time.perf_counter()
+    print('total runtime:', t1 - t0, 'sec')
+
+    if verbosity >= 1:
+        times = driver.waiter.times2 if verbosity >= 2 else driver.waiter.times1
+        times_sortable = ((t, tp) for (tp, t) in times.items())
+        for total_time, test_type in sorted(times_sortable, reverse=True):
+            print('total time in %s: %f' % (test_type, total_time))
+
+    sys.exit(exit_code)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/__pycache__/dumpmodule.cpython-36.pyc b/scripts/__pycache__/dumpmodule.cpython-36.pyc
new file mode 100644
index 0000000..52e88ed
Binary files /dev/null and b/scripts/__pycache__/dumpmodule.cpython-36.pyc differ
diff --git a/test-data/.flake8 b/test-data/.flake8
new file mode 100644
index 0000000..df2f9ca
--- /dev/null
+++ b/test-data/.flake8
@@ -0,0 +1,22 @@
+# Some PEP8 deviations are considered irrelevant to stub files:
+# (error counts as of 2016-12-19)
+# 17381 E704 multiple statements on one line (def)
+# 11840 E301 expected 1 blank line
+#  7467 E302 expected 2 blank lines
+#  1772 E501 line too long
+#  1487 F401 imported but unused
+#  1248 E701 multiple statements on one line (colon)
+#   427 F811 redefinition
+#   356 E305 expected 2 blank lines
+
+# Nice-to-haves ignored for now
+#   152 E128 continuation line under-indented for visual indent
+#    43 E127 continuation line over-indented for visual indent
+
+[flake8]
+ignore = F401, F811, E127, E128, E301, E302, E305, E501, E701, E704, B303
+# We are checking with Python 3 but many of the stubs are Python 2 stubs.
+# A nice future improvement would be to provide separate .flake8
+# configurations for Python 2 and Python 3 files.
+builtins = StandardError,apply,basestring,buffer,cmp,coerce,execfile,file,intern,long,raw_input,reduce,reload,unichr,unicode,xrange
+exclude = .venv*,@*
diff --git a/test-data/samples/bottles.py b/test-data/samples/bottles.py
new file mode 100644
index 0000000..ddf77f5
--- /dev/null
+++ b/test-data/samples/bottles.py
@@ -0,0 +1,13 @@
+import typing
+
+REFRAIN = '''
+%d bottles of beer on the wall,
+%d bottles of beer,
+take one down, pass it around,
+%d bottles of beer on the wall!
+'''
+bottles_of_beer = 99
+while bottles_of_beer > 1:
+    print(REFRAIN % (bottles_of_beer, bottles_of_beer,
+          bottles_of_beer - 1))
+    bottles_of_beer -= 1
diff --git a/test-data/samples/class.py b/test-data/samples/class.py
new file mode 100644
index 0000000..d2eb4ac
--- /dev/null
+++ b/test-data/samples/class.py
@@ -0,0 +1,18 @@
+import typing
+
+
+class BankAccount(object):
+    def __init__(self, initial_balance: int = 0) -> None:
+        self.balance = initial_balance
+
+    def deposit(self, amount: int) -> None:
+        self.balance += amount
+
+    def withdraw(self, amount: int) -> None:
+        self.balance -= amount
+
+    def overdrawn(self) -> bool:
+        return self.balance < 0
+my_account = BankAccount(15)
+my_account.withdraw(5)
+print(my_account.balance)
diff --git a/test-data/samples/cmdline.py b/test-data/samples/cmdline.py
new file mode 100644
index 0000000..105c27a
--- /dev/null
+++ b/test-data/samples/cmdline.py
@@ -0,0 +1,8 @@
+# This program adds up integers in the command line
+import sys
+import typing
+try:
+    total = sum(int(arg) for arg in sys.argv[1:])
+    print('sum =', total)
+except ValueError:
+    print('Please supply integer arguments')
diff --git a/test-data/samples/crawl.py b/test-data/samples/crawl.py
new file mode 100644
index 0000000..8c3ff15
--- /dev/null
+++ b/test-data/samples/crawl.py
@@ -0,0 +1,863 @@
+#!/usr/bin/env python3.4
+
+"""A simple web crawler."""
+
+# This is cloned from <asyncio>/examples/crawl.py,
+# with type annotations added (PEP 484).
+#
+# TODO: convert to `async def` + `await` (PEP 492).
+
+import argparse
+import asyncio
+import cgi
+from http.client import BadStatusLine
+import logging
+import re
+import sys
+import time
+import urllib.parse
+from typing import Any, Generator, IO, Optional, Sequence, Set, Tuple, List, Dict
+
+
+ARGS = argparse.ArgumentParser(description="Web crawler")
+ARGS.add_argument(
+    '--iocp', action='store_true', dest='iocp',
+    default=False, help='Use IOCP event loop (Windows only)')
+ARGS.add_argument(
+    '--select', action='store_true', dest='select',
+    default=False, help='Use Select event loop instead of default')
+ARGS.add_argument(
+    'roots', nargs='*',
+    default=[], help='Root URL (may be repeated)')
+ARGS.add_argument(
+    '--max_redirect', action='store', type=int, metavar='N',
+    default=10, help='Limit redirection chains (for 301, 302 etc.)')
+ARGS.add_argument(
+    '--max_tries', action='store', type=int, metavar='N',
+    default=4, help='Limit retries on network errors')
+ARGS.add_argument(
+    '--max_tasks', action='store', type=int, metavar='N',
+    default=100, help='Limit concurrent connections')
+ARGS.add_argument(
+    '--max_pool', action='store', type=int, metavar='N',
+    default=100, help='Limit connection pool size')
+ARGS.add_argument(
+    '--exclude', action='store', metavar='REGEX',
+    help='Exclude matching URLs')
+ARGS.add_argument(
+    '--strict', action='store_true',
+    default=True, help='Strict host matching (default)')
+ARGS.add_argument(
+    '--lenient', action='store_false', dest='strict',
+    default=False, help='Lenient host matching')
+ARGS.add_argument(
+    '-v', '--verbose', action='count', dest='level',
+    default=1, help='Verbose logging (repeat for more verbose)')
+ARGS.add_argument(
+    '-q', '--quiet', action='store_const', const=0, dest='level',
+    default=1, help='Quiet logging (opposite of --verbose)')
+
+
+ESCAPES = [('quot', '"'),
+           ('gt', '>'),
+           ('lt', '<'),
+           ('amp', '&')  # Must be last.
+           ]
+
+
+def unescape(url: str) -> str:
+    """Turn & into &, and so on.
+
+    This is the inverse of cgi.escape().
+    """
+    for name, char in ESCAPES:
+        url = url.replace('&' + name + ';', char)
+    return url
+
+
+def fix_url(url: str) -> str:
+    """Prefix a schema-less URL with http://."""
+    if '://' not in url:
+        url = 'http://' + url
+    return url
+
+
+class Logger:
+
+    def __init__(self, level: int) -> None:
+        self.level = level
+
+    def _log(self, n: int, args: Sequence[Any]) -> None:
+        if self.level >= n:
+            print(*args, file=sys.stderr, flush=True)
+
+    def log(self, n: int, *args: Any) -> None:
+        self._log(n, args)
+
+    def __call__(self, n: int, *args: Any) -> None:
+        self._log(n, args)
+
+
+KeyTuple = Tuple[str, int, bool]
+
+
+class ConnectionPool:
+    """A connection pool.
+
+    To open a connection, use reserve().  To recycle it, use unreserve().
+
+    The pool is mostly just a mapping from (host, port, ssl) tuples to
+    lists of Connections.  The currently active connections are *not*
+    in the data structure; get_connection() takes the connection out,
+    and recycle_connection() puts it back in.  To recycle a
+    connection, call conn.close(recycle=True).
+
+    There are limits to both the overall pool and the per-key pool.
+    """
+
+    def __init__(self, log: Logger, max_pool: int = 10, max_tasks: int = 5) -> None:
+        self.log = log
+        self.max_pool = max_pool  # Overall limit.
+        self.max_tasks = max_tasks  # Per-key limit.
+        self.loop = asyncio.get_event_loop()
+        self.connections = {}  # type: Dict[KeyTuple, List[Connection]]
+        self.queue = []  # type: List[Connection]
+
+    def close(self) -> None:
+        """Close all connections available for reuse."""
+        for conns in self.connections.values():
+            for conn in conns:
+                conn.close()
+        self.connections.clear()
+        self.queue.clear()
+
+    @asyncio.coroutine
+    def get_connection(self, host: str, port: int, ssl: bool) -> Generator[Any, None, 'Connection']:
+        """Create or reuse a connection."""
+        port = port or (443 if ssl else 80)
+        try:
+            ipaddrs = yield from self.loop.getaddrinfo(host, port)
+        except Exception as exc:
+            self.log(0, 'Exception %r for (%r, %r)' % (exc, host, port))
+            raise
+        self.log(1, '* %s resolves to %s' %
+                    (host, ', '.join(ip[4][0] for ip in ipaddrs)))
+
+        # Look for a reusable connection.
+        for _1, _2, _3, _4, (h, p, *_5) in ipaddrs:
+            key = h, p, ssl
+            conn = None
+            conns = self.connections.get(key)
+            while conns:
+                conn = conns.pop(0)
+                self.queue.remove(conn)
+                if not conns:
+                    del self.connections[key]
+                if conn.stale():
+                    self.log(1, 'closing stale connection for', key)
+                    conn.close()  # Just in case.
+                else:
+                    self.log(1, '* Reusing pooled connection', key,
+                                'FD =', conn.fileno())
+                    return conn
+
+        # Create a new connection.
+        conn = Connection(self.log, self, host, port, ssl)
+        yield from conn.connect()
+        self.log(1, '* New connection', conn.key, 'FD =', conn.fileno())
+        return conn
+
+    def recycle_connection(self, conn: 'Connection') -> None:
+        """Make a connection available for reuse.
+
+        This also prunes the pool if it exceeds the size limits.
+        """
+        if conn.stale():
+            conn.close()
+            return
+
+        key = conn.key
+        conns = self.connections.setdefault(key, [])
+        conns.append(conn)
+        self.queue.append(conn)
+
+        if len(conns) <= self.max_tasks and len(self.queue) <= self.max_pool:
+            return
+
+        # Prune the queue.
+
+        # Close stale connections for this key first.
+        stale = [conn for conn in conns if conn.stale()]
+        if stale:
+            for conn in stale:
+                conns.remove(conn)
+                self.queue.remove(conn)
+                self.log(1, 'closing stale connection for', key)
+                conn.close()
+            if not conns:
+                del self.connections[key]
+
+        # Close oldest connection(s) for this key if limit reached.
+        while len(conns) > self.max_tasks:
+            conn = conns.pop(0)
+            self.queue.remove(conn)
+            self.log(1, 'closing oldest connection for', key)
+            conn.close()
+
+        if len(self.queue) <= self.max_pool:
+            return
+
+        # Close overall stale connections.
+        stale = [conn for conn in self.queue if conn.stale()]
+        if stale:
+            for conn in stale:
+                conns = self.connections.get(conn.key)
+                conns.remove(conn)
+                self.queue.remove(conn)
+                self.log(1, 'closing stale connection for', key)
+                conn.close()
+
+        # Close oldest overall connection(s) if limit reached.
+        while len(self.queue) > self.max_pool:
+            conn = self.queue.pop(0)
+            conns = self.connections.get(conn.key)
+            c = conns.pop(0)
+            assert conn == c, (conn.key, conn, c, conns)
+            self.log(1, 'closing overall oldest connection for', conn.key)
+            conn.close()
+
+
+class Connection:
+
+    def __init__(self, log: Logger, pool: ConnectionPool, host: str, port: int, ssl: bool) -> None:
+        self.log = log
+        self.pool = pool
+        self.host = host
+        self.port = port
+        self.ssl = ssl
+        self.reader = None  # type: asyncio.StreamReader
+        self.writer = None  # type: asyncio.StreamWriter
+        self.key = None  # type: KeyTuple
+
+    def stale(self) -> bool:
+        return self.reader is None or self.reader.at_eof()
+
+    def fileno(self) -> Optional[int]:
+        writer = self.writer
+        if writer is not None:
+            transport = writer.transport
+            if transport is not None:
+                sock = transport.get_extra_info('socket')
+                if sock is not None:
+                    return sock.fileno()
+        return None
+
+    @asyncio.coroutine
+    def connect(self) -> Generator[Any, None, None]:
+        self.reader, self.writer = yield from asyncio.open_connection(
+            self.host, self.port, ssl=self.ssl)
+        peername = self.writer.get_extra_info('peername')
+        if peername:
+            self.host, self.port = peername[:2]
+        else:
+            self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl)
+        self.key = self.host, self.port, self.ssl
+
+    def close(self, recycle: bool = False) -> None:
+        if recycle and not self.stale():
+            self.pool.recycle_connection(self)
+        else:
+            self.writer.close()
+            self.pool = self.reader = self.writer = None
+
+
+class Request:
+    """HTTP request.
+
+    Use connect() to open a connection; send_request() to send the
+    request; get_response() to receive the response headers.
+    """
+
+    def __init__(self, log: Logger, url: str, pool: ConnectionPool) -> None:
+        self.log = log
+        self.url = url
+        self.pool = pool
+        self.parts = urllib.parse.urlparse(self.url)
+        self.scheme = self.parts.scheme
+        assert self.scheme in ('http', 'https'), repr(url)
+        self.ssl = self.parts.scheme == 'https'
+        self.netloc = self.parts.netloc
+        self.hostname = self.parts.hostname
+        self.port = self.parts.port or (443 if self.ssl else 80)
+        self.path = (self.parts.path or '/')
+        self.query = self.parts.query
+        if self.query:
+            self.full_path = '%s?%s' % (self.path, self.query)
+        else:
+            self.full_path = self.path
+        self.http_version = 'HTTP/1.1'
+        self.method = 'GET'
+        self.headers = []  # type: List[Tuple[str, str]]
+        self.conn = None  # type: Connection
+
+    @asyncio.coroutine
+    def connect(self) -> Generator[Any, None, None]:
+        """Open a connection to the server."""
+        self.log(1, '* Connecting to %s:%s using %s for %s' %
+                    (self.hostname, self.port,
+                     'ssl' if self.ssl else 'tcp',
+                     self.url))
+        self.conn = yield from self.pool.get_connection(self.hostname,
+                                                        self.port, self.ssl)
+
+    def close(self, recycle: bool = False) -> None:
+        """Close the connection, recycle if requested."""
+        if self.conn is not None:
+            if not recycle:
+                self.log(1, 'closing connection for', self.conn.key)
+            self.conn.close(recycle)
+            self.conn = None
+
+    @asyncio.coroutine
+    def putline(self, line: str) -> None:
+        """Write a line to the connection.
+
+        Used for the request line and headers.
+        """
+        self.log(2, '>', line)
+        self.conn.writer.write(line.encode('latin-1') + b'\r\n')
+
+    @asyncio.coroutine
+    def send_request(self) -> Generator[Any, None, None]:
+        """Send the request."""
+        request_line = '%s %s %s' % (self.method, self.full_path,
+                                     self.http_version)
+        yield from self.putline(request_line)
+        # TODO: What if a header is already set?
+        self.headers.append(('User-Agent', 'asyncio-example-crawl/0.0'))
+        self.headers.append(('Host', self.netloc))
+        self.headers.append(('Accept', '*/*'))
+        # self.headers.append(('Accept-Encoding', 'gzip'))
+        for key, value in self.headers:
+            line = '%s: %s' % (key, value)
+            yield from self.putline(line)
+        yield from self.putline('')
+
+    @asyncio.coroutine
+    def get_response(self) -> Generator[Any, None, 'Response']:
+        """Receive the response."""
+        response = Response(self.log, self.conn.reader)
+        yield from response.read_headers()
+        return response
+
+
+class Response:
+    """HTTP response.
+
+    Call read_headers() to receive the request headers.  Then check
+    the status attribute and call get_header() to inspect the headers.
+    Finally call read() to receive the body.
+    """
+
+    def __init__(self, log: Logger, reader: asyncio.StreamReader) -> None:
+        self.log = log
+        self.reader = reader
+        self.http_version = None  # type: str  # 'HTTP/1.1'
+        self.status = None  # type: int  # 200
+        self.reason = None  # type: str  # 'Ok'
+        self.headers = []  # type: List[Tuple[str, str]]  # [('Content-Type', 'text/html')]
+
+    @asyncio.coroutine
+    def getline(self) -> Generator[Any, None, str]:
+        """Read one line from the connection."""
+        line = (yield from self.reader.readline()).decode('latin-1').rstrip()
+        self.log(2, '<', line)
+        return line
+
+    @asyncio.coroutine
+    def read_headers(self) -> Generator[Any, None, None]:
+        """Read the response status and the request headers."""
+        status_line = yield from self.getline()
+        status_parts = status_line.split(None, 2)
+        if len(status_parts) != 3:
+            self.log(0, 'bad status_line', repr(status_line))
+            raise BadStatusLine(status_line)
+        self.http_version, status, self.reason = status_parts
+        self.status = int(status)
+        while True:
+            header_line = yield from self.getline()
+            if not header_line:
+                break
+            # TODO: Continuation lines.
+            key, value = header_line.split(':', 1)
+            self.headers.append((key, value.strip()))
+
+    def get_redirect_url(self, default: str = '') -> str:
+        """Inspect the status and return the redirect url if appropriate."""
+        if self.status not in (300, 301, 302, 303, 307):
+            return default
+        return self.get_header('Location', default)
+
+    def get_header(self, key: str, default: str = '') -> str:
+        """Get one header value, using a case insensitive header name."""
+        key = key.lower()
+        for k, v in self.headers:
+            if k.lower() == key:
+                return v
+        return default
+
+    @asyncio.coroutine
+    def read(self) -> Generator[Any, None, bytes]:
+        """Read the response body.
+
+        This honors Content-Length and Transfer-Encoding: chunked.
+        """
+        nbytes = None
+        for key, value in self.headers:
+            if key.lower() == 'content-length':
+                nbytes = int(value)
+                break
+        if nbytes is None:
+            if self.get_header('transfer-encoding').lower() == 'chunked':
+                self.log(2, 'parsing chunked response')
+                blocks = []
+                while True:
+                    size_header = yield from self.reader.readline()
+                    if not size_header:
+                        self.log(0, 'premature end of chunked response')
+                        break
+                    self.log(3, 'size_header =', repr(size_header))
+                    parts = size_header.split(b';')
+                    size = int(parts[0], 16)
+                    if size:
+                        self.log(3, 'reading chunk of', size, 'bytes')
+                        block = yield from self.reader.readexactly(size)
+                        assert len(block) == size, (len(block), size)
+                        blocks.append(block)
+                    crlf = yield from self.reader.readline()
+                    assert crlf == b'\r\n', repr(crlf)
+                    if not size:
+                        break
+                body = b''.join(blocks)
+                self.log(1, 'chunked response had', len(body),
+                            'bytes in', len(blocks), 'blocks')
+            else:
+                self.log(3, 'reading until EOF')
+                body = yield from self.reader.read()
+                # TODO: Should make sure not to recycle the connection
+                # in this case.
+        else:
+            body = yield from self.reader.readexactly(nbytes)
+        return body
+
+
+class Fetcher:
+    """Logic and state for one URL.
+
+    When found in crawler.busy, this represents a URL to be fetched or
+    in the process of being fetched; when found in crawler.done, this
+    holds the results from fetching it.
+
+    This is usually associated with a task.  This references the
+    crawler for the connection pool and to add more URLs to its todo
+    list.
+
+    Call fetch() to do the fetching, then report() to print the results.
+    """
+
+    def __init__(self, log: Logger, url: str, crawler: 'Crawler',
+                 max_redirect: int = 10, max_tries: int = 4) -> None:
+        self.log = log
+        self.url = url
+        self.crawler = crawler
+        # We don't loop resolving redirects here -- we just use this
+        # to decide whether to add the redirect URL to crawler.todo.
+        self.max_redirect = max_redirect
+        # But we do loop to retry on errors a few times.
+        self.max_tries = max_tries
+        # Everything we collect from the response goes here.
+        self.task = None  # type: asyncio.Task
+        self.exceptions = []  # type: List[Exception]
+        self.tries = 0
+        self.request = None  # type: Request
+        self.response = None  # type: Response
+        self.body = None  # type: bytes
+        self.next_url = None  # type: str
+        self.ctype = None  # type: str
+        self.pdict = None  # type: Dict[str, str]
+        self.encoding = None  # type: str
+        self.urls = None  # type: Set[str]
+        self.new_urls = None  # type: Set[str]
+
+    @asyncio.coroutine
+    def fetch(self) -> Generator[Any, None, None]:
+        """Attempt to fetch the contents of the URL.
+
+        If successful, and the data is HTML, extract further links and
+        add them to the crawler.  Redirects are also added back there.
+        """
+        while self.tries < self.max_tries:
+            self.tries += 1
+            self.request = None
+            try:
+                self.request = Request(self.log, self.url, self.crawler.pool)
+                yield from self.request.connect()
+                yield from self.request.send_request()
+                self.response = yield from self.request.get_response()
+                self.body = yield from self.response.read()
+                h_conn = self.response.get_header('connection').lower()
+                if h_conn != 'close':
+                    self.request.close(recycle=True)
+                    self.request = None
+                if self.tries > 1:
+                    self.log(1, 'try', self.tries, 'for', self.url, 'success')
+                break
+            except (BadStatusLine, OSError) as exc:
+                self.exceptions.append(exc)
+                self.log(1, 'try', self.tries, 'for', self.url,
+                            'raised', repr(exc))
+                # import pdb; pdb.set_trace()
+                # Don't reuse the connection in this case.
+            finally:
+                if self.request is not None:
+                    self.request.close()
+        else:
+            # We never broke out of the while loop, i.e. all tries failed.
+            self.log(0, 'no success for', self.url,
+                        'in', self.max_tries, 'tries')
+            return
+        next_url = self.response.get_redirect_url()
+        if next_url:
+            self.next_url = urllib.parse.urljoin(self.url, next_url)
+            if self.max_redirect > 0:
+                self.log(1, 'redirect to', self.next_url, 'from', self.url)
+                self.crawler.add_url(self.next_url, self.max_redirect - 1)
+            else:
+                self.log(0, 'redirect limit reached for', self.next_url,
+                            'from', self.url)
+        else:
+            if self.response.status == 200:
+                self.ctype = self.response.get_header('content-type')
+                self.pdict = {}
+                if self.ctype:
+                    self.ctype, self.pdict = cgi.parse_header(self.ctype)
+                self.encoding = self.pdict.get('charset', 'utf-8')
+                if self.ctype == 'text/html':
+                    body = self.body.decode(self.encoding, 'replace')
+                    # Replace href with (?:href|src) to follow image links.
+                    self.urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)',
+                                               body))
+                    if self.urls:
+                        self.log(1, 'got', len(self.urls),
+                                    'distinct urls from', self.url)
+                    self.new_urls = set()
+                    for url in self.urls:
+                        url = unescape(url)
+                        url = urllib.parse.urljoin(self.url, url)
+                        url, frag = urllib.parse.urldefrag(url)
+                        if self.crawler.add_url(url):
+                            self.new_urls.add(url)
+
+    def report(self, stats: 'Stats', file: IO[str] = None) -> None:
+        """Print a report on the state for this URL.
+
+        Also update the Stats instance.
+        """
+        if self.task is not None:
+            if not self.task.done():
+                stats.add('pending')
+                print(self.url, 'pending', file=file)
+                return
+            elif self.task.cancelled():
+                stats.add('cancelled')
+                print(self.url, 'cancelled', file=file)
+                return
+            elif self.task.exception():
+                stats.add('exception')
+                exc = self.task.exception()
+                stats.add('exception_' + exc.__class__.__name__)
+                print(self.url, exc, file=file)
+                return
+        if len(self.exceptions) == self.tries:
+            stats.add('fail')
+            exc = self.exceptions[-1]
+            stats.add('fail_' + str(exc.__class__.__name__))
+            print(self.url, 'error', exc, file=file)
+        elif self.next_url:
+            stats.add('redirect')
+            print(self.url, self.response.status, 'redirect', self.next_url,
+                  file=file)
+        elif self.ctype == 'text/html':
+            stats.add('html')
+            size = len(self.body or b'')
+            stats.add('html_bytes', size)
+            if self.log.level:
+                print(self.url, self.response.status,
+                      self.ctype, self.encoding,
+                      size,
+                      '%d/%d' % (len(self.new_urls or ()), len(self.urls or ())),
+                      file=file)
+        elif self.response is None:
+            print(self.url, 'no response object')
+        else:
+            size = len(self.body or b'')
+            if self.response.status == 200:
+                stats.add('other')
+                stats.add('other_bytes', size)
+            else:
+                stats.add('error')
+                stats.add('error_bytes', size)
+                stats.add('status_%s' % self.response.status)
+            print(self.url, self.response.status,
+                  self.ctype, self.encoding,
+                  size,
+                  file=file)
+
+
+class Stats:
+    """Record stats of various sorts."""
+
+    def __init__(self) -> None:
+        self.stats = {}  # type: Dict[str, int]
+
+    def add(self, key: str, count: int = 1) -> None:
+        self.stats[key] = self.stats.get(key, 0) + count
+
+    def report(self, file: IO[str] = None) -> None:
+        for key, count in sorted(self.stats.items()):
+            print('%10d' % count, key, file=file)
+
+
+class Crawler:
+    """Crawl a set of URLs.
+
+    This manages three disjoint sets of URLs (todo, busy, done).  The
+    data structures actually store dicts -- the values in todo give
+    the redirect limit, while the values in busy and done are Fetcher
+    instances.
+    """
+    def __init__(self, log: Logger,
+                 roots: Set[str], exclude: str = None, strict: bool = True,  # What to crawl.
+                 max_redirect: int = 10, max_tries: int = 4,  # Per-url limits.
+                 max_tasks: int = 10, max_pool: int = 10,  # Global limits.
+                 ) -> None:
+        self.log = log
+        self.roots = roots
+        self.exclude = exclude
+        self.strict = strict
+        self.max_redirect = max_redirect
+        self.max_tries = max_tries
+        self.max_tasks = max_tasks
+        self.max_pool = max_pool
+        self.todo = {}  # type: Dict[str, int]
+        self.busy = {}  # type: Dict[str, Fetcher]
+        self.done = {}  # type: Dict[str, Fetcher]
+        self.pool = ConnectionPool(self.log, max_pool, max_tasks)
+        self.root_domains = set()  # type: Set[str]
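+        # Pre-compute the set of hosts we are willing to crawl: IP-address
+        # roots are stored verbatim; in strict mode a host and its 'www.'
+        # variant are both added, in lenient mode only the last two labels.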
+        for root in roots:
+            host = urllib.parse.urlparse(root).hostname
+            if not host:
+                continue
+            if re.match(r'\A[\d\.]*\Z', host):
+                self.root_domains.add(host)
+            else:
+                host = host.lower()
+                if self.strict:
+                    self.root_domains.add(host)
+                    if host.startswith('www.'):
+                        self.root_domains.add(host[4:])
+                    else:
+                        self.root_domains.add('www.' + host)
+                else:
+                    parts = host.split('.')
+                    if len(parts) > 2:
+                        host = '.'.join(parts[-2:])
+                    self.root_domains.add(host)
+        for root in roots:
+            self.add_url(root)
+        self.governor = asyncio.Semaphore(max_tasks)
+        self.termination = asyncio.Condition()
+        self.t0 = time.time()
+        self.t1 = None  # type: Optional[float]
+
+    def close(self) -> None:
+        """Close resources (currently only the pool)."""
+        self.pool.close()
+
+    def host_okay(self, host: str) -> bool:
+        """Check if a host should be crawled.
+
+        A literal match (after lowercasing) is always good.  For hosts
+        that don't look like IP addresses, some approximate matches
+        are okay depending on the strict flag.
+        """
+        host = host.lower()
+        if host in self.root_domains:
+            return True
+        if re.match(r'\A[\d\.]*\Z', host):
+            return False
+        if self.strict:
+            return self._host_okay_strictish(host)
+        else:
+            return self._host_okay_lenient(host)
+
+    def _host_okay_strictish(self, host: str) -> bool:
+        """Check if a host should be crawled, strict-ish version.
+
+        This checks for equality modulo an initial 'www.' component.
+        """
+        if host.startswith('www.'):
+            if host[4:] in self.root_domains:
+                return True
+        else:
+            if 'www.' + host in self.root_domains:
+                return True
+        return False
+
+    def _host_okay_lenient(self, host: str) -> bool:
+        """Check if a host should be crawled, lenient version.
+
+        This compares the last two components of the host.
+        """
+        parts = host.split('.')
+        if len(parts) > 2:
+            host = '.'.join(parts[-2:])
+        return host in self.root_domains
+
+    def add_url(self, url: str, max_redirect: int = None) -> bool:
+        """Add a URL to the todo list if not seen before."""
+        if self.exclude and re.search(self.exclude, url):
+            return False
+        parsed = urllib.parse.urlparse(url)
+        if parsed.scheme not in ('http', 'https'):
+            self.log(2, 'skipping non-http scheme in', url)
+            return False
+        host = parsed.hostname
+        if not self.host_okay(host):
+            self.log(2, 'skipping non-root host in', url)
+            return False
+        if max_redirect is None:
+            max_redirect = self.max_redirect
+        if url in self.todo or url in self.busy or url in self.done:
+            return False
+        self.log(1, 'adding', url, max_redirect)
+        self.todo[url] = max_redirect
+        return True
+
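+    # crawl() and fetch() coordinate via the `termination` condition: fetch()
+    # notifies it whenever a URL finishes, waking crawl() to schedule more
+    # work or to finish once both todo and busy are empty.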
+    @asyncio.coroutine
+    def crawl(self) -> Generator[Any, None, None]:
+        """Run the crawler until all finished."""
+        with (yield from self.termination):
+            while self.todo or self.busy:
+                if self.todo:
+                    url, max_redirect = self.todo.popitem()
+                    fetcher = Fetcher(self.log, url,
+                                      crawler=self,
+                                      max_redirect=max_redirect,
+                                      max_tries=self.max_tries,
+                                      )
+                    self.busy[url] = fetcher
+                    fetcher.task = asyncio.Task(self.fetch(fetcher))
+                else:
+                    yield from self.termination.wait()
+        self.t1 = time.time()
+
+    @asyncio.coroutine
+    def fetch(self, fetcher: Fetcher) -> Generator[Any, None, None]:
+        """Call the Fetcher's fetch(), with a limit on concurrency.
+
+        Once this returns, move the fetcher from busy to done.
+        """
+        url = fetcher.url
+        with (yield from self.governor):
+            try:
+                yield from fetcher.fetch()  # Fetcher gonna fetch.
+            finally:
+                # Force GC of the task, so the error is logged.
+                fetcher.task = None
+        with (yield from self.termination):
+            self.done[url] = fetcher
+            del self.busy[url]
+            self.termination.notify()
+
+    def report(self, file: IO[str] = None) -> None:
+        """Print a report on all completed URLs."""
+        if self.t1 is None:
+            self.t1 = time.time()
+        dt = self.t1 - self.t0
+        if dt and self.max_tasks:
+            speed = len(self.done) / dt / self.max_tasks
+        else:
+            speed = 0
+        stats = Stats()
+        print('*** Report ***', file=file)
+        try:
+            show = []  # type: List[Tuple[str, Fetcher]]
+            show.extend(self.done.items())
+            show.extend(self.busy.items())
+            show.sort()
+            for url, fetcher in show:
+                fetcher.report(stats, file=file)
+        except KeyboardInterrupt:
+            print('\nInterrupted', file=file)
+        print('Finished', len(self.done),
+              'urls in %.3f secs' % dt,
+              '(max_tasks=%d)' % self.max_tasks,
+              '(%.3f urls/sec/task)' % speed,
+              file=file)
+        stats.report(file=file)
+        print('Todo:', len(self.todo), file=file)
+        print('Busy:', len(self.busy), file=file)
+        print('Done:', len(self.done), file=file)
+        print('Date:', time.ctime(), 'local time', file=file)
+
+
+def main() -> None:
+    """Main program.
+
+    Parse arguments, set up event loop, run crawler, print report.
+    """
+    args = ARGS.parse_args()
+    if not args.roots:
+        print('Use --help for command line help')
+        return
+
+    log = Logger(args.level)
+
+    if args.iocp:
+        if sys.platform == 'win32':
+            from asyncio import ProactorEventLoop
+            loop = ProactorEventLoop()  # type: ignore
+            asyncio.set_event_loop(loop)
+        else:
+            assert False
+    elif args.select:
+        loop = asyncio.SelectorEventLoop()  # type: ignore
+        asyncio.set_event_loop(loop)
+    else:
+        loop = asyncio.get_event_loop()
+
+    roots = {fix_url(root) for root in args.roots}
+
+    crawler = Crawler(log,
+                      roots, exclude=args.exclude,
+                      strict=args.strict,
+                      max_redirect=args.max_redirect,
+                      max_tries=args.max_tries,
+                      max_tasks=args.max_tasks,
+                      max_pool=args.max_pool,
+                      )
+    try:
+        loop.run_until_complete(crawler.crawl())  # Crawler gonna crawl.
+    except KeyboardInterrupt:
+        sys.stderr.flush()
+        print('\nInterrupted\n')
+    finally:
+        crawler.report()
+        crawler.close()
+        loop.close()
+
+
+if __name__ == '__main__':
+    logging.basicConfig(level=logging.INFO) # type: ignore
+    main()
diff --git a/test-data/samples/crawl2.py b/test-data/samples/crawl2.py
new file mode 100644
index 0000000..fae5bc0
--- /dev/null
+++ b/test-data/samples/crawl2.py
@@ -0,0 +1,852 @@
+#!/usr/bin/env python3.4
+
+"""A simple web crawler."""
+
+# This is cloned from <asyncio>/examples/crawl.py,
+# with type annotations added (PEP 484).
+#
+# This version (crawl2.py) has also been converted to use `async def` +
+# `await` (PEP 492).
+
+import argparse
+import asyncio
+import cgi
+from http.client import BadStatusLine
+import logging
+import re
+import sys
+import time
+import urllib.parse
+from typing import Any, Awaitable, IO, Optional, Sequence, Set, Tuple, List, Dict
+
+
+ARGS = argparse.ArgumentParser(description="Web crawler")
+ARGS.add_argument(
+    '--iocp', action='store_true', dest='iocp',
+    default=False, help='Use IOCP event loop (Windows only)')
+ARGS.add_argument(
+    '--select', action='store_true', dest='select',
+    default=False, help='Use Select event loop instead of default')
+ARGS.add_argument(
+    'roots', nargs='*',
+    default=[], help='Root URL (may be repeated)')
+ARGS.add_argument(
+    '--max_redirect', action='store', type=int, metavar='N',
+    default=10, help='Limit redirection chains (for 301, 302 etc.)')
+ARGS.add_argument(
+    '--max_tries', action='store', type=int, metavar='N',
+    default=4, help='Limit retries on network errors')
+ARGS.add_argument(
+    '--max_tasks', action='store', type=int, metavar='N',
+    default=100, help='Limit concurrent connections')
+ARGS.add_argument(
+    '--max_pool', action='store', type=int, metavar='N',
+    default=100, help='Limit connection pool size')
+ARGS.add_argument(
+    '--exclude', action='store', metavar='REGEX',
+    help='Exclude matching URLs')
+ARGS.add_argument(
+    '--strict', action='store_true',
+    default=True, help='Strict host matching (default)')
+ARGS.add_argument(
+    '--lenient', action='store_false', dest='strict',
+    default=False, help='Lenient host matching')
+ARGS.add_argument(
+    '-v', '--verbose', action='count', dest='level',
+    default=1, help='Verbose logging (repeat for more verbose)')
+ARGS.add_argument(
+    '-q', '--quiet', action='store_const', const=0, dest='level',
+    default=1, help='Quiet logging (opposite of --verbose)')
+
+
+ESCAPES = [('quot', '"'),
+           ('gt', '>'),
+           ('lt', '<'),
+           ('amp', '&')  # Must be last.
+           ]
+
+
+def unescape(url: str) -> str:
+    """Turn & into &, and so on.
+
+    This is the inverse of cgi.escape().
+    """
+    for name, char in ESCAPES:
+        url = url.replace('&' + name + ';', char)
+    return url
+
+
+def fix_url(url: str) -> str:
+    """Prefix a schema-less URL with http://."""
+    if '://' not in url:
+        url = 'http://' + url
+    return url
+
+
+class Logger:
+
+    def __init__(self, level: int) -> None:
+        self.level = level
+
+    def _log(self, n: int, args: Sequence[Any]) -> None:
+        if self.level >= n:
+            print(*args, file=sys.stderr, flush=True)
+
+    def log(self, n: int, *args: Any) -> None:
+        self._log(n, args)
+
+    def __call__(self, n: int, *args: Any) -> None:
+        self._log(n, args)
+
+
+KeyTuple = Tuple[str, int, bool]
+
+
+class ConnectionPool:
+    """A connection pool.
+
+    To open a connection, use get_connection().  To recycle it, use recycle_connection().
+
+    The pool is mostly just a mapping from (host, port, ssl) tuples to
+    lists of Connections.  The currently active connections are *not*
+    in the data structure; get_connection() takes the connection out,
+    and recycle_connection() puts it back in.  To recycle a
+    connection, call conn.close(recycle=True).
+
+    There are limits to both the overall pool and the per-key pool.
+    """
+
+    def __init__(self, log: Logger, max_pool: int = 10, max_tasks: int = 5) -> None:
+        self.log = log
+        self.max_pool = max_pool  # Overall limit.
+        self.max_tasks = max_tasks  # Per-key limit.
+        self.loop = asyncio.get_event_loop()
+        self.connections = {}  # type: Dict[KeyTuple, List[Connection]]
+        self.queue = []  # type: List[Connection]
+
+    def close(self) -> None:
+        """Close all connections available for reuse."""
+        for conns in self.connections.values():
+            for conn in conns:
+                conn.close()
+        self.connections.clear()
+        self.queue.clear()
+
+    async def get_connection(self, host: str, port: int, ssl: bool) -> 'Connection':
+        """Create or reuse a connection."""
+        port = port or (443 if ssl else 80)
+        try:
+            ipaddrs = await self.loop.getaddrinfo(host, port)
+        except Exception as exc:
+            self.log(0, 'Exception %r for (%r, %r)' % (exc, host, port))
+            raise
+        self.log(1, '* %s resolves to %s' %
+                    (host, ', '.join(ip[4][0] for ip in ipaddrs)))
+
+        # Look for a reusable connection.
+        for _1, _2, _3, _4, (h, p, *_5) in ipaddrs:
+            key = h, p, ssl
+            conn = None
+            conns = self.connections.get(key)
+            while conns:
+                conn = conns.pop(0)
+                self.queue.remove(conn)
+                if not conns:
+                    del self.connections[key]
+                if conn.stale():
+                    self.log(1, 'closing stale connection for', key)
+                    conn.close()  # Just in case.
+                else:
+                    self.log(1, '* Reusing pooled connection', key,
+                                'FD =', conn.fileno())
+                    return conn
+
+        # Create a new connection.
+        conn = Connection(self.log, self, host, port, ssl)
+        await conn.connect()
+        self.log(1, '* New connection', conn.key, 'FD =', conn.fileno())
+        return conn
+
+    def recycle_connection(self, conn: 'Connection') -> None:
+        """Make a connection available for reuse.
+
+        This also prunes the pool if it exceeds the size limits.
+        """
+        if conn.stale():
+            conn.close()
+            return
+
+        key = conn.key
+        conns = self.connections.setdefault(key, [])
+        conns.append(conn)
+        self.queue.append(conn)
+
+        if len(conns) <= self.max_tasks and len(self.queue) <= self.max_pool:
+            return
+
+        # Prune the queue.
+
+        # Close stale connections for this key first.
+        stale = [conn for conn in conns if conn.stale()]
+        if stale:
+            for conn in stale:
+                conns.remove(conn)
+                self.queue.remove(conn)
+                self.log(1, 'closing stale connection for', key)
+                conn.close()
+            if not conns:
+                del self.connections[key]
+
+        # Close oldest connection(s) for this key if limit reached.
+        while len(conns) > self.max_tasks:
+            conn = conns.pop(0)
+            self.queue.remove(conn)
+            self.log(1, 'closing oldest connection for', key)
+            conn.close()
+
+        if len(self.queue) <= self.max_pool:
+            return
+
+        # Close overall stale connections.
+        stale = [conn for conn in self.queue if conn.stale()]
+        if stale:
+            for conn in stale:
+                conns = self.connections.get(conn.key)
+                conns.remove(conn)
+                self.queue.remove(conn)
+                self.log(1, 'closing stale connection for', key)
+                conn.close()
+
+        # Close oldest overall connection(s) if limit reached.
+        while len(self.queue) > self.max_pool:
+            conn = self.queue.pop(0)
+            conns = self.connections.get(conn.key)
+            c = conns.pop(0)
+            assert conn == c, (conn.key, conn, c, conns)
+            self.log(1, 'closing overall oldest connection for', conn.key)
+            conn.close()
+
+
+class Connection:
+
+    def __init__(self, log: Logger, pool: ConnectionPool, host: str, port: int, ssl: bool) -> None:
+        self.log = log
+        self.pool = pool
+        self.host = host
+        self.port = port
+        self.ssl = ssl
+        self.reader = None  # type: asyncio.StreamReader
+        self.writer = None  # type: asyncio.StreamWriter
+        self.key = None  # type: KeyTuple
+
+    def stale(self) -> bool:
+        return self.reader is None or self.reader.at_eof()
+
+    def fileno(self) -> Optional[int]:
+        writer = self.writer
+        if writer is not None:
+            transport = writer.transport
+            if transport is not None:
+                sock = transport.get_extra_info('socket')
+                if sock is not None:
+                    return sock.fileno()
+        return None
+
+    async def connect(self) -> None:
+        self.reader, self.writer = await asyncio.open_connection(
+            self.host, self.port, ssl=self.ssl)
+        peername = self.writer.get_extra_info('peername')
+        if peername:
+            self.host, self.port = peername[:2]
+        else:
+            self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl)
+        self.key = self.host, self.port, self.ssl
+
+    def close(self, recycle: bool = False) -> None:
+        if recycle and not self.stale():
+            self.pool.recycle_connection(self)
+        else:
+            self.writer.close()
+            self.pool = self.reader = self.writer = None
+
+
+class Request:
+    """HTTP request.
+
+    Use connect() to open a connection; send_request() to send the
+    request; get_response() to receive the response headers.
+    """
+
+    def __init__(self, log: Logger, url: str, pool: ConnectionPool) -> None:
+        self.log = log
+        self.url = url
+        self.pool = pool
+        self.parts = urllib.parse.urlparse(self.url)
+        self.scheme = self.parts.scheme
+        assert self.scheme in ('http', 'https'), repr(url)
+        self.ssl = self.parts.scheme == 'https'
+        self.netloc = self.parts.netloc
+        self.hostname = self.parts.hostname
+        self.port = self.parts.port or (443 if self.ssl else 80)
+        self.path = (self.parts.path or '/')
+        self.query = self.parts.query
+        if self.query:
+            self.full_path = '%s?%s' % (self.path, self.query)
+        else:
+            self.full_path = self.path
+        self.http_version = 'HTTP/1.1'
+        self.method = 'GET'
+        self.headers = []  # type: List[Tuple[str, str]]
+        self.conn = None  # type: Connection
+
+    async def connect(self) -> None:
+        """Open a connection to the server."""
+        self.log(1, '* Connecting to %s:%s using %s for %s' %
+                    (self.hostname, self.port,
+                     'ssl' if self.ssl else 'tcp',
+                     self.url))
+        self.conn = await self.pool.get_connection(self.hostname,
+                                                        self.port, self.ssl)
+
+    def close(self, recycle: bool = False) -> None:
+        """Close the connection, recycle if requested."""
+        if self.conn is not None:
+            if not recycle:
+                self.log(1, 'closing connection for', self.conn.key)
+            self.conn.close(recycle)
+            self.conn = None
+
+    async def putline(self, line: str) -> None:
+        """Write a line to the connection.
+
+        Used for the request line and headers.
+        """
+        self.log(2, '>', line)
+        self.conn.writer.write(line.encode('latin-1') + b'\r\n')
+
+    async def send_request(self) -> None:
+        """Send the request."""
+        request_line = '%s %s %s' % (self.method, self.full_path,
+                                     self.http_version)
+        await self.putline(request_line)
+        # TODO: What if a header is already set?
+        self.headers.append(('User-Agent', 'asyncio-example-crawl/0.0'))
+        self.headers.append(('Host', self.netloc))
+        self.headers.append(('Accept', '*/*'))
+        # self.headers.append(('Accept-Encoding', 'gzip'))
+        for key, value in self.headers:
+            line = '%s: %s' % (key, value)
+            await self.putline(line)
+        await self.putline('')
+
+    async def get_response(self) -> 'Response':
+        """Receive the response."""
+        response = Response(self.log, self.conn.reader)
+        await response.read_headers()
+        return response
+
+
+class Response:
+    """HTTP response.
+
+    Call read_headers() to receive the request headers.  Then check
+    the status attribute and call get_header() to inspect the headers.
+    Finally call read() to receive the body.
+    """
+
+    def __init__(self, log: Logger, reader: asyncio.StreamReader) -> None:
+        self.log = log
+        self.reader = reader
+        self.http_version = None  # type: str  # 'HTTP/1.1'
+        self.status = None  # type: int  # 200
+        self.reason = None  # type: str  # 'Ok'
+        self.headers = []  # type: List[Tuple[str, str]]  # [('Content-Type', 'text/html')]
+
+    async def getline(self) -> str:
+        """Read one line from the connection."""
+        line = (await self.reader.readline()).decode('latin-1').rstrip()
+        self.log(2, '<', line)
+        return line
+
+    async def read_headers(self) -> None:
+        """Read the response status and the request headers."""
+        status_line = await self.getline()
+        status_parts = status_line.split(None, 2)
+        if len(status_parts) != 3:
+            self.log(0, 'bad status_line', repr(status_line))
+            raise BadStatusLine(status_line)
+        self.http_version, status, self.reason = status_parts
+        self.status = int(status)
+        while True:
+            header_line = await self.getline()
+            if not header_line:
+                break
+            # TODO: Continuation lines.
+            key, value = header_line.split(':', 1)
+            self.headers.append((key, value.strip()))
+
+    def get_redirect_url(self, default: str = '') -> str:
+        """Inspect the status and return the redirect url if appropriate."""
+        if self.status not in (300, 301, 302, 303, 307):
+            return default
+        return self.get_header('Location', default)
+
+    def get_header(self, key: str, default: str = '') -> str:
+        """Get one header value, using a case insensitive header name."""
+        key = key.lower()
+        for k, v in self.headers:
+            if k.lower() == key:
+                return v
+        return default
+
+    async def read(self) -> bytes:
+        """Read the response body.
+
+        This honors Content-Length and Transfer-Encoding: chunked.
+        """
+        nbytes = None
+        for key, value in self.headers:
+            if key.lower() == 'content-length':
+                nbytes = int(value)
+                break
+        if nbytes is None:
+            if self.get_header('transfer-encoding').lower() == 'chunked':
+                self.log(2, 'parsing chunked response')
+                blocks = []
+                while True:
+                    size_header = await self.reader.readline()
+                    if not size_header:
+                        self.log(0, 'premature end of chunked response')
+                        break
+                    self.log(3, 'size_header =', repr(size_header))
+                    parts = size_header.split(b';')
+                    size = int(parts[0], 16)
+                    if size:
+                        self.log(3, 'reading chunk of', size, 'bytes')
+                        block = await self.reader.readexactly(size)
+                        assert len(block) == size, (len(block), size)
+                        blocks.append(block)
+                    crlf = await self.reader.readline()
+                    assert crlf == b'\r\n', repr(crlf)
+                    if not size:
+                        break
+                body = b''.join(blocks)
+                self.log(1, 'chunked response had', len(body),
+                            'bytes in', len(blocks), 'blocks')
+            else:
+                self.log(3, 'reading until EOF')
+                body = await self.reader.read()
+                # TODO: Should make sure not to recycle the connection
+                # in this case.
+        else:
+            body = await self.reader.readexactly(nbytes)
+        return body
+
+
+class Fetcher:
+    """Logic and state for one URL.
+
+    When found in crawler.busy, this represents a URL to be fetched or
+    in the process of being fetched; when found in crawler.done, this
+    holds the results from fetching it.
+
+    This is usually associated with a task.  This references the
+    crawler for the connection pool and to add more URLs to its todo
+    list.
+
+    Call fetch() to do the fetching, then report() to print the results.
+    """
+
+    def __init__(self, log: Logger, url: str, crawler: 'Crawler',
+                 max_redirect: int = 10, max_tries: int = 4) -> None:
+        self.log = log
+        self.url = url
+        self.crawler = crawler
+        # We don't loop resolving redirects here -- we just use this
+        # to decide whether to add the redirect URL to crawler.todo.
+        self.max_redirect = max_redirect
+        # But we do loop to retry on errors a few times.
+        self.max_tries = max_tries
+        # Everything we collect from the response goes here.
+        self.task = None  # type: asyncio.Task
+        self.exceptions = []  # type: List[Exception]
+        self.tries = 0
+        self.request = None  # type: Request
+        self.response = None  # type: Response
+        self.body = None  # type: bytes
+        self.next_url = None  # type: str
+        self.ctype = None  # type: str
+        self.pdict = None  # type: Dict[str, str]
+        self.encoding = None  # type: str
+        self.urls = None  # type: Set[str]
+        self.new_urls = None  # type: Set[str]
+
+    async def fetch(self) -> None:
+        """Attempt to fetch the contents of the URL.
+
+        If successful, and the data is HTML, extract further links and
+        add them to the crawler.  Redirects are also added back there.
+        """
+        while self.tries < self.max_tries:
+            self.tries += 1
+            self.request = None
+            try:
+                self.request = Request(self.log, self.url, self.crawler.pool)
+                await self.request.connect()
+                await self.request.send_request()
+                self.response = await self.request.get_response()
+                self.body = await self.response.read()
+                h_conn = self.response.get_header('connection').lower()
+                if h_conn != 'close':
+                    self.request.close(recycle=True)
+                    self.request = None
+                if self.tries > 1:
+                    self.log(1, 'try', self.tries, 'for', self.url, 'success')
+                break
+            except (BadStatusLine, OSError) as exc:
+                self.exceptions.append(exc)
+                self.log(1, 'try', self.tries, 'for', self.url,
+                            'raised', repr(exc))
+                # import pdb; pdb.set_trace()
+                # Don't reuse the connection in this case.
+            finally:
+                if self.request is not None:
+                    self.request.close()
+        else:
+            # We never broke out of the while loop, i.e. all tries failed.
+            self.log(0, 'no success for', self.url,
+                        'in', self.max_tries, 'tries')
+            return
+        next_url = self.response.get_redirect_url()
+        if next_url:
+            self.next_url = urllib.parse.urljoin(self.url, next_url)
+            if self.max_redirect > 0:
+                self.log(1, 'redirect to', self.next_url, 'from', self.url)
+                self.crawler.add_url(self.next_url, self.max_redirect - 1)
+            else:
+                self.log(0, 'redirect limit reached for', self.next_url,
+                            'from', self.url)
+        else:
+            if self.response.status == 200:
+                self.ctype = self.response.get_header('content-type')
+                self.pdict = {}
+                if self.ctype:
+                    self.ctype, self.pdict = cgi.parse_header(self.ctype)
+                self.encoding = self.pdict.get('charset', 'utf-8')
+                if self.ctype == 'text/html':
+                    body = self.body.decode(self.encoding, 'replace')
+                    # Replace href with (?:href|src) to follow image links.
+                    self.urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)',
+                                               body))
+                    if self.urls:
+                        self.log(1, 'got', len(self.urls),
+                                    'distinct urls from', self.url)
+                    self.new_urls = set()
+                    for url in self.urls:
+                        url = unescape(url)
+                        url = urllib.parse.urljoin(self.url, url)
+                        url, frag = urllib.parse.urldefrag(url)
+                        if self.crawler.add_url(url):
+                            self.new_urls.add(url)
+
+    def report(self, stats: 'Stats', file: IO[str] = None) -> None:
+        """Print a report on the state for this URL.
+
+        Also update the Stats instance.
+        """
+        if self.task is not None:
+            if not self.task.done():
+                stats.add('pending')
+                print(self.url, 'pending', file=file)
+                return
+            elif self.task.cancelled():
+                stats.add('cancelled')
+                print(self.url, 'cancelled', file=file)
+                return
+            elif self.task.exception():
+                stats.add('exception')
+                exc = self.task.exception()
+                stats.add('exception_' + exc.__class__.__name__)
+                print(self.url, exc, file=file)
+                return
+        if len(self.exceptions) == self.tries:
+            stats.add('fail')
+            exc = self.exceptions[-1]
+            stats.add('fail_' + str(exc.__class__.__name__))
+            print(self.url, 'error', exc, file=file)
+        elif self.next_url:
+            stats.add('redirect')
+            print(self.url, self.response.status, 'redirect', self.next_url,
+                  file=file)
+        elif self.ctype == 'text/html':
+            stats.add('html')
+            size = len(self.body or b'')
+            stats.add('html_bytes', size)
+            if self.log.level:
+                print(self.url, self.response.status,
+                      self.ctype, self.encoding,
+                      size,
+                      '%d/%d' % (len(self.new_urls or ()), len(self.urls or ())),
+                      file=file)
+        elif self.response is None:
+            print(self.url, 'no response object')
+        else:
+            size = len(self.body or b'')
+            if self.response.status == 200:
+                stats.add('other')
+                stats.add('other_bytes', size)
+            else:
+                stats.add('error')
+                stats.add('error_bytes', size)
+                stats.add('status_%s' % self.response.status)
+            print(self.url, self.response.status,
+                  self.ctype, self.encoding,
+                  size,
+                  file=file)
+
+
+class Stats:
+    """Record stats of various sorts."""
+
+    def __init__(self) -> None:
+        self.stats = {}  # type: Dict[str, int]
+
+    def add(self, key: str, count: int = 1) -> None:
+        self.stats[key] = self.stats.get(key, 0) + count
+
+    def report(self, file: IO[str] = None) -> None:
+        for key, count in sorted(self.stats.items()):
+            print('%10d' % count, key, file=file)
+
+
+class Crawler:
+    """Crawl a set of URLs.
+
+    This manages three disjoint sets of URLs (todo, busy, done).  The
+    data structures actually store dicts -- the values in todo give
+    the redirect limit, while the values in busy and done are Fetcher
+    instances.
+    """
+    def __init__(self, log: Logger,
+                 roots: Set[str], exclude: str = None, strict: bool = True,  # What to crawl.
+                 max_redirect: int = 10, max_tries: int = 4,  # Per-url limits.
+                 max_tasks: int = 10, max_pool: int = 10,  # Global limits.
+                 ) -> None:
+        self.log = log
+        self.roots = roots
+        self.exclude = exclude
+        self.strict = strict
+        self.max_redirect = max_redirect
+        self.max_tries = max_tries
+        self.max_tasks = max_tasks
+        self.max_pool = max_pool
+        self.todo = {}  # type: Dict[str, int]
+        self.busy = {}  # type: Dict[str, Fetcher]
+        self.done = {}  # type: Dict[str, Fetcher]
+        self.pool = ConnectionPool(self.log, max_pool, max_tasks)
+        self.root_domains = set()  # type: Set[str]
+        for root in roots:
+            host = urllib.parse.urlparse(root).hostname
+            if not host:
+                continue
+            if re.match(r'\A[\d\.]*\Z', host):
+                self.root_domains.add(host)
+            else:
+                host = host.lower()
+                if self.strict:
+                    self.root_domains.add(host)
+                    if host.startswith('www.'):
+                        self.root_domains.add(host[4:])
+                    else:
+                        self.root_domains.add('www.' + host)
+                else:
+                    parts = host.split('.')
+                    if len(parts) > 2:
+                        host = '.'.join(parts[-2:])
+                    self.root_domains.add(host)
+        for root in roots:
+            self.add_url(root)
+        self.governor = asyncio.Semaphore(max_tasks)
+        self.termination = asyncio.Condition()
+        self.t0 = time.time()
+        self.t1 = None  # type: Optional[float]
+
+    def close(self) -> None:
+        """Close resources (currently only the pool)."""
+        self.pool.close()
+
+    def host_okay(self, host: str) -> bool:
+        """Check if a host should be crawled.
+
+        A literal match (after lowercasing) is always good.  For hosts
+        that don't look like IP addresses, some approximate matches
+        are okay depending on the strict flag.
+        """
+        host = host.lower()
+        if host in self.root_domains:
+            return True
+        if re.match(r'\A[\d\.]*\Z', host):
+            return False
+        if self.strict:
+            return self._host_okay_strictish(host)
+        else:
+            return self._host_okay_lenient(host)
+
+    def _host_okay_strictish(self, host: str) -> bool:
+        """Check if a host should be crawled, strict-ish version.
+
+        This checks for equality modulo an initial 'www.' component.
+         """
+        if host.startswith('www.'):
+            if host[4:] in self.root_domains:
+                return True
+        else:
+            if 'www.' + host in self.root_domains:
+                return True
+        return False
+
+    def _host_okay_lenient(self, host: str) -> bool:
+        """Check if a host should be crawled, lenient version.
+
+        This compares the last two components of the host.
+        """
+        parts = host.split('.')
+        if len(parts) > 2:
+            host = '.'.join(parts[-2:])
+        return host in self.root_domains
+
+    def add_url(self, url: str, max_redirect: int = None) -> bool:
+        """Add a URL to the todo list if not seen before."""
+        if self.exclude and re.search(self.exclude, url):
+            return False
+        parsed = urllib.parse.urlparse(url)
+        if parsed.scheme not in ('http', 'https'):
+            self.log(2, 'skipping non-http scheme in', url)
+            return False
+        host = parsed.hostname
+        if not self.host_okay(host):
+            self.log(2, 'skipping non-root host in', url)
+            return False
+        if max_redirect is None:
+            max_redirect = self.max_redirect
+        if url in self.todo or url in self.busy or url in self.done:
+            return False
+        self.log(1, 'adding', url, max_redirect)
+        self.todo[url] = max_redirect
+        return True
+
+    async def crawl(self) -> None:
+        """Run the crawler until all finished."""
+        with (await self.termination):
+            while self.todo or self.busy:
+                if self.todo:
+                    url, max_redirect = self.todo.popitem()
+                    fetcher = Fetcher(self.log, url,
+                                      crawler=self,
+                                      max_redirect=max_redirect,
+                                      max_tries=self.max_tries,
+                                      )
+                    self.busy[url] = fetcher
+                    fetcher.task = asyncio.Task(self.fetch(fetcher))
+                else:
+                    await self.termination.wait()
+        self.t1 = time.time()
+
+    async def fetch(self, fetcher: Fetcher) -> None:
+        """Call the Fetcher's fetch(), with a limit on concurrency.
+
+        Once this returns, move the fetcher from busy to done.
+        """
+        url = fetcher.url
+        with (await self.governor):
+            try:
+                await fetcher.fetch()  # Fetcher gonna fetch.
+            finally:
+                # Force GC of the task, so the error is logged.
+                fetcher.task = None
+        with (await self.termination):
+            self.done[url] = fetcher
+            del self.busy[url]
+            self.termination.notify()
+
+    def report(self, file: IO[str] = None) -> None:
+        """Print a report on all completed URLs."""
+        if self.t1 is None:
+            self.t1 = time.time()
+        dt = self.t1 - self.t0
+        if dt and self.max_tasks:
+            speed = len(self.done) / dt / self.max_tasks
+        else:
+            speed = 0
+        stats = Stats()
+        print('*** Report ***', file=file)
+        try:
+            show = []  # type: List[Tuple[str, Fetcher]]
+            show.extend(self.done.items())
+            show.extend(self.busy.items())
+            show.sort()
+            for url, fetcher in show:
+                fetcher.report(stats, file=file)
+        except KeyboardInterrupt:
+            print('\nInterrupted', file=file)
+        print('Finished', len(self.done),
+              'urls in %.3f secs' % dt,
+              '(max_tasks=%d)' % self.max_tasks,
+              '(%.3f urls/sec/task)' % speed,
+              file=file)
+        stats.report(file=file)
+        print('Todo:', len(self.todo), file=file)
+        print('Busy:', len(self.busy), file=file)
+        print('Done:', len(self.done), file=file)
+        print('Date:', time.ctime(), 'local time', file=file)
+
+
+def main() -> None:
+    """Main program.
+
+    Parse arguments, set up event loop, run crawler, print report.
+    """
+    args = ARGS.parse_args()
+    if not args.roots:
+        print('Use --help for command line help')
+        return
+
+    log = Logger(args.level)
+
+    if args.iocp:
+        if sys.platform == 'win32':
+            from asyncio import ProactorEventLoop
+            loop = ProactorEventLoop()  # type: ignore
+            asyncio.set_event_loop(loop)
+        else:
+            assert False
+    elif args.select:
+        loop = asyncio.SelectorEventLoop()  # type: ignore
+        asyncio.set_event_loop(loop)
+    else:
+        loop = asyncio.get_event_loop()
+
+    roots = {fix_url(root) for root in args.roots}
+
+    crawler = Crawler(log,
+                      roots, exclude=args.exclude,
+                      strict=args.strict,
+                      max_redirect=args.max_redirect,
+                      max_tries=args.max_tries,
+                      max_tasks=args.max_tasks,
+                      max_pool=args.max_pool,
+                      )
+    try:
+        loop.run_until_complete(crawler.crawl())  # Crawler gonna crawl.
+    except KeyboardInterrupt:
+        sys.stderr.flush()
+        print('\nInterrupted\n')
+    finally:
+        crawler.report()
+        crawler.close()
+        loop.close()
+
+
+if __name__ == '__main__':
+    logging.basicConfig(level=logging.INFO) # type: ignore
+    main()
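
The ConnectionPool/Crawler classes above are normally driven by main(), but the same API can be exercised directly; a minimal sketch, assuming the file is importable as crawl2 and the root URL is reachable:

    import asyncio
    import crawl2  # assumption: test-data/samples/crawl2.py is on the import path

    def run_once(root: str) -> None:
        log = crawl2.Logger(level=1)
        crawler = crawl2.Crawler(log, roots={crawl2.fix_url(root)}, max_tasks=5)
        loop = asyncio.get_event_loop()
        try:
            # crawl() keeps scheduling Fetcher tasks until todo and busy are both empty.
            loop.run_until_complete(crawler.crawl())
        finally:
            crawler.report()
            crawler.close()
            loop.close()

    run_once('example.com')
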
diff --git a/test-data/samples/dict.py b/test-data/samples/dict.py
new file mode 100644
index 0000000..d74a5b5
--- /dev/null
+++ b/test-data/samples/dict.py
@@ -0,0 +1,8 @@
+import typing
+prices = {'apple': 0.40, 'banana': 0.50}
+my_purchase = {
+    'apple': 1,
+    'banana': 6}
+grocery_bill = sum(prices[fruit] * my_purchase[fruit]
+                   for fruit in my_purchase)
+print('I owe the grocer $%.2f' % grocery_bill)
diff --git a/test-data/samples/fib.py b/test-data/samples/fib.py
new file mode 100644
index 0000000..26248c8
--- /dev/null
+++ b/test-data/samples/fib.py
@@ -0,0 +1,5 @@
+import typing
+parents, babies = (1, 1)
+while babies < 100:
+    print('This generation has {0} babies'.format(babies))
+    parents, babies = (babies, parents + babies)
diff --git a/test-data/samples/files.py b/test-data/samples/files.py
new file mode 100644
index 0000000..f540c7c
--- /dev/null
+++ b/test-data/samples/files.py
@@ -0,0 +1,14 @@
+# indent your Python code to put into an email
+import glob
+import typing
+# glob supports Unix style pathname extensions
+python_files = glob.glob('*.py')
+for file_name in sorted(python_files):
+    print('    ------' + file_name)
+
+    f = open(file_name)
+    for line in f:
+        print('    ' + line.rstrip())
+    f.close()
+
+    print()
diff --git a/test-data/samples/for.py b/test-data/samples/for.py
new file mode 100644
index 0000000..f7eeed4
--- /dev/null
+++ b/test-data/samples/for.py
@@ -0,0 +1,4 @@
+import typing
+friends = ['john', 'pat', 'gary', 'michael']
+for i, name in enumerate(friends):
+    print("iteration {iteration} is {name}".format(iteration=i, name=name))
diff --git a/test-data/samples/generators.py b/test-data/samples/generators.py
new file mode 100644
index 0000000..9150c96
--- /dev/null
+++ b/test-data/samples/generators.py
@@ -0,0 +1,24 @@
+# Prime number sieve with generators
+
+import itertools
+from typing import Iterator
+
+
+def iter_primes() -> Iterator[int]:
+    # an iterator of all numbers between 2 and +infinity
+    numbers = itertools.count(2)
+
+    # generate primes forever
+    while True:
+        # get the first number from the iterator (always a prime)
+        prime = next(numbers)
+        yield prime
+
+        # this code iteratively builds up a chain of
+        # filters...slightly tricky, but ponder it a bit
+        numbers = filter(prime.__rmod__, numbers)
+
+for p in iter_primes():
+    if p > 1000:
+        break
+    print(p)
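
The comment above calls the chained filters slightly tricky; an unrolled sketch of the first few iterations may make the reasoning easier to follow (plain Python, no annotations needed):

    import itertools

    numbers = itertools.count(2)
    p = next(numbers)                        # 2
    numbers = filter(p.__rmod__, numbers)    # keeps n where n % 2 != 0
    p = next(numbers)                        # 3
    numbers = filter(p.__rmod__, numbers)    # additionally drops multiples of 3
    p = next(numbers)                        # 5
    print(p)
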
diff --git a/test-data/samples/greet.py b/test-data/samples/greet.py
new file mode 100644
index 0000000..47e7626
--- /dev/null
+++ b/test-data/samples/greet.py
@@ -0,0 +1,8 @@
+import typing
+
+
+def greet(name: str) -> None:
+    print('Hello', name)
+greet('Jack')
+greet('Jill')
+greet('Bob')
diff --git a/test-data/samples/guess.py b/test-data/samples/guess.py
new file mode 100644
index 0000000..d3f1cee
--- /dev/null
+++ b/test-data/samples/guess.py
@@ -0,0 +1,32 @@
+# "Guess the Number" Game (edited) from http://inventwithpython.com
+
+import random
+import typing
+
+guesses_made = 0
+
+name = input('Hello! What is your name?\n')
+
+number = random.randint(1, 20)
+print('Well, {0}, I am thinking of a number between 1 and 20.'.format(name))
+
+while guesses_made < 6:
+
+    guess = int(input('Take a guess: '))
+
+    guesses_made += 1
+
+    if guess < number:
+        print('Your guess is too low.')
+
+    if guess > number:
+        print('Your guess is too high.')
+
+    if guess == number:
+        break
+
+if guess == number:
+    print('Good job, {0}! You guessed my number in {1} guesses!'.format(
+          name, guesses_made))
+else:
+    print('Nope. The number I was thinking of was {0}'.format(number))
diff --git a/test-data/samples/hello.py b/test-data/samples/hello.py
new file mode 100644
index 0000000..6c0b2ca
--- /dev/null
+++ b/test-data/samples/hello.py
@@ -0,0 +1,2 @@
+import typing
+print('Hello, world')
diff --git a/test-data/samples/input.py b/test-data/samples/input.py
new file mode 100644
index 0000000..cca9233
--- /dev/null
+++ b/test-data/samples/input.py
@@ -0,0 +1,3 @@
+import typing
+name = input('What is your name?\n')
+print('Hi, %s.' % name)
diff --git a/test-data/samples/itertool.py b/test-data/samples/itertool.py
new file mode 100644
index 0000000..9ee2475
--- /dev/null
+++ b/test-data/samples/itertool.py
@@ -0,0 +1,16 @@
+from itertools import groupby
+import typing
+lines = '''
+This is the
+first paragraph.
+
+This is the second.
+'''.splitlines()
+# Use itertools.groupby and bool to return groups of
+# consecutive lines that either have content or don't.
+for has_chars, frags in groupby(lines, bool):
+    if has_chars:
+        print(' '.join(frags))
+# PRINTS:
+# This is the first paragraph.
+# This is the second.
diff --git a/test-data/samples/readme.txt b/test-data/samples/readme.txt
new file mode 100644
index 0000000..5889a8e
--- /dev/null
+++ b/test-data/samples/readme.txt
@@ -0,0 +1,25 @@
+Mypy Sample Programs
+--------------------
+
+The sample programs use static typing unless otherwise noted in comments.
+
+Original credits for sample programs:
+
+  fib.py - Python Wiki [1]
+  for.py - Python Wiki [1]
+  greet.py - Python Wiki [1]
+  hello.py - Python Wiki [1]
+  input.py - Python Wiki [1]
+  regexp.py - Python Wiki [1]
+  dict.py - Python Wiki [1]
+  cmdline.py - Python Wiki [1]
+  files.py - Python Wiki [1]
+  bottles.py - Python Wiki [1]
+  class.py - Python Wiki [1]
+  guess.py - Python Wiki [1]
+  generators.py - Python Wiki [1]
+  itertool.py - Python Wiki [1]
+
+The sample programs were ported to mypy by Jukka Lehtosalo.
+
+[1] http://wiki.python.org/moin/SimplePrograms
diff --git a/test-data/samples/regexp.py b/test-data/samples/regexp.py
new file mode 100644
index 0000000..6d8d799
--- /dev/null
+++ b/test-data/samples/regexp.py
@@ -0,0 +1,7 @@
+import typing
+import re
+for test_string in ['555-1212', 'ILL-EGAL']:
+    if re.match(r'^\d{3}-\d{4}$', test_string):
+        print(test_string, 'is a valid US local phone number')
+    else:
+        print(test_string, 'rejected')
diff --git a/test-data/stdlib-samples/3.2/base64.py b/test-data/stdlib-samples/3.2/base64.py
new file mode 100644
index 0000000..ef91964
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/base64.py
@@ -0,0 +1,411 @@
+#! /usr/bin/env python3
+
+"""RFC 3548: Base16, Base32, Base64 Data Encodings"""
+
+# Modified 04-Oct-1995 by Jack Jansen to use binascii module
+# Modified 30-Dec-2003 by Barry Warsaw to add full RFC 3548 support
+# Modified 22-May-2007 by Guido van Rossum to use bytes everywhere
+
+import re
+import struct
+import binascii
+
+from typing import Dict, List, AnyStr, IO
+
+
+__all__ = [
+    # Legacy interface exports traditional RFC 1521 Base64 encodings
+    'encode', 'decode', 'encodebytes', 'decodebytes',
+    # Generalized interface for other encodings
+    'b64encode', 'b64decode', 'b32encode', 'b32decode',
+    'b16encode', 'b16decode',
+    # Standard Base64 encoding
+    'standard_b64encode', 'standard_b64decode',
+    # Some common Base64 alternatives.  As referenced by RFC 3458, see thread
+    # starting at:
+    #
+    # http://zgp.org/pipermail/p2p-hackers/2001-September/000316.html
+    'urlsafe_b64encode', 'urlsafe_b64decode',
+    ]
+
+
+bytes_types = (bytes, bytearray)  # Types acceptable as binary data
+
+
+def _translate(s: bytes, altchars: Dict[AnyStr, bytes]) -> bytes:
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    translation = bytearray(range(256))
+    for k, v in altchars.items():
+        translation[ord(k)] = v[0]
+    return s.translate(translation)
+
+
+
+# Base64 encoding/decoding uses binascii
+
+def b64encode(s: bytes, altchars: bytes = None) -> bytes:
+    """Encode a byte string using Base64.
+
+    s is the byte string to encode.  Optional altchars must be a byte
+    string of length 2 which specifies an alternative alphabet for the
+    '+' and '/' characters.  This allows an application to
+    e.g. generate url or filesystem safe Base64 strings.
+
+    The encoded byte string is returned.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    # Strip off the trailing newline
+    encoded = binascii.b2a_base64(s)[:-1]
+    if altchars is not None:
+        if not isinstance(altchars, bytes_types):
+            raise TypeError("expected bytes, not %s"
+                            % altchars.__class__.__name__)
+        assert len(altchars) == 2, repr(altchars)
+        return _translate(encoded, {'+': altchars[0:1], '/': altchars[1:2]})
+    return encoded
+
+
+def b64decode(s: bytes, altchars: bytes = None,
+              validate: bool = False) -> bytes:
+    """Decode a Base64 encoded byte string.
+
+    s is the byte string to decode.  Optional altchars must be a
+    string of length 2 which specifies the alternative alphabet used
+    instead of the '+' and '/' characters.
+
+    The decoded string is returned.  A binascii.Error is raised if s is
+    incorrectly padded.
+
+    If validate is False (the default), non-base64-alphabet characters are
+    discarded prior to the padding check.  If validate is True,
+    non-base64-alphabet characters in the input result in a binascii.Error.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    if altchars is not None:
+        if not isinstance(altchars, bytes_types):
+            raise TypeError("expected bytes, not %s"
+                            % altchars.__class__.__name__)
+        assert len(altchars) == 2, repr(altchars)
+        s = _translate(s, {chr(altchars[0]): b'+', chr(altchars[1]): b'/'})
+    if validate and not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s):
+        raise binascii.Error('Non-base64 digit found')
+    return binascii.a2b_base64(s)
+
+
+def standard_b64encode(s: bytes) -> bytes:
+    """Encode a byte string using the standard Base64 alphabet.
+
+    s is the byte string to encode.  The encoded byte string is returned.
+    """
+    return b64encode(s)
+
+def standard_b64decode(s: bytes) -> bytes:
+    """Decode a byte string encoded with the standard Base64 alphabet.
+
+    s is the byte string to decode.  The decoded byte string is
+    returned.  binascii.Error is raised if the input is incorrectly
+    padded or if there are non-alphabet characters present in the
+    input.
+    """
+    return b64decode(s)
+
+def urlsafe_b64encode(s: bytes) -> bytes:
+    """Encode a byte string using a url-safe Base64 alphabet.
+
+    s is the byte string to encode.  The encoded byte string is
+    returned.  The alphabet uses '-' instead of '+' and '_' instead of
+    '/'.
+    """
+    return b64encode(s, b'-_')
+
+def urlsafe_b64decode(s: bytes) -> bytes:
+    """Decode a byte string encoded with the standard Base64 alphabet.
+
+    s is the byte string to decode.  The decoded byte string is
+    returned.  binascii.Error is raised if the input is incorrectly
+    padded or if there are non-alphabet characters present in the
+    input.
+
+    The alphabet uses '-' instead of '+' and '_' instead of '/'.
+    """
+    return b64decode(s, b'-_')
+
+
+
+# Base32 encoding/decoding must be done in Python
+_b32alphabet = {
+    0: b'A',  9: b'J', 18: b'S', 27: b'3',
+    1: b'B', 10: b'K', 19: b'T', 28: b'4',
+    2: b'C', 11: b'L', 20: b'U', 29: b'5',
+    3: b'D', 12: b'M', 21: b'V', 30: b'6',
+    4: b'E', 13: b'N', 22: b'W', 31: b'7',
+    5: b'F', 14: b'O', 23: b'X',
+    6: b'G', 15: b'P', 24: b'Y',
+    7: b'H', 16: b'Q', 25: b'Z',
+    8: b'I', 17: b'R', 26: b'2',
+    }
+
+_b32tab = [v[0] for k, v in sorted(_b32alphabet.items())]
+_b32rev = dict([(v[0], k) for k, v in _b32alphabet.items()])
+
+
+def b32encode(s: bytes) -> bytes:
+    """Encode a byte string using Base32.
+
+    s is the byte string to encode.  The encoded byte string is returned.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    quanta, leftover = divmod(len(s), 5)
+    # Pad the last quantum with zero bits if necessary
+    if leftover:
+        s = s + bytes(5 - leftover)  # Don't use += !
+        quanta += 1
+    encoded = bytes()
+    for i in range(quanta):
+        # c1 and c2 are 16 bits wide, c3 is 8 bits wide.  The intent of this
+        # code is to process the 40 bits in units of 5 bits.  So we take the 1
+        # leftover bit of c1 and tack it onto c2.  Then we take the 2 leftover
+        # bits of c2 and tack them onto c3.  The shifts and masks are intended
+        # to give us values of exactly 5 bits in width.
+        c1, c2, c3 = struct.unpack('!HHB', s[i*5:(i+1)*5]) # type: (int, int, int)
+        c2 += (c1 & 1) << 16 # 17 bits wide
+        c3 += (c2 & 3) << 8  # 10 bits wide
+        encoded += bytes([_b32tab[c1 >> 11],         # bits 1 - 5
+                          _b32tab[(c1 >> 6) & 0x1f], # bits 6 - 10
+                          _b32tab[(c1 >> 1) & 0x1f], # bits 11 - 15
+                          _b32tab[c2 >> 12],         # bits 16 - 20 (1 - 5)
+                          _b32tab[(c2 >> 7) & 0x1f], # bits 21 - 25 (6 - 10)
+                          _b32tab[(c2 >> 2) & 0x1f], # bits 26 - 30 (11 - 15)
+                          _b32tab[c3 >> 5],          # bits 31 - 35 (1 - 5)
+                          _b32tab[c3 & 0x1f],        # bits 36 - 40 (1 - 5)
+                          ])
+    # Adjust for any leftover partial quanta
+    if leftover == 1:
+        return encoded[:-6] + b'======'
+    elif leftover == 2:
+        return encoded[:-4] + b'===='
+    elif leftover == 3:
+        return encoded[:-3] + b'==='
+    elif leftover == 4:
+        return encoded[:-1] + b'='
+    return encoded
+
+
+def b32decode(s: bytes, casefold: bool = False, map01: bytes = None) -> bytes:
+    """Decode a Base32 encoded byte string.
+
+    s is the byte string to decode.  Optional casefold is a flag
+    specifying whether a lowercase alphabet is acceptable as input.
+    For security purposes, the default is False.
+
+    RFC 3548 allows for optional mapping of the digit 0 (zero) to the
+    letter O (oh), and for optional mapping of the digit 1 (one) to
+    either the letter I (eye) or letter L (el).  The optional argument
+    map01 when not None, specifies which letter the digit 1 should be
+    mapped to (when map01 is not None, the digit 0 is always mapped to
+    the letter O).  For security purposes the default is None, so that
+    0 and 1 are not allowed in the input.
+
+    The decoded byte string is returned.  binascii.Error is raised if
+    the input is incorrectly padded or if there are non-alphabet
+    characters present in the input.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    quanta, leftover = divmod(len(s), 8)
+    if leftover:
+        raise binascii.Error('Incorrect padding')
+    # Handle section 2.4 zero and one mapping.  The flag map01 will be either
+    # False, or the character to map the digit 1 (one) to.  It should be
+    # either L (el) or I (eye).
+    if map01 is not None:
+        if not isinstance(map01, bytes_types):
+            raise TypeError("expected bytes, not %s" % map01.__class__.__name__)
+        assert len(map01) == 1, repr(map01)
+        s = _translate(s, {b'0': b'O', b'1': map01})
+    if casefold:
+        s = s.upper()
+    # Strip off pad characters from the right.  We need to count the pad
+    # characters because this will tell us how many null bytes to remove from
+    # the end of the decoded string.
+    padchars = 0
+    mo = re.search(b'(?P<pad>[=]*)$', s)
+    if mo:
+        padchars = len(mo.group('pad'))
+        if padchars > 0:
+            s = s[:-padchars]
+    # Now decode the full quanta
+    parts = []  # type: List[bytes]
+    acc = 0
+    shift = 35
+    for c in s:
+        val = _b32rev.get(c)
+        if val is None:
+            raise TypeError('Non-base32 digit found')
+        acc += _b32rev[c] << shift
+        shift -= 5
+        if shift < 0:
+            parts.append(binascii.unhexlify(bytes('%010x' % acc, "ascii")))
+            acc = 0
+            shift = 35
+    # Process the last, partial quanta
+    last = binascii.unhexlify(bytes('%010x' % acc, "ascii"))
+    if padchars == 0:
+        last = b''                      # No characters
+    elif padchars == 1:
+        last = last[:-1]
+    elif padchars == 3:
+        last = last[:-2]
+    elif padchars == 4:
+        last = last[:-3]
+    elif padchars == 6:
+        last = last[:-4]
+    else:
+        raise binascii.Error('Incorrect padding')
+    parts.append(last)
+    return b''.join(parts)
+
+
+
+# RFC 3548, Base 16 Alphabet specifies uppercase, but hexlify() returns
+# lowercase.  The RFC also recommends against accepting input case
+# insensitively.
+def b16encode(s: bytes) -> bytes:
+    """Encode a byte string using Base16.
+
+    s is the byte string to encode.  The encoded byte string is returned.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    return binascii.hexlify(s).upper()
+
+
+def b16decode(s: bytes, casefold: bool = False) -> bytes:
+    """Decode a Base16 encoded byte string.
+
+    s is the byte string to decode.  Optional casefold is a flag
+    specifying whether a lowercase alphabet is acceptable as input.
+    For security purposes, the default is False.
+
+    The decoded byte string is returned.  binascii.Error is raised if
+    s were incorrectly padded or if there are non-alphabet characters
+    present in the string.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    if casefold:
+        s = s.upper()
+    if re.search(b'[^0-9A-F]', s):
+        raise binascii.Error('Non-base16 digit found')
+    return binascii.unhexlify(s)
+
+
+
+# Legacy interface.  This code could be cleaned up since I don't believe
+# binascii has any line length limitations.  It just doesn't seem worth it
+# though.  The files should be opened in binary mode.
+
+MAXLINESIZE = 76 # Excluding the CRLF
+MAXBINSIZE = (MAXLINESIZE//4)*3
+
+def encode(input: IO[bytes], output: IO[bytes]) -> None:
+    """Encode a file; input and output are binary files."""
+    while True:
+        s = input.read(MAXBINSIZE)
+        if not s:
+            break
+        while len(s) < MAXBINSIZE:
+            ns = input.read(MAXBINSIZE-len(s))
+            if not ns:
+                break
+            s += ns
+        line = binascii.b2a_base64(s)
+        output.write(line)
+
+
+def decode(input: IO[bytes], output: IO[bytes]) -> None:
+    """Decode a file; input and output are binary files."""
+    while True:
+        line = input.readline()
+        if not line:
+            break
+        s = binascii.a2b_base64(line)
+        output.write(s)
+
+
+def encodebytes(s: bytes) -> bytes:
+    """Encode a bytestring into a bytestring containing multiple lines
+    of base-64 data."""
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    pieces = []  # type: List[bytes]
+    for i in range(0, len(s), MAXBINSIZE):
+        chunk = s[i : i + MAXBINSIZE]
+        pieces.append(binascii.b2a_base64(chunk))
+    return b"".join(pieces)
+
+def encodestring(s: bytes) -> bytes:
+    """Legacy alias of encodebytes()."""
+    import warnings
+    warnings.warn("encodestring() is a deprecated alias, use encodebytes()",
+                  DeprecationWarning, 2)
+    return encodebytes(s)
+
+
+def decodebytes(s: bytes) -> bytes:
+    """Decode a bytestring of base-64 data into a bytestring."""
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    return binascii.a2b_base64(s)
+
+def decodestring(s: bytes) -> bytes:
+    """Legacy alias of decodebytes()."""
+    import warnings
+    warnings.warn("decodestring() is a deprecated alias, use decodebytes()",
+                  DeprecationWarning, 2)
+    return decodebytes(s)
+
+
+# Usable as a script...
+def main() -> None:
+    """Small main program"""
+    import sys, getopt
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], 'deut')
+    except getopt.error as msg:
+        sys.stdout = sys.stderr
+        print(msg)
+        print("""usage: %s [-d|-e|-u|-t] [file|-]
+        -d, -u: decode
+        -e: encode (default)
+        -t: encode and decode string 'Aladdin:open sesame'"""%sys.argv[0])
+        sys.exit(2)
+    func = encode
+    for o, a in opts:
+        if o == '-e': func = encode
+        if o == '-d': func = decode
+        if o == '-u': func = decode
+        if o == '-t': test(); return
+    if args and args[0] != '-':
+        with open(args[0], 'rb') as f:
+            func(f, sys.stdout.buffer)
+    else:
+        func(sys.stdin.buffer, sys.stdout.buffer)
+
+
+def test() -> None:
+    s0 = b"Aladdin:open sesame"
+    print(repr(s0))
+    s1 = encodebytes(s0)
+    print(repr(s1))
+    s2 = decodebytes(s1)
+    print(repr(s2))
+    assert s0 == s2
+
+
+if __name__ == '__main__':
+    main()
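
The generalized interface defined above round-trips bytes through the Base64 alphabets; a short sketch of the expected behaviour (the same calls work with the standard-library base64 module that this sample mirrors):

    from base64 import b64encode, b64decode, urlsafe_b64encode, urlsafe_b64decode

    data = b'Aladdin:open sesame'
    encoded = b64encode(data)
    assert encoded == b'QWxhZGRpbjpvcGVuIHNlc2FtZQ=='
    assert b64decode(encoded) == data
    # The url-safe variants use '-' and '_' in place of '+' and '/' (via altchars).
    assert urlsafe_b64decode(urlsafe_b64encode(data)) == data
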
diff --git a/test-data/stdlib-samples/3.2/fnmatch.py b/test-data/stdlib-samples/3.2/fnmatch.py
new file mode 100644
index 0000000..ec27b90
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/fnmatch.py
@@ -0,0 +1,112 @@
+"""Filename matching with shell patterns.
+
+fnmatch(FILENAME, PATTERN) matches according to the local convention.
+fnmatchcase(FILENAME, PATTERN) always takes case in account.
+
+The functions operate by translating the pattern into a regular
+expression.  They cache the compiled regular expressions for speed.
+
+The function translate(PATTERN) returns a regular expression
+corresponding to PATTERN.  (It does not compile it.)
+"""
+import os
+import posixpath
+import re
+import functools
+
+from typing import Iterable, List, AnyStr, Any, Callable, Match
+
+__all__ = ["filter", "fnmatch", "fnmatchcase", "translate"]
+
+def fnmatch(name: AnyStr, pat: AnyStr) -> bool:
+    """Test whether FILENAME matches PATTERN.
+
+    Patterns are Unix shell style:
+
+    *       matches everything
+    ?       matches any single character
+    [seq]   matches any character in seq
+    [!seq]  matches any char not in seq
+
+    An initial period in FILENAME is not special.
+    Both FILENAME and PATTERN are first case-normalized
+    if the operating system requires it.
+    If you don't want this, use fnmatchcase(FILENAME, PATTERN).
+    """
+    name = os.path.normcase(name)
+    pat = os.path.normcase(pat)
+    return fnmatchcase(name, pat)
+
+@functools.lru_cache(maxsize=250)
+def _compile_pattern(pat: AnyStr,
+                     is_bytes: bool = False) -> Callable[[AnyStr],
+                                                         Match[AnyStr]]:
+    if isinstance(pat, bytes):
+        pat_str = str(pat, 'ISO-8859-1')
+        res_str = translate(pat_str)
+        res = bytes(res_str, 'ISO-8859-1')
+    else:
+        res = translate(pat)
+    return re.compile(res).match
+
+def filter(names: Iterable[AnyStr], pat: AnyStr) -> List[AnyStr]:
+    """Return the subset of the list NAMES that match PAT."""
+    result = []  # type: List[AnyStr]
+    pat = os.path.normcase(pat)
+    match = _compile_pattern(pat, isinstance(pat, bytes))
+    if os.path is posixpath:
+        # normcase on posix is NOP. Optimize it away from the loop.
+        for name in names:
+            if match(name):
+                result.append(name)
+    else:
+        for name in names:
+            if match(os.path.normcase(name)):
+                result.append(name)
+    return result
+
+def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool:
+    """Test whether FILENAME matches PATTERN, including case.
+
+    This is a version of fnmatch() which doesn't case-normalize
+    its arguments.
+    """
+    match = _compile_pattern(pat, isinstance(pat, bytes))
+    return match(name) is not None
+
+def translate(pat: str) -> str:
+    """Translate a shell PATTERN to a regular expression.
+
+    There is no way to quote meta-characters.
+    """
+
+    i, n = 0, len(pat)
+    res = ''
+    while i < n:
+        c = pat[i]
+        i = i+1
+        if c == '*':
+            res = res + '.*'
+        elif c == '?':
+            res = res + '.'
+        elif c == '[':
+            j = i
+            if j < n and pat[j] == '!':
+                j = j+1
+            if j < n and pat[j] == ']':
+                j = j+1
+            while j < n and pat[j] != ']':
+                j = j+1
+            if j >= n:
+                res = res + '\\['
+            else:
+                stuff = pat[i:j].replace('\\','\\\\')
+                i = j+1
+                if stuff[0] == '!':
+                    stuff = '^' + stuff[1:]
+                elif stuff[0] == '^':
+                    stuff = '\\' + stuff
+                res = '%s[%s]' % (res, stuff)
+        else:
+            res = res + re.escape(c)
+    return res + '\Z(?ms)'
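
translate() above rewrites shell wildcards as a regular expression, which fnmatch()/fnmatchcase() then apply; a small sketch of the expected matches (importing from the standard-library fnmatch, which this sample mirrors):

    from fnmatch import fnmatch, fnmatchcase, translate

    assert fnmatch('data_01.py', 'data_??.py')   # '?' matches any single character
    assert fnmatchcase('cat.txt', '[bc]at.*')    # '[seq]' matches any character in seq
    # The exact regex text varies between versions; the 3.2 code above would
    # produce something like '.*\.py\Z(?ms)'.
    print(translate('*.py'))
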
diff --git a/test-data/stdlib-samples/3.2/genericpath.py b/test-data/stdlib-samples/3.2/genericpath.py
new file mode 100644
index 0000000..bd1fddf
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/genericpath.py
@@ -0,0 +1,112 @@
+"""
+Path operations common to more than one OS
+Do not use directly.  The OS specific modules import the appropriate
+functions from this module themselves.
+"""
+import os
+import stat
+
+from typing import (
+    Any as Any_, List as List_, AnyStr as AnyStr_, Tuple as Tuple_
+)
+
+__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
+           'getsize', 'isdir', 'isfile']
+
+
+# Does a path exist?
+# This is false for dangling symbolic links on systems that support them.
+def exists(path: AnyStr_) -> bool:
+    """Test whether a path exists.  Returns False for broken symbolic links"""
+    try:
+        os.stat(path)
+    except os.error:
+        return False
+    return True
+
+
+# This follows symbolic links, so both islink() and isfile() can be true
+# for the same path on systems that support symlinks
+def isfile(path: AnyStr_) -> bool:
+    """Test whether a path is a regular file"""
+    try:
+        st = os.stat(path)
+    except os.error:
+        return False
+    return stat.S_ISREG(st.st_mode)
+
+
+# Is a path a directory?
+# This follows symbolic links, so both islink() and isdir()
+# can be true for the same path on systems that support symlinks
+def isdir(s: AnyStr_) -> bool:
+    """Return true if the pathname refers to an existing directory."""
+    try:
+        st = os.stat(s)
+    except os.error:
+        return False
+    return stat.S_ISDIR(st.st_mode)
+
+
+def getsize(filename: AnyStr_) -> int:
+    """Return the size of a file, reported by os.stat()."""
+    return os.stat(filename).st_size
+
+
+def getmtime(filename: AnyStr_) -> float:
+    """Return the last modification time of a file, reported by os.stat()."""
+    return os.stat(filename).st_mtime
+
+
+def getatime(filename: AnyStr_) -> float:
+    """Return the last access time of a file, reported by os.stat()."""
+    return os.stat(filename).st_atime
+
+
+def getctime(filename: AnyStr_) -> float:
+    """Return the metadata change time of a file, reported by os.stat()."""
+    return os.stat(filename).st_ctime
+
+
+# Return the longest prefix of all list elements.
+def commonprefix(m: List_[Any_]) -> Any_:
+    "Given a list of pathnames, returns the longest common leading component"
+    if not m: return ''
+    s1 = min(m)
+    s2 = max(m)
+    for i, c in enumerate(s1):
+        if c != s2[i]:
+            return s1[:i]
+    return s1
+
+
+# Split a path in root and extension.
+# The extension is everything starting at the last dot in the last
+# pathname component; the root is everything before that.
+# It is always true that root + ext == p.
+
+# Generic implementation of splitext, to be parametrized with
+# the separators
+def _splitext(p: AnyStr_, sep: AnyStr_, altsep: AnyStr_,
+              extsep: AnyStr_) -> Tuple_[AnyStr_, AnyStr_]:
+    """Split the extension from a pathname.
+
+    Extension is everything from the last dot to the end, ignoring
+    leading dots.  Returns "(root, ext)"; ext may be empty."""
+    # NOTE: This code must work for text and bytes strings.
+
+    sepIndex = p.rfind(sep)
+    if altsep:
+        altsepIndex = p.rfind(altsep)
+        sepIndex = max(sepIndex, altsepIndex)
+
+    dotIndex = p.rfind(extsep)
+    if dotIndex > sepIndex:
+        # skip all leading dots
+        filenameIndex = sepIndex + 1
+        while filenameIndex < dotIndex:
+            if p[filenameIndex:filenameIndex+1] != extsep:
+                return p[:dotIndex], p[dotIndex:]
+            filenameIndex += 1
+
+    return p, p[:0]
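+
+# A small sketch of the rule implemented above (posixpath-style arguments:
+# sep='/', altsep=None which is simply falsy, extsep='.'):
+#
+#     _splitext('/tmp/archive.tar.gz', '/', None, '.')  # ('/tmp/archive.tar', '.gz')
+#     _splitext('/tmp/.bashrc', '/', None, '.')         # ('/tmp/.bashrc', '') -- leading dot kept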
diff --git a/test-data/stdlib-samples/3.2/getopt.py b/test-data/stdlib-samples/3.2/getopt.py
new file mode 100644
index 0000000..32f5bce
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/getopt.py
@@ -0,0 +1,220 @@
+"""Parser for command line options.
+
+This module helps scripts to parse the command line arguments in
+sys.argv.  It supports the same conventions as the Unix getopt()
+function (including the special meanings of arguments of the form `-'
+and `--').  Long options similar to those supported by GNU software
+may be used as well via an optional third argument.  This module
+provides two functions and an exception:
+
+getopt() -- Parse command line options
+gnu_getopt() -- Like getopt(), but allow option and non-option arguments
+to be intermixed.
+GetoptError -- exception (class) raised with 'opt' attribute, which is the
+option involved with the exception.
+"""
+
+# Long option support added by Lars Wirzenius <liw at iki.fi>.
+#
+# Gerrit Holl <gerrit at nl.linux.org> moved the string-based exceptions
+# to class-based exceptions.
+#
+# Peter Åstrand <astrand at lysator.liu.se> added gnu_getopt().
+#
+# TODO for gnu_getopt():
+#
+# - GNU getopt_long_only mechanism
+# - allow the caller to specify ordering
+# - RETURN_IN_ORDER option
+# - GNU extension with '-' as first character of option string
+# - optional arguments, specified by double colons
+# - an option string with a W followed by semicolon should
+#   treat "-W foo" as "--foo"
+
+__all__ = ["GetoptError","error","getopt","gnu_getopt"]
+
+import os
+
+from typing import List, Tuple, Iterable
+
+class GetoptError(Exception):
+    opt = ''
+    msg = ''
+    def __init__(self, msg: str, opt: str = '') -> None:
+        self.msg = msg
+        self.opt = opt
+        Exception.__init__(self, msg, opt)
+
+    def __str__(self) -> str:
+        return self.msg
+
+error = GetoptError # backward compatibility
+
+def getopt(args: List[str], shortopts: str,
+           longopts: Iterable[str]  =  []) -> Tuple[List[Tuple[str, str]],
+                                                    List[str]]:
+    """getopt(args, options[, long_options]) -> opts, args
+
+    Parses command line options and parameter list.  args is the
+    argument list to be parsed, without the leading reference to the
+    running program.  Typically, this means "sys.argv[1:]".  shortopts
+    is the string of option letters that the script wants to
+    recognize, with options that require an argument followed by a
+    colon (i.e., the same format that Unix getopt() uses).  If
+    specified, longopts is a list of strings with the names of the
+    long options which should be supported.  The leading '--'
+    characters should not be included in the option name.  Options
+    which require an argument should be followed by an equal sign
+    ('=').
+
+    The return value consists of two elements: the first is a list of
+    (option, value) pairs; the second is the list of program arguments
+    left after the option list was stripped (this is a trailing slice
+    of the first argument).  Each option-and-value pair returned has
+    the option as its first element, prefixed with a hyphen (e.g.,
+    '-x'), and the option argument as its second element, or an empty
+    string if the option has no argument.  The options occur in the
+    list in the same order in which they were found, thus allowing
+    multiple occurrences.  Long and short options may be mixed.
+
+    """
+
+    opts = []  # type: List[Tuple[str, str]]
+    if isinstance(longopts, str):
+        longopts = [longopts]
+    else:
+        longopts = list(longopts)
+    while args and args[0].startswith('-') and args[0] != '-':
+        if args[0] == '--':
+            args = args[1:]
+            break
+        if args[0].startswith('--'):
+            opts, args = do_longs(opts, args[0][2:], longopts, args[1:])
+        else:
+            opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:])
+
+    return opts, args
+
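+# A minimal sketch of the return shape documented above (argument values are
+# purely illustrative):
+#
+#     getopt(['-a', '1', '--beta', 'rest'], 'a:b', ['beta'])
+#     # -> ([('-a', '1'), ('--beta', '')], ['rest'])
+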
+def gnu_getopt(args: List[str], shortopts: str,
+               longopts: Iterable[str]  =  []) -> Tuple[List[Tuple[str, str]],
+                                                        List[str]]:
+    """getopt(args, options[, long_options]) -> opts, args
+
+    This function works like getopt(), except that GNU style scanning
+    mode is used by default. This means that option and non-option
+    arguments may be intermixed. The getopt() function stops
+    processing options as soon as a non-option argument is
+    encountered.
+
+    If the first character of the option string is `+', or if the
+    environment variable POSIXLY_CORRECT is set, then option
+    processing stops as soon as a non-option argument is encountered.
+
+    """
+
+    opts = []  # type: List[Tuple[str, str]]
+    prog_args = []  # type: List[str]
+    if isinstance(longopts, str):
+        longopts = [longopts]
+    else:
+        longopts = list(longopts)
+
+    # Allow options after non-option arguments?
+    if shortopts.startswith('+'):
+        shortopts = shortopts[1:]
+        all_options_first = True
+    elif os.environ.get("POSIXLY_CORRECT"):
+        all_options_first = True
+    else:
+        all_options_first = False
+
+    while args:
+        if args[0] == '--':
+            prog_args += args[1:]
+            break
+
+        if args[0][:2] == '--':
+            opts, args = do_longs(opts, args[0][2:], longopts, args[1:])
+        elif args[0][:1] == '-' and args[0] != '-':
+            opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:])
+        else:
+            if all_options_first:
+                prog_args += args
+                break
+            else:
+                prog_args.append(args[0])
+                args = args[1:]
+
+    return opts, prog_args
+
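+# Contrast with getopt() above: GNU-style scanning keeps consuming options
+# after non-option arguments (assuming POSIXLY_CORRECT is not set and the
+# option string does not start with '+'); values are illustrative:
+#
+#     gnu_getopt(['x', '-a', '1'], 'a:')   # -> ([('-a', '1')], ['x'])
+#     getopt(['x', '-a', '1'], 'a:')       # -> ([], ['x', '-a', '1'])
+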
+def do_longs(opts: List[Tuple[str, str]], opt: str,
+             longopts: List[str],
+             args: List[str]) -> Tuple[List[Tuple[str, str]], List[str]]:
+    try:
+        i = opt.index('=')
+    except ValueError:
+        optarg = None # type: str
+    else:
+        opt, optarg = opt[:i], opt[i+1:]
+
+    has_arg, opt = long_has_args(opt, longopts)
+    if has_arg:
+        if optarg is None:
+            if not args:
+                raise GetoptError('option --%s requires argument' % opt, opt)
+            optarg, args = args[0], args[1:]
+    elif optarg is not None:
+        raise GetoptError('option --%s must not have an argument' % opt, opt)
+    opts.append(('--' + opt, optarg or ''))
+    return opts, args
+
+# Return:
+#   has_arg?
+#   full option name
+def long_has_args(opt: str, longopts: List[str]) -> Tuple[bool, str]:
+    possibilities = [o for o in longopts if o.startswith(opt)]
+    if not possibilities:
+        raise GetoptError('option --%s not recognized' % opt, opt)
+    # Is there an exact match?
+    if opt in possibilities:
+        return False, opt
+    elif opt + '=' in possibilities:
+        return True, opt
+    # No exact match, so better be unique.
+    if len(possibilities) > 1:
+        # XXX since possibilities contains all valid continuations, might be
+        # nice to work them into the error msg
+        raise GetoptError('option --%s not a unique prefix' % opt, opt)
+    assert len(possibilities) == 1
+    unique_match = possibilities[0]
+    has_arg = unique_match.endswith('=')
+    if has_arg:
+        unique_match = unique_match[:-1]
+    return has_arg, unique_match
+
+def do_shorts(opts: List[Tuple[str, str]], optstring: str,
+              shortopts: str, args: List[str]) -> Tuple[List[Tuple[str, str]],
+                                                        List[str]]:
+    while optstring != '':
+        opt, optstring = optstring[0], optstring[1:]
+        if short_has_arg(opt, shortopts):
+            if optstring == '':
+                if not args:
+                    raise GetoptError('option -%s requires argument' % opt,
+                                      opt)
+                optstring, args = args[0], args[1:]
+            optarg, optstring = optstring, ''
+        else:
+            optarg = ''
+        opts.append(('-' + opt, optarg))
+    return opts, args
+
+def short_has_arg(opt: str, shortopts: str) -> bool:
+    for i in range(len(shortopts)):
+        if opt == shortopts[i] != ':':
+            return shortopts.startswith(':', i+1)
+    raise GetoptError('option -%s not recognized' % opt, opt)
+
+if __name__ == '__main__':
+    import sys
+    print(getopt(sys.argv[1:], "a:b", ["alpha=", "beta"]))
diff --git a/test-data/stdlib-samples/3.2/glob.py b/test-data/stdlib-samples/3.2/glob.py
new file mode 100644
index 0000000..0f3d5f5
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/glob.py
@@ -0,0 +1,84 @@
+"""Filename globbing utility."""
+
+import os
+import re
+import fnmatch
+
+from typing import List, Iterator, Iterable, Any, AnyStr
+
+__all__ = ["glob", "iglob"]
+
+def glob(pathname: AnyStr) -> List[AnyStr]:
+    """Return a list of paths matching a pathname pattern.
+
+    The pattern may contain simple shell-style wildcards a la fnmatch.
+
+    """
+    return list(iglob(pathname))
+
+def iglob(pathname: AnyStr) -> Iterator[AnyStr]:
+    """Return an iterator which yields the paths matching a pathname pattern.
+
+    The pattern may contain simple shell-style wildcards a la fnmatch.
+
+    """
+    if not has_magic(pathname):
+        if os.path.lexists(pathname):
+            yield pathname
+        return
+    dirname, basename = os.path.split(pathname)
+    if not dirname:
+        for name in glob1(None, basename):
+            yield name
+        return
+    if has_magic(dirname):
+        dirs = iglob(dirname) # type: Iterable[AnyStr]
+    else:
+        dirs = [dirname]
+    if has_magic(basename):
+        glob_in_dir = glob1 # type: Any
+    else:
+        glob_in_dir = glob0
+    for dirname in dirs:
+        for name in glob_in_dir(dirname, basename):
+            yield os.path.join(dirname, name)
+
+# These 2 helper functions non-recursively glob inside a literal directory.
+# They return a list of basenames. `glob1` accepts a pattern while `glob0`
+# takes a literal basename (so it only has to check for its existence).
+
+def glob1(dirname: AnyStr, pattern: AnyStr) -> List[AnyStr]:
+    if not dirname:
+        if isinstance(pattern, bytes):
+            dirname = bytes(os.curdir, 'ASCII')
+        else:
+            dirname = os.curdir
+    try:
+        names = os.listdir(dirname)
+    except os.error:
+        return []
+    if pattern[0] != '.':
+        names = [x for x in names if x[0] != '.']
+    return fnmatch.filter(names, pattern)
+
+def glob0(dirname: AnyStr, basename: AnyStr) -> List[AnyStr]:
+    if basename == '':
+        # `os.path.split()` returns an empty basename for paths ending with a
+        # directory separator.  'q*x/' should match only directories.
+        if os.path.isdir(dirname):
+            return [basename]
+    else:
+        if os.path.lexists(os.path.join(dirname, basename)):
+            return [basename]
+    return []
+
+
+magic_check = re.compile('[*?[]')
+magic_check_bytes = re.compile(b'[*?[]')
+
+def has_magic(s: AnyStr) -> bool:
+    if isinstance(s, bytes):
+        match = magic_check_bytes.search(s)
+    else:
+        match = magic_check.search(s)
+    return match is not None
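+
+# How the helpers above cooperate, sketched on made-up paths: has_magic()
+# decides whether a component needs matching, iglob() recurses on the dirname,
+# and glob1()/glob0() do the per-directory work.
+#
+#     has_magic('src/*.py')      # True  -> glob1() filters os.listdir('src')
+#     has_magic('src/setup.py')  # False -> glob0() only checks existence
+#     glob('src/*.py')           # e.g. ['src/a.py', 'src/b.py'] if such files exist;
+#                                # names starting with '.' are skipped unless the
+#                                # pattern itself starts with '.'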
diff --git a/test-data/stdlib-samples/3.2/incomplete/logging/__init__.py b/test-data/stdlib-samples/3.2/incomplete/logging/__init__.py
new file mode 100644
index 0000000..aa861eb
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/incomplete/logging/__init__.py
@@ -0,0 +1,1873 @@
+# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose and without fee is hereby granted,
+# provided that the above copyright notice appear in all copies and that
+# both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of Vinay Sajip
+# not be used in advertising or publicity pertaining to distribution
+# of the software without specific, written prior permission.
+# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
+# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
+# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
+# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
+# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Logging package for Python. Based on PEP 282 and comments thereto in
+comp.lang.python, and influenced by Apache's log4j system.
+
+Copyright (C) 2001-2011 Vinay Sajip. All Rights Reserved.
+
+To use, simply 'import logging' and log away!
+"""
+
+import sys, os, time, io, traceback, warnings, weakref
+from string import Template
+from typing import Any
+
+__all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR',
+           'FATAL', 'FileHandler', 'Filter', 'Formatter', 'Handler', 'INFO',
+           'LogRecord', 'Logger', 'LoggerAdapter', 'NOTSET', 'NullHandler',
+           'StreamHandler', 'WARN', 'WARNING', 'addLevelName', 'basicConfig',
+           'captureWarnings', 'critical', 'debug', 'disable', 'error',
+           'exception', 'fatal', 'getLevelName', 'getLogger', 'getLoggerClass',
+           'info', 'log', 'makeLogRecord', 'setLoggerClass', 'warn', 'warning',
+           'getLogRecordFactory', 'setLogRecordFactory', 'lastResort']
+
+import codecs
+
+import _thread as thread
+import threading
+
+__author__  = "Vinay Sajip <vinay_sajip at red-dove.com>"
+__status__  = "production"
+__version__ = "0.5.1.2"
+__date__    = "07 February 2010"
+
+#---------------------------------------------------------------------------
+#   Miscellaneous module data
+#---------------------------------------------------------------------------
+
+#
+# _srcfile is used when walking the stack to check when we've got the first
+# caller stack frame.
+#
+if hasattr(sys, 'frozen'): #support for py2exe
+    _srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:])
+else:
+    _srcfile = __file__
+_srcfile = os.path.normcase(_srcfile)
+
+# next bit filched from 1.5.2's inspect.py
+def _currentframe():
+    """Return the frame object for the caller's stack frame."""
+    try:
+        raise Exception
+    except:
+        return sys.exc_info()[2].tb_frame.f_back
+currentframe = _currentframe
+
+if hasattr(sys, '_getframe'): currentframe = lambda: sys._getframe(3)
+# done filching
+
+# _srcfile is only used in conjunction with sys._getframe().
+# To provide compatibility with older versions of Python, set _srcfile
+# to None if _getframe() is not available; this value will prevent
+# findCaller() from being called.
+#if not hasattr(sys, "_getframe"):
+#    _srcfile = None
+
+#
+#_startTime is used as the base when calculating the relative time of events
+#
+_startTime = time.time()
+
+#
+#raiseExceptions is used to see if exceptions during handling should be
+#propagated
+#
+raiseExceptions = 1
+
+#
+# If you don't want threading information in the log, set this to zero
+#
+logThreads = 1
+
+#
+# If you don't want multiprocessing information in the log, set this to zero
+#
+logMultiprocessing = 1
+
+#
+# If you don't want process information in the log, set this to zero
+#
+logProcesses = 1
+
+#---------------------------------------------------------------------------
+#   Level related stuff
+#---------------------------------------------------------------------------
+#
+# Default levels and level names, these can be replaced with any positive set
+# of values having corresponding names. There is a pseudo-level, NOTSET, which
+# is only really there as a lower limit for user-defined levels. Handlers and
+# loggers are initialized with NOTSET so that they will log all messages, even
+# at user-defined levels.
+#
+
+CRITICAL = 50
+FATAL = CRITICAL
+ERROR = 40
+WARNING = 30
+WARN = WARNING
+INFO = 20
+DEBUG = 10
+NOTSET = 0
+
+_levelNames = {
+    CRITICAL : 'CRITICAL',
+    ERROR : 'ERROR',
+    WARNING : 'WARNING',
+    INFO : 'INFO',
+    DEBUG : 'DEBUG',
+    NOTSET : 'NOTSET',
+    'CRITICAL' : CRITICAL,
+    'ERROR' : ERROR,
+    'WARN' : WARNING,
+    'WARNING' : WARNING,
+    'INFO' : INFO,
+    'DEBUG' : DEBUG,
+    'NOTSET' : NOTSET,
+}
+
+def getLevelName(level):
+    """
+    Return the textual representation of logging level 'level'.
+
+    If the level is one of the predefined levels (CRITICAL, ERROR, WARNING,
+    INFO, DEBUG) then you get the corresponding string. If you have
+    associated levels with names using addLevelName then the name you have
+    associated with 'level' is returned.
+
+    If a numeric value corresponding to one of the defined levels is passed
+    in, the corresponding string representation is returned.
+
+    Otherwise, the string "Level %s" % level is returned.
+    """
+    return _levelNames.get(level, ("Level %s" % level))
+
+def addLevelName(level, levelName):
+    """
+    Associate 'levelName' with 'level'.
+
+    This is used when converting levels to text during message formatting.
+    """
+    _acquireLock()
+    try:    #unlikely to cause an exception, but you never know...
+        _levelNames[level] = levelName
+        _levelNames[levelName] = level
+    finally:
+        _releaseLock()
+
+def _checkLevel(level):
+    if isinstance(level, int):
+        rv = level
+    elif str(level) == level:
+        if level not in _levelNames:
+            raise ValueError("Unknown level: %r" % level)
+        rv = _levelNames[level]
+    else:
+        raise TypeError("Level not an integer or a valid string: %r" % level)
+    return rv
+
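+# A quick sketch of how the level table above behaves (values follow the
+# defaults defined in this module):
+#
+#     getLevelName(WARNING)     # 'WARNING'
+#     getLevelName('WARNING')   # 30, the mapping works in both directions
+#     getLevelName(35)          # 'Level 35'
+#     _checkLevel('DEBUG')      # 10
+#     _checkLevel(15)           # 15
+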
+#---------------------------------------------------------------------------
+#   Thread-related stuff
+#---------------------------------------------------------------------------
+
+#
+#_lock is used to serialize access to shared data structures in this module.
+#This needs to be an RLock because fileConfig() creates and configures
+#Handlers, and so might arbitrary user threads. Since Handler code updates the
+#shared dictionary _handlers, it needs to acquire the lock. But if configuring,
+#the lock would already have been acquired - so we need an RLock.
+#The same argument applies to Loggers and Manager.loggerDict.
+#
+if thread:
+    _lock = threading.RLock()
+else:
+    _lock = None
+
+
+def _acquireLock():
+    """
+    Acquire the module-level lock for serializing access to shared data.
+
+    This should be released with _releaseLock().
+    """
+    if _lock:
+        _lock.acquire()
+
+def _releaseLock():
+    """
+    Release the module-level lock acquired by calling _acquireLock().
+    """
+    if _lock:
+        _lock.release()
+
+#---------------------------------------------------------------------------
+#   The logging record
+#---------------------------------------------------------------------------
+
+class LogRecord(object):
+    """
+    A LogRecord instance represents an event being logged.
+
+    LogRecord instances are created every time something is logged. They
+    contain all the information pertinent to the event being logged. The
+    main information passed in is in msg and args, which are combined
+    using str(msg) % args to create the message field of the record. The
+    record also includes information such as when the record was created,
+    the source line where the logging call was made, and any exception
+    information to be logged.
+    """
+    def __init__(self, name, level, pathname, lineno,
+                 msg, args, exc_info, func=None, sinfo=None, **kwargs):
+        """
+        Initialize a logging record with interesting information.
+        """
+        ct = time.time()
+        self.name = name
+        self.msg = msg
+        #
+        # The following statement allows passing of a dictionary as a sole
+        # argument, so that you can do something like
+        #  logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2})
+        # Suggested by Stefan Behnel.
+        # Note that without the test for args[0], we get a problem because
+        # during formatting, we test to see if the arg is present using
+        # 'if self.args:'. If the event being logged is e.g. 'Value is %d'
+        # and if the passed arg fails 'if self.args:' then no formatting
+        # is done. For example, logger.warn('Value is %d', 0) would log
+        # 'Value is %d' instead of 'Value is 0'.
+        # For the use case of passing a dictionary, this should not be a
+        # problem.
+        if args and len(args) == 1 and isinstance(args[0], dict) and args[0]:
+            args = args[0]
+        self.args = args
+        self.levelname = getLevelName(level)
+        self.levelno = level
+        self.pathname = pathname
+        try:
+            self.filename = os.path.basename(pathname)
+            self.module = os.path.splitext(self.filename)[0]
+        except (TypeError, ValueError, AttributeError):
+            self.filename = pathname
+            self.module = "Unknown module"
+        self.exc_info = exc_info
+        self.exc_text = None      # used to cache the traceback text
+        self.stack_info = sinfo
+        self.lineno = lineno
+        self.funcName = func
+        self.created = ct
+        self.msecs = (ct - int(ct)) * 1000
+        self.relativeCreated = (self.created - _startTime) * 1000
+        if logThreads and thread:
+            self.thread = thread.get_ident()
+            self.threadName = threading.current_thread().name
+        else:
+            self.thread = None
+            self.threadName = None
+        if not logMultiprocessing:
+            self.processName = None
+        else:
+            self.processName = 'MainProcess'
+            mp = sys.modules.get('multiprocessing')
+            if mp is not None:
+                # Errors may occur if multiprocessing has not finished loading
+                # yet - e.g. if a custom import hook causes third-party code
+                # to run when multiprocessing calls import. See issue 8200
+                # for an example
+                try:
+                    self.processName = mp.current_process().name
+                except Exception:
+                    pass
+        if logProcesses and hasattr(os, 'getpid'):
+            self.process = os.getpid()
+        else:
+            self.process = None
+
+    def __str__(self):
+        return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
+            self.pathname, self.lineno, self.msg)
+
+    def getMessage(self):
+        """
+        Return the message for this LogRecord.
+
+        Return the message for this LogRecord after merging any user-supplied
+        arguments with the message.
+        """
+        msg = str(self.msg)
+        if self.args:
+            msg = msg % self.args
+        return msg
+
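+# Sketch of the dict-as-sole-argument behaviour described in __init__ above
+# (constructor arguments are illustrative placeholders):
+#
+#     r = LogRecord('demo', INFO, __file__, 1,
+#                   'a %(a)d b %(b)s', ({'a': 1, 'b': 2},), None)
+#     r.getMessage()   # 'a 1 b 2' -- the lone dict replaces the args tuple
+#
+#     r = LogRecord('demo', INFO, __file__, 1, 'Value is %d', (0,), None)
+#     r.getMessage()   # 'Value is 0' -- a false-y argument still formats
+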
+#
+#   Determine which class to use when instantiating log records.
+#
+_logRecordFactory = LogRecord
+
+def setLogRecordFactory(factory):
+    """
+    Set the factory to be used when instantiating a log record.
+
+    :param factory: A callable which will be called to instantiate
+    a log record.
+    """
+    global _logRecordFactory
+    _logRecordFactory = factory
+
+def getLogRecordFactory():
+    """
+    Return the factory to be used when instantiating a log record.
+    """
+
+    return _logRecordFactory
+
+def makeLogRecord(dict):
+    """
+    Make a LogRecord whose attributes are defined by the specified dictionary,
+    This function is useful for converting a logging event received over
+    a socket connection (which is sent as a dictionary) into a LogRecord
+    instance.
+    """
+    rv = _logRecordFactory(None, None, "", 0, "", (), None, None)
+    rv.__dict__.update(dict)
+    return rv
+
+#---------------------------------------------------------------------------
+#   Formatter classes and functions
+#---------------------------------------------------------------------------
+
+class PercentStyle(object):
+
+    default_format = '%(message)s'
+    asctime_format = '%(asctime)s'
+    asctime_search = '%(asctime)'
+
+    def __init__(self, fmt):
+        self._fmt = fmt or self.default_format
+
+    def usesTime(self):
+        return self._fmt.find(self.asctime_search) >= 0
+
+    def format(self, record):
+        return self._fmt % record.__dict__
+
+class StrFormatStyle(PercentStyle):
+    default_format = '{message}'
+    asctime_format = '{asctime}'
+    asctime_search = '{asctime'
+
+    def format(self, record):
+        return self._fmt.format(**record.__dict__)
+
+
+class StringTemplateStyle(PercentStyle):
+    default_format = '${message}'
+    asctime_format = '${asctime}'
+    asctime_search = '${asctime}'
+
+    def __init__(self, fmt):
+        self._fmt = fmt or self.default_format
+        self._tpl = Template(self._fmt)
+
+    def usesTime(self):
+        fmt = self._fmt
+        return fmt.find('$asctime') >= 0 or fmt.find(self.asctime_format) >= 0
+
+    def format(self, record):
+        return self._tpl.substitute(**record.__dict__)
+
+_STYLES = {
+    '%': PercentStyle,
+    '{': StrFormatStyle,
+    '$': StringTemplateStyle
+}
+
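+# Sketch of the three styles registered above; each formats against the
+# record's __dict__, so any LogRecord attribute can appear in the string
+# (the 'message' key is injected here only for illustration):
+#
+#     rec = makeLogRecord({'levelname': 'INFO', 'message': 'hello'})
+#     PercentStyle('%(levelname)s:%(message)s').format(rec)       # 'INFO:hello'
+#     StrFormatStyle('{levelname}:{message}').format(rec)         # 'INFO:hello'
+#     StringTemplateStyle('${levelname}:${message}').format(rec)  # 'INFO:hello'
+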
+class Formatter(object):
+    """
+    Formatter instances are used to convert a LogRecord to text.
+
+    Formatters need to know how a LogRecord is constructed. They are
+    responsible for converting a LogRecord to (usually) a string which can
+    be interpreted by either a human or an external system. The base Formatter
+    allows a formatting string to be specified. If none is supplied, the
+    default value of "%(message)s" is used.
+
+    The Formatter can be initialized with a format string which makes use of
+    knowledge of the LogRecord attributes - e.g. the default value mentioned
+    above makes use of the fact that the user's message and arguments are pre-
+    formatted into a LogRecord's message attribute. Currently, the useful
+    attributes in a LogRecord are described by:
+
+    %(name)s            Name of the logger (logging channel)
+    %(levelno)s         Numeric logging level for the message (DEBUG, INFO,
+                        WARNING, ERROR, CRITICAL)
+    %(levelname)s       Text logging level for the message ("DEBUG", "INFO",
+                        "WARNING", "ERROR", "CRITICAL")
+    %(pathname)s        Full pathname of the source file where the logging
+                        call was issued (if available)
+    %(filename)s        Filename portion of pathname
+    %(module)s          Module (name portion of filename)
+    %(lineno)d          Source line number where the logging call was issued
+                        (if available)
+    %(funcName)s        Function name
+    %(created)f         Time when the LogRecord was created (time.time()
+                        return value)
+    %(asctime)s         Textual time when the LogRecord was created
+    %(msecs)d           Millisecond portion of the creation time
+    %(relativeCreated)d Time in milliseconds when the LogRecord was created,
+                        relative to the time the logging module was loaded
+                        (typically at application startup time)
+    %(thread)d          Thread ID (if available)
+    %(threadName)s      Thread name (if available)
+    %(process)d         Process ID (if available)
+    %(message)s         The result of record.getMessage(), computed just as
+                        the record is emitted
+    """
+
+    converter = time.localtime
+
+    def __init__(self, fmt=None, datefmt=None, style='%'):
+        """
+        Initialize the formatter with specified format strings.
+
+        Initialize the formatter either with the specified format string, or a
+        default as described above. Allow for specialized date formatting with
+        the optional datefmt argument (if omitted, you get the ISO8601 format).
+
+        Use a style parameter of '%', '{' or '$' to specify that you want to
+        use one of %-formatting, :meth:`str.format` (``{}``) formatting or
+        :class:`string.Template` formatting in your format string.
+
+        .. versionchanged:: 3.2
+           Added the ``style`` parameter.
+        """
+        if style not in _STYLES:
+            raise ValueError('Style must be one of: %s' % ','.join(
+                             _STYLES.keys()))
+        self._style = _STYLES[style](fmt)
+        self._fmt = self._style._fmt
+        self.datefmt = datefmt
+
+    def formatTime(self, record, datefmt=None):
+        """
+        Return the creation time of the specified LogRecord as formatted text.
+
+        This method should be called from format() by a formatter which
+        wants to make use of a formatted time. This method can be overridden
+        in formatters to provide for any specific requirement, but the
+        basic behaviour is as follows: if datefmt (a string) is specified,
+        it is used with time.strftime() to format the creation time of the
+        record. Otherwise, the ISO8601 format is used. The resulting
+        string is returned. This function uses a user-configurable function
+        to convert the creation time to a tuple. By default, time.localtime()
+        is used; to change this for a particular formatter instance, set the
+        'converter' attribute to a function with the same signature as
+        time.localtime() or time.gmtime(). To change it for all formatters,
+        for example if you want all logging times to be shown in GMT,
+        set the 'converter' attribute in the Formatter class.
+        """
+        ct = self.converter(record.created)
+        if datefmt:
+            s = time.strftime(datefmt, ct)
+        else:
+            t = time.strftime("%Y-%m-%d %H:%M:%S", ct)
+            s = "%s,%03d" % (t, record.msecs) # the use of % here is internal
+        return s
+
+    def formatException(self, ei):
+        """
+        Format and return the specified exception information as a string.
+
+        This default implementation just uses
+        traceback.print_exception()
+        """
+        sio = io.StringIO()
+        tb = ei[2]
+        # See issues #9427, #1553375. Commented out for now.
+        #if getattr(self, 'fullstack', False):
+        #    traceback.print_stack(tb.tb_frame.f_back, file=sio)
+        traceback.print_exception(ei[0], ei[1], tb, None, sio)
+        s = sio.getvalue()
+        sio.close()
+        if s[-1:] == "\n":
+            s = s[:-1]
+        return s
+
+    def usesTime(self):
+        """
+        Check if the format uses the creation time of the record.
+        """
+        return self._style.usesTime()
+
+    def formatMessage(self, record):
+        return self._style.format(record)
+
+    def formatStack(self, stack_info):
+        """
+        This method is provided as an extension point for specialized
+        formatting of stack information.
+
+        The input data is a string as returned from a call to
+        :func:`traceback.print_stack`, but with the last trailing newline
+        removed.
+
+        The base implementation just returns the value passed in.
+        """
+        return stack_info
+
+    def format(self, record):
+        """
+        Format the specified record as text.
+
+        The record's attribute dictionary is used as the operand to a
+        string formatting operation which yields the returned string.
+        Before formatting the dictionary, a couple of preparatory steps
+        are carried out. The message attribute of the record is computed
+        using LogRecord.getMessage(). If the formatting string uses the
+        time (as determined by a call to usesTime()), formatTime() is
+        called to format the event time. If there is exception information,
+        it is formatted using formatException() and appended to the message.
+        """
+        record.message = record.getMessage()
+        if self.usesTime():
+            record.asctime = self.formatTime(record, self.datefmt)
+        s = self.formatMessage(record)
+        if record.exc_info:
+            # Cache the traceback text to avoid converting it multiple times
+            # (it's constant anyway)
+            if not record.exc_text:
+                record.exc_text = self.formatException(record.exc_info)
+        if record.exc_text:
+            if s[-1:] != "\n":
+                s = s + "\n"
+            s = s + record.exc_text
+        if record.stack_info:
+            if s[-1:] != "\n":
+                s = s + "\n"
+            s = s + self.formatStack(record.stack_info)
+        return s
+
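+# Minimal end-to-end sketch of format() (the record fields are placeholders;
+# the timestamp shown is only an example):
+#
+#     fmt = Formatter('%(asctime)s %(levelname)s %(message)s',
+#                     datefmt='%Y-%m-%d %H:%M:%S')
+#     rec = makeLogRecord({'levelname': 'INFO', 'msg': 'hello'})
+#     fmt.format(rec)   # e.g. '2011-06-01 12:00:00 INFO hello'
+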
+#
+#   The default formatter to use when no other is specified
+#
+_defaultFormatter = Formatter()
+
+class BufferingFormatter(object):
+    """
+    A formatter suitable for formatting a number of records.
+    """
+    def __init__(self, linefmt=None):
+        """
+        Optionally specify a formatter which will be used to format each
+        individual record.
+        """
+        if linefmt:
+            self.linefmt = linefmt
+        else:
+            self.linefmt = _defaultFormatter
+
+    def formatHeader(self, records):
+        """
+        Return the header string for the specified records.
+        """
+        return ""
+
+    def formatFooter(self, records):
+        """
+        Return the footer string for the specified records.
+        """
+        return ""
+
+    def format(self, records):
+        """
+        Format the specified records and return the result as a string.
+        """
+        rv = ""
+        if len(records) > 0:
+            rv = rv + self.formatHeader(records)
+            for record in records:
+                rv = rv + self.linefmt.format(record)
+            rv = rv + self.formatFooter(records)
+        return rv
+
+#---------------------------------------------------------------------------
+#   Filter classes and functions
+#---------------------------------------------------------------------------
+
+class Filter(object):
+    """
+    Filter instances are used to perform arbitrary filtering of LogRecords.
+
+    Loggers and Handlers can optionally use Filter instances to filter
+    records as desired. The base filter class only allows events which are
+    below a certain point in the logger hierarchy. For example, a filter
+    initialized with "A.B" will allow events logged by loggers "A.B",
+    "A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If
+    initialized with the empty string, all events are passed.
+    """
+    def __init__(self, name=''):
+        """
+        Initialize a filter.
+
+        Initialize with the name of the logger which, together with its
+        children, will have its events allowed through the filter. If no
+        name is specified, allow every event.
+        """
+        self.name = name
+        self.nlen = len(name)
+
+    def filter(self, record):
+        """
+        Determine if the specified record is to be logged.
+
+        Is the specified record to be logged? Returns 0 for no, nonzero for
+        yes. If deemed appropriate, the record may be modified in-place.
+        """
+        if self.nlen == 0:
+            return 1
+        elif self.name == record.name:
+            return 1
+        elif record.name.find(self.name, 0, self.nlen) != 0:
+            return 0
+        return (record.name[self.nlen] == ".")
+
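+# Sketch of the hierarchy rule implemented by filter() above (logger names
+# are illustrative):
+#
+#     f = Filter('A.B')
+#     f.filter(makeLogRecord({'name': 'A.B.C'}))   # truthy: a child of 'A.B'
+#     f.filter(makeLogRecord({'name': 'A.BB'}))    # falsy:  not a child
+#     Filter('').filter(makeLogRecord({'name': 'anything'}))   # truthy
+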
+class Filterer(object):
+    """
+    A base class for loggers and handlers which allows them to share
+    common code.
+    """
+    def __init__(self):
+        """
+        Initialize the list of filters to be an empty list.
+        """
+        self.filters = []
+
+    def addFilter(self, filter):
+        """
+        Add the specified filter to this handler.
+        """
+        if filter not in self.filters:
+            self.filters.append(filter)
+
+    def removeFilter(self, filter):
+        """
+        Remove the specified filter from this handler.
+        """
+        if filter in self.filters:
+            self.filters.remove(filter)
+
+    def filter(self, record):
+        """
+        Determine if a record is loggable by consulting all the filters.
+
+        The default is to allow the record to be logged; any filter can veto
+        this and the record is then dropped. Returns a zero value if a record
+        is to be dropped, else non-zero.
+
+        .. versionchanged:: 3.2
+
+           Allow filters to be just callables.
+        """
+        rv = 1
+        for f in self.filters:
+            if hasattr(f, 'filter'):
+                result = f.filter(record)
+            else:
+                result = f(record) # assume callable - will raise if not
+            if not result:
+                rv = 0
+                break
+        return rv
+
+#---------------------------------------------------------------------------
+#   Handler classes and functions
+#---------------------------------------------------------------------------
+
+# _handlers maps handler names to handlers; _handlerList lets handlers be
+# removed in the reverse of the order in which they were initialized.
+_handlers = weakref.WeakValueDictionary()  # type: Any
+_handlerList = []  # type: Any
+
+def _removeHandlerRef(wr):
+    """
+    Remove a handler reference from the internal cleanup list.
+    """
+    # This function can be called during module teardown, when globals are
+    # set to None. If _acquireLock is None, assume this is the case and do
+    # nothing.
+    if _acquireLock is not None:
+        _acquireLock()
+        try:
+            if wr in _handlerList:
+                _handlerList.remove(wr)
+        finally:
+            _releaseLock()
+
+def _addHandlerRef(handler):
+    """
+    Add a handler to the internal cleanup list using a weak reference.
+    """
+    _acquireLock()
+    try:
+        _handlerList.append(weakref.ref(handler, _removeHandlerRef))
+    finally:
+        _releaseLock()
+
+class Handler(Filterer):
+    """
+    Handler instances dispatch logging events to specific destinations.
+
+    The base handler class. Acts as a placeholder which defines the Handler
+    interface. Handlers can optionally use Formatter instances to format
+    records as desired. By default, no formatter is specified; in this case,
+    the 'raw' message as determined by record.message is logged.
+    """
+    def __init__(self, level=NOTSET):
+        """
+        Initializes the instance - basically setting the formatter to None
+        and the filter list to empty.
+        """
+        Filterer.__init__(self)
+        self._name = None
+        self.level = _checkLevel(level)
+        self.formatter = None
+        # Add the handler to the global _handlerList (for cleanup on shutdown)
+        _addHandlerRef(self)
+        self.createLock()
+
+    def get_name(self):
+        return self._name
+
+    def set_name(self, name):
+        _acquireLock()
+        try:
+            if self._name in _handlers:
+                del _handlers[self._name]
+            self._name = name
+            if name:
+                _handlers[name] = self
+        finally:
+            _releaseLock()
+
+    #name = property(get_name, set_name)
+
+    def createLock(self):
+        """
+        Acquire a thread lock for serializing access to the underlying I/O.
+        """
+        if thread:
+            self.lock = threading.RLock()
+        else:
+            self.lock = None
+
+    def acquire(self):
+        """
+        Acquire the I/O thread lock.
+        """
+        if self.lock:
+            self.lock.acquire()
+
+    def release(self):
+        """
+        Release the I/O thread lock.
+        """
+        if self.lock:
+            self.lock.release()
+
+    def setLevel(self, level):
+        """
+        Set the logging level of this handler.
+        """
+        self.level = _checkLevel(level)
+
+    def format(self, record):
+        """
+        Format the specified record.
+
+        If a formatter is set, use it. Otherwise, use the default formatter
+        for the module.
+        """
+        if self.formatter:
+            fmt = self.formatter
+        else:
+            fmt = _defaultFormatter
+        return fmt.format(record)
+
+    def emit(self, record):
+        """
+        Do whatever it takes to actually log the specified logging record.
+
+        This version is intended to be implemented by subclasses and so
+        raises a NotImplementedError.
+        """
+        raise NotImplementedError('emit must be implemented '
+                                  'by Handler subclasses')
+
+    def handle(self, record):
+        """
+        Conditionally emit the specified logging record.
+
+        Emission depends on filters which may have been added to the handler.
+        Wrap the actual emission of the record with acquisition/release of
+        the I/O thread lock. Returns whether the filter passed the record for
+        emission.
+        """
+        rv = self.filter(record)
+        if rv:
+            self.acquire()
+            try:
+                self.emit(record)
+            finally:
+                self.release()
+        return rv
+
+    def setFormatter(self, fmt):
+        """
+        Set the formatter for this handler.
+        """
+        self.formatter = fmt
+
+    def flush(self):
+        """
+        Ensure all logging output has been flushed.
+
+        This version does nothing and is intended to be implemented by
+        subclasses.
+        """
+        pass
+
+    def close(self):
+        """
+        Tidy up any resources used by the handler.
+
+        This version removes the handler from an internal map of handlers,
+        _handlers, which is used for handler lookup by name. Subclasses
+        should ensure that this gets called from overridden close()
+        methods.
+        """
+        #get the module data lock, as we're updating a shared structure.
+        _acquireLock()
+        try:    #unlikely to raise an exception, but you never know...
+            if self._name and self._name in _handlers:
+                del _handlers[self._name]
+        finally:
+            _releaseLock()
+
+    def handleError(self, record):
+        """
+        Handle errors which occur during an emit() call.
+
+        This method should be called from handlers when an exception is
+        encountered during an emit() call. If raiseExceptions is false,
+        exceptions get silently ignored. This is what is mostly wanted
+        for a logging system - most users will not care about errors in
+        the logging system, they are more interested in application errors.
+        You could, however, replace this with a custom handler if you wish.
+        The record which was being processed is passed in to this method.
+        """
+        if raiseExceptions and sys.stderr:  # see issue 13807
+            ei = sys.exc_info()
+            try:
+                traceback.print_exception(ei[0], ei[1], ei[2],
+                                          None, sys.stderr)
+                sys.stderr.write('Logged from file %s, line %s\n' % (
+                                 record.filename, record.lineno))
+            except IOError:
+                pass    # see issue 5971
+            finally:
+                ei = None
+
+class StreamHandler(Handler):
+    """
+    A handler class which writes logging records, appropriately formatted,
+    to a stream. Note that this class does not close the stream, as
+    sys.stdout or sys.stderr may be used.
+    """
+
+    terminator = '\n'
+
+    def __init__(self, stream=None):
+        """
+        Initialize the handler.
+
+        If stream is not specified, sys.stderr is used.
+        """
+        Handler.__init__(self)
+        if stream is None:
+            stream = sys.stderr
+        self.stream = stream
+
+    def flush(self):
+        """
+        Flushes the stream.
+        """
+        if self.stream and hasattr(self.stream, "flush"):
+            self.stream.flush()
+
+    def emit(self, record):
+        """
+        Emit a record.
+
+        If a formatter is specified, it is used to format the record.
+        The record is then written to the stream with a trailing newline.  If
+        exception information is present, it is formatted using
+        traceback.print_exception and appended to the stream.  If the stream
+        has an 'encoding' attribute, it is used to determine how to do the
+        output to the stream.
+        """
+        try:
+            msg = self.format(record)
+            stream = self.stream
+            stream.write(msg)
+            stream.write(self.terminator)
+            self.flush()
+        except (KeyboardInterrupt, SystemExit):
+            raise
+        except:
+            self.handleError(record)
+
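+# Sketch of the pipeline above: handle() consults the filters, takes the I/O
+# lock and delegates to emit(); StreamHandler.emit() formats the record and
+# writes it to the wrapped stream (sys.stderr by default; values illustrative):
+#
+#     h = StreamHandler()
+#     h.setFormatter(Formatter('%(levelname)s %(message)s'))
+#     h.handle(makeLogRecord({'levelname': 'WARNING', 'msg': 'low disk'}))
+#     # writes 'WARNING low disk\n' to sys.stderr and returns a true value
+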
+class FileHandler(StreamHandler):
+    """
+    A handler class which writes formatted logging records to disk files.
+    """
+    def __init__(self, filename, mode='a', encoding=None, delay=0):
+        """
+        Open the specified file and use it as the stream for logging.
+        """
+        #keep the absolute path, otherwise derived classes which use this
+        #may come a cropper when the current directory changes
+        if codecs is None:
+            encoding = None
+        self.baseFilename = os.path.abspath(filename)
+        self.mode = mode
+        self.encoding = encoding
+        if delay:
+            #We don't open the stream, but we still need to call the
+            #Handler constructor to set level, formatter, lock etc.
+            Handler.__init__(self)
+            self.stream = None
+        else:
+            StreamHandler.__init__(self, self._open())
+
+    def close(self):
+        """
+        Closes the stream.
+        """
+        if self.stream:
+            self.flush()
+            if hasattr(self.stream, "close"):
+                self.stream.close()
+            StreamHandler.close(self)
+            self.stream = None
+
+    def _open(self):
+        """
+        Open the current base file with the (original) mode and encoding.
+        Return the resulting stream.
+        """
+        if self.encoding is None:
+            stream = open(self.baseFilename, self.mode)
+        else:
+            stream = codecs.open(self.baseFilename, self.mode, self.encoding)
+        return stream
+
+    def emit(self, record):
+        """
+        Emit a record.
+
+        If the stream was not opened because 'delay' was specified in the
+        constructor, open it before calling the superclass's emit.
+        """
+        if self.stream is None:
+            self.stream = self._open()
+        StreamHandler.emit(self, record)
+
+class _StderrHandler(StreamHandler):
+    """
+    This class is like a StreamHandler using sys.stderr, but always uses
+    whatever sys.stderr is currently set to rather than the value of
+    sys.stderr at handler construction time.
+    """
+    def __init__(self, level=NOTSET):
+        """
+        Initialize the handler.
+        """
+        Handler.__init__(self, level)
+
+    #@property
+    #def stream(self):
+    #    return sys.stderr
+
+
+_defaultLastResort = _StderrHandler(WARNING)
+lastResort = _defaultLastResort
+
+#---------------------------------------------------------------------------
+#   Manager classes and functions
+#---------------------------------------------------------------------------
+
+class PlaceHolder(object):
+    """
+    PlaceHolder instances are used in the Manager logger hierarchy to take
+    the place of nodes for which no loggers have been defined. This class is
+    intended for internal use only and not as part of the public API.
+    """
+    def __init__(self, alogger):
+        """
+        Initialize with the specified logger being a child of this placeholder.
+        """
+        self.loggerMap = { alogger : None }
+
+    def append(self, alogger):
+        """
+        Add the specified logger as a child of this placeholder.
+        """
+        if alogger not in self.loggerMap:
+            self.loggerMap[alogger] = None
+
+#
+#   Determine which class to use when instantiating loggers.
+#
+_loggerClass = None  # type: Any
+
+def setLoggerClass(klass):
+    """
+    Set the class to be used when instantiating a logger. The class should
+    define __init__() such that only a name argument is required, and the
+    __init__() should call Logger.__init__()
+    """
+    if klass != Logger:
+        if not issubclass(klass, Logger):
+            raise TypeError("logger not derived from logging.Logger: "
+                            + klass.__name__)
+    global _loggerClass
+    _loggerClass = klass
+
+def getLoggerClass():
+    """
+    Return the class to be used when instantiating a logger.
+    """
+
+    return _loggerClass
+
+class Manager(object):
+    """
+    There is [under normal circumstances] just one Manager instance, which
+    holds the hierarchy of loggers.
+    """
+    def __init__(self, rootnode):
+        """
+        Initialize the manager with the root node of the logger hierarchy.
+        """
+        self.root = rootnode
+        self.disable = 0
+        self.emittedNoHandlerWarning = False
+        self.loggerDict = {}
+        self.loggerClass = None
+        self.logRecordFactory = None
+
+    def getLogger(self, name):
+        """
+        Get a logger with the specified name (channel name), creating it
+        if it doesn't yet exist. This name is a dot-separated hierarchical
+        name, such as "a", "a.b", "a.b.c" or similar.
+
+        If a PlaceHolder existed for the specified name [i.e. the logger
+        didn't exist but a child of it did], replace it with the created
+        logger and fix up the parent/child references which pointed to the
+        placeholder to now point to the logger.
+        """
+        rv = None
+        if not isinstance(name, str):
+            raise TypeError('A logger name must be a string')
+        _acquireLock()
+        try:
+            if name in self.loggerDict:
+                rv = self.loggerDict[name]
+                if isinstance(rv, PlaceHolder):
+                    ph = rv
+                    rv = (self.loggerClass or _loggerClass)(name)
+                    rv.manager = self
+                    self.loggerDict[name] = rv
+                    self._fixupChildren(ph, rv)
+                    self._fixupParents(rv)
+            else:
+                rv = (self.loggerClass or _loggerClass)(name)
+                rv.manager = self
+                self.loggerDict[name] = rv
+                self._fixupParents(rv)
+        finally:
+            _releaseLock()
+        return rv
+
+    def setLoggerClass(self, klass):
+        """
+        Set the class to be used when instantiating a logger with this Manager.
+        """
+        if klass != Logger:
+            if not issubclass(klass, Logger):
+                raise TypeError("logger not derived from logging.Logger: "
+                                + klass.__name__)
+        self.loggerClass = klass
+
+    def setLogRecordFactory(self, factory):
+        """
+        Set the factory to be used when instantiating a log record with this
+        Manager.
+        """
+        self.logRecordFactory = factory
+
+    def _fixupParents(self, alogger):
+        """
+        Ensure that there are either loggers or placeholders all the way
+        from the specified logger to the root of the logger hierarchy.
+        """
+        name = alogger.name
+        i = name.rfind(".")
+        rv = None
+        while (i > 0) and not rv:
+            substr = name[:i]
+            if substr not in self.loggerDict:
+                self.loggerDict[substr] = PlaceHolder(alogger)
+            else:
+                obj = self.loggerDict[substr]
+                if isinstance(obj, Logger):
+                    rv = obj
+                else:
+                    assert isinstance(obj, PlaceHolder)
+                    obj.append(alogger)
+            i = name.rfind(".", 0, i - 1)
+        if not rv:
+            rv = self.root
+        alogger.parent = rv
+
+    def _fixupChildren(self, ph, alogger):
+        """
+        Ensure that children of the placeholder ph are connected to the
+        specified logger.
+        """
+        name = alogger.name
+        namelen = len(name)
+        for c in ph.loggerMap.keys():
+            #The if means ... if not c.parent.name.startswith(name)
+            if c.parent.name[:namelen] != name:
+                alogger.parent = c.parent
+                c.parent = alogger
+
+#---------------------------------------------------------------------------
+#   Logger classes and functions
+#---------------------------------------------------------------------------
+
+class Logger(Filterer):
+    """
+    Instances of the Logger class represent a single logging channel. A
+    "logging channel" indicates an area of an application. Exactly how an
+    "area" is defined is up to the application developer. Since an
+    application can have any number of areas, logging channels are identified
+    by a unique string. Application areas can be nested (e.g. an area
+    of "input processing" might include sub-areas "read CSV files", "read
+    XLS files" and "read Gnumeric files"). To cater for this natural nesting,
+    channel names are organized into a namespace hierarchy where levels are
+    separated by periods, much like the Java or Python package namespace. So
+    in the instance given above, channel names might be "input" for the upper
+    level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels.
+    There is no arbitrary limit to the depth of nesting.
+    """
+
+    root = None      # type: Any
+    manager = None   # type: Any
+
+    def __init__(self, name, level=NOTSET):
+        """
+        Initialize the logger with a name and an optional level.
+        """
+        Filterer.__init__(self)
+        self.name = name
+        self.level = _checkLevel(level)
+        self.parent = None
+        self.propagate = 1
+        self.handlers = []
+        self.disabled = 0
+
+    def setLevel(self, level):
+        """
+        Set the logging level of this logger.
+        """
+        self.level = _checkLevel(level)
+
+    def debug(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'DEBUG'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
+        """
+        if self.isEnabledFor(DEBUG):
+            self._log(DEBUG, msg, args, **kwargs)
+
+    def info(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'INFO'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
+        """
+        if self.isEnabledFor(INFO):
+            self._log(INFO, msg, args, **kwargs)
+
+    def warning(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'WARNING'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1)
+        """
+        if self.isEnabledFor(WARNING):
+            self._log(WARNING, msg, args, **kwargs)
+
+    warn = warning
+
+    def error(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'ERROR'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.error("Houston, we have a %s", "major problem", exc_info=1)
+        """
+        if self.isEnabledFor(ERROR):
+            self._log(ERROR, msg, args, **kwargs)
+
+    def exception(self, msg, *args, **kwargs):
+        """
+        Convenience method for logging an ERROR with exception information.
+        """
+        kwargs['exc_info'] = True
+        self.error(msg, *args, **kwargs)
+
+    def critical(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'CRITICAL'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.critical("Houston, we have a %s", "major disaster", exc_info=1)
+        """
+        if self.isEnabledFor(CRITICAL):
+            self._log(CRITICAL, msg, args, **kwargs)
+
+    fatal = critical
+
+    def log(self, level, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with the integer severity 'level'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.log(level, "We have a %s", "mysterious problem", exc_info=1)
+        """
+        if not isinstance(level, int):
+            if raiseExceptions:
+                raise TypeError("level must be an integer")
+            else:
+                return
+        if self.isEnabledFor(level):
+            self._log(level, msg, args, **kwargs)
+
+    def findCaller(self, stack_info=False):
+        """
+        Find the stack frame of the caller so that we can note the source
+        file name, line number and function name.
+        """
+        f = currentframe()
+        #On some versions of IronPython, currentframe() returns None if
+        #IronPython isn't run with -X:Frames.
+        if f is not None:
+            f = f.f_back
+        rv = "(unknown file)", 0, "(unknown function)", None
+        while hasattr(f, "f_code"):
+            co = f.f_code
+            filename = os.path.normcase(co.co_filename)
+            if filename == _srcfile:
+                f = f.f_back
+                continue
+            sinfo = None
+            if stack_info:
+                sio = io.StringIO()
+                sio.write('Stack (most recent call last):\n')
+                traceback.print_stack(f, file=sio)
+                sinfo = sio.getvalue()
+                if sinfo[-1] == '\n':
+                    sinfo = sinfo[:-1]
+                sio.close()
+            rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
+            break
+        return rv
+
+    def makeRecord(self, name, level, fn, lno, msg, args, exc_info,
+                   func=None, extra=None, sinfo=None):
+        """
+        A factory method which can be overridden in subclasses to create
+        specialized LogRecords.
+        """
+        rv = _logRecordFactory(name, level, fn, lno, msg, args, exc_info, func,
+                             sinfo)
+        if extra is not None:
+            for key in extra:
+                if (key in ["message", "asctime"]) or (key in rv.__dict__):
+                    raise KeyError("Attempt to overwrite %r in LogRecord" % key)
+                rv.__dict__[key] = extra[key]
+        return rv
+
+    def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False):
+        """
+        Low-level logging routine which creates a LogRecord and then calls
+        all the handlers of this logger to handle the record.
+        """
+        sinfo = None
+        if _srcfile:
+            #IronPython doesn't track Python frames, so findCaller throws an
+            #exception on some versions of IronPython. We trap it here so that
+            #IronPython can use logging.
+            try:
+                fn, lno, func, sinfo = self.findCaller(stack_info)
+            except ValueError:
+                fn, lno, func = "(unknown file)", 0, "(unknown function)"
+        else:
+            fn, lno, func = "(unknown file)", 0, "(unknown function)"
+        if exc_info:
+            if not isinstance(exc_info, tuple):
+                exc_info = sys.exc_info()
+        record = self.makeRecord(self.name, level, fn, lno, msg, args,
+                                 exc_info, func, extra, sinfo)
+        self.handle(record)
+
+    def handle(self, record):
+        """
+        Call the handlers for the specified record.
+
+        This method is used for unpickled records received from a socket, as
+        well as those created locally. Logger-level filtering is applied.
+        """
+        if (not self.disabled) and self.filter(record):
+            self.callHandlers(record)
+
+    def addHandler(self, hdlr):
+        """
+        Add the specified handler to this logger.
+        """
+        _acquireLock()
+        try:
+            if not (hdlr in self.handlers):
+                self.handlers.append(hdlr)
+        finally:
+            _releaseLock()
+
+    def removeHandler(self, hdlr):
+        """
+        Remove the specified handler from this logger.
+        """
+        _acquireLock()
+        try:
+            if hdlr in self.handlers:
+                self.handlers.remove(hdlr)
+        finally:
+            _releaseLock()
+
+    def hasHandlers(self):
+        """
+        See if this logger has any handlers configured.
+
+        Loop through all handlers for this logger and its parents in the
+        logger hierarchy. Return True if a handler was found, else False.
+        Stop searching up the hierarchy whenever a logger with the "propagate"
+        attribute set to zero is found - that will be the last logger which
+        is checked for the existence of handlers.
+        """
+        c = self
+        rv = False
+        while c:
+            if c.handlers:
+                rv = True
+                break
+            if not c.propagate:
+                break
+            else:
+                c = c.parent
+        return rv
+
+    def callHandlers(self, record):
+        """
+        Pass a record to all relevant handlers.
+
+        Loop through all handlers for this logger and its parents in the
+        logger hierarchy. If no handler was found, output a one-off error
+        message to sys.stderr. Stop searching up the hierarchy whenever a
+        logger with the "propagate" attribute set to zero is found - that
+        will be the last logger whose handlers are called.
+        """
+        c = self
+        found = 0
+        while c:
+            for hdlr in c.handlers:
+                found = found + 1
+                if record.levelno >= hdlr.level:
+                    hdlr.handle(record)
+            if not c.propagate:
+                c = None    #break out
+            else:
+                c = c.parent
+        if (found == 0):
+            if lastResort:
+                if record.levelno >= lastResort.level:
+                    lastResort.handle(record)
+            elif raiseExceptions and not self.manager.emittedNoHandlerWarning:
+                sys.stderr.write("No handlers could be found for logger"
+                                 " \"%s\"\n" % self.name)
+                self.manager.emittedNoHandlerWarning = True
+
+    def getEffectiveLevel(self):
+        """
+        Get the effective level for this logger.
+
+        Loop through this logger and its parents in the logger hierarchy,
+        looking for a non-zero logging level. Return the first one found.
+        """
+        logger = self
+        while logger:
+            if logger.level:
+                return logger.level
+            logger = logger.parent
+        return NOTSET
+
+    def isEnabledFor(self, level):
+        """
+        Is this logger enabled for level 'level'?
+        """
+        if self.manager.disable >= level:
+            return 0
+        return level >= self.getEffectiveLevel()
+
+    def getChild(self, suffix):
+        """
+        Get a logger which is a descendant to this one.
+
+        This is a convenience method, such that
+
+        logging.getLogger('abc').getChild('def.ghi')
+
+        is the same as
+
+        logging.getLogger('abc.def.ghi')
+
+        It's useful, for example, when the parent logger is named using
+        __name__ rather than a literal string.
+        """
+        if self.root is not self:
+            suffix = '.'.join((self.name, suffix))
+        return self.manager.getLogger(suffix)
+
+class RootLogger(Logger):
+    """
+    A root logger is not that different to any other logger, except that
+    it must have a logging level and there is only one instance of it in
+    the hierarchy.
+    """
+    def __init__(self, level):
+        """
+        Initialize the logger with the name "root".
+        """
+        Logger.__init__(self, "root", level)
+
+_loggerClass = Logger
+
+class LoggerAdapter(object):
+    """
+    An adapter for loggers which makes it easier to specify contextual
+    information in logging output.
+    """
+
+    def __init__(self, logger, extra):
+        """
+        Initialize the adapter with a logger and a dict-like object which
+        provides contextual information. This constructor signature allows
+        easy stacking of LoggerAdapters, if so desired.
+
+        You can effectively pass keyword arguments as shown in the
+        following example:
+
+        adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2"))
+        """
+        self.logger = logger
+        self.extra = extra
+
+    def process(self, msg, kwargs):
+        """
+        Process the logging message and keyword arguments passed in to
+        a logging call to insert contextual information. You can either
+        manipulate the message itself, the keyword args or both. Return
+        the message and kwargs modified (or not) to suit your needs.
+
+        Normally, you'll only need to override this one method in a
+        LoggerAdapter subclass for your specific needs.
+        """
+        kwargs["extra"] = self.extra
+        return msg, kwargs
+
+    #
+    # Boilerplate convenience methods
+    #
+    def debug(self, msg, *args, **kwargs):
+        """
+        Delegate a debug call to the underlying logger.
+        """
+        self.log(DEBUG, msg, *args, **kwargs)
+
+    def info(self, msg, *args, **kwargs):
+        """
+        Delegate an info call to the underlying logger.
+        """
+        self.log(INFO, msg, *args, **kwargs)
+
+    def warning(self, msg, *args, **kwargs):
+        """
+        Delegate a warning call to the underlying logger.
+        """
+        self.log(WARNING, msg, *args, **kwargs)
+
+    warn = warning
+
+    def error(self, msg, *args, **kwargs):
+        """
+        Delegate an error call to the underlying logger.
+        """
+        self.log(ERROR, msg, *args, **kwargs)
+
+    def exception(self, msg, *args, **kwargs):
+        """
+        Delegate an exception call to the underlying logger.
+        """
+        kwargs["exc_info"] = 1
+        self.log(ERROR, msg, *args, **kwargs)
+
+    def critical(self, msg, *args, **kwargs):
+        """
+        Delegate a critical call to the underlying logger.
+        """
+        self.log(CRITICAL, msg, *args, **kwargs)
+
+    def log(self, level, msg, *args, **kwargs):
+        """
+        Delegate a log call to the underlying logger, after adding
+        contextual information from this adapter instance.
+        """
+        if self.isEnabledFor(level):
+            msg, kwargs = self.process(msg, kwargs)
+            self.logger._log(level, msg, args, **kwargs)
+
+    def isEnabledFor(self, level):
+        """
+        Is this logger enabled for level 'level'?
+        """
+        if self.logger.manager.disable >= level:
+            return False
+        return level >= self.getEffectiveLevel()
+
+    def setLevel(self, level):
+        """
+        Set the specified level on the underlying logger.
+        """
+        self.logger.setLevel(level)
+
+    def getEffectiveLevel(self):
+        """
+        Get the effective level for the underlying logger.
+        """
+        return self.logger.getEffectiveLevel()
+
+    def hasHandlers(self):
+        """
+        See if the underlying logger has any handlers.
+        """
+        return self.logger.hasHandlers()
+
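+# Editor's note: an illustrative sketch, not part of the upstream module.
+# The adapter injects its "extra" dict into every call; the added keys become
+# LogRecord attributes and can be referenced from a format string such as
+# "%(request_id)s" ("request_id" is just a made-up example key).
+def _example_adapter():
+    adapter = LoggerAdapter(getLogger("app"), {"request_id": "1234"})
+    adapter.warning("handled request")  # the record carries request_id="1234"
+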
+root = RootLogger(WARNING)
+Logger.root = root
+Logger.manager = Manager(Logger.root)
+
+#---------------------------------------------------------------------------
+# Configuration classes and functions
+#---------------------------------------------------------------------------
+
+BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s"
+
+def basicConfig(**kwargs):
+    """
+    Do basic configuration for the logging system.
+
+    This function does nothing if the root logger already has handlers
+    configured. It is a convenience method intended for use by simple scripts
+    to do one-shot configuration of the logging package.
+
+    The default behaviour is to create a StreamHandler which writes to
+    sys.stderr, set a formatter using the BASIC_FORMAT format string, and
+    add the handler to the root logger.
+
+    A number of optional keyword arguments may be specified, which can alter
+    the default behaviour.
+
+    filename  Specifies that a FileHandler be created, using the specified
+              filename, rather than a StreamHandler.
+    filemode  Specifies the mode to open the file, if filename is specified
+              (if filemode is unspecified, it defaults to 'a').
+    format    Use the specified format string for the handler.
+    datefmt   Use the specified date/time format.
+    style     If a format string is specified, use this to specify the
+              type of format string (possible values '%', '{', '$', for
+              %-formatting, :meth:`str.format` and :class:`string.Template`
+              - defaults to '%').
+    level     Set the root logger level to the specified level.
+    stream    Use the specified stream to initialize the StreamHandler. Note
+              that this argument is incompatible with 'filename' - if both
+              are present, 'stream' is ignored.
+
+    Note that you could specify a stream created using open(filename, mode)
+    rather than passing the filename and mode in. However, it should be
+    remembered that StreamHandler does not close its stream (since it may be
+    using sys.stdout or sys.stderr), whereas FileHandler closes its stream
+    when the handler is closed.
+
+    .. versionchanged:: 3.2
+       Added the ``style`` parameter.
+    """
+    # Add thread safety in case someone mistakenly calls
+    # basicConfig() from multiple threads
+    _acquireLock()
+    try:
+        if len(root.handlers) == 0:
+            filename = kwargs.get("filename")
+            if filename:
+                mode = kwargs.get("filemode", 'a')
+                hdlr = FileHandler(filename, mode)
+            else:
+                stream = kwargs.get("stream")
+                hdlr = StreamHandler(stream)
+            fs = kwargs.get("format", BASIC_FORMAT)
+            dfs = kwargs.get("datefmt", None)
+            style = kwargs.get("style", '%')
+            fmt = Formatter(fs, dfs, style)
+            hdlr.setFormatter(fmt)
+            root.addHandler(hdlr)
+            level = kwargs.get("level")
+            if level is not None:
+                root.setLevel(level)
+    finally:
+        _releaseLock()
+
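+# Editor's note: an illustrative sketch, not part of the upstream module.  A
+# typical one-shot configuration for a short script might look like this;
+# the format fields used here are standard LogRecord attributes.
+def _example_basicConfig():
+    basicConfig(level=DEBUG,
+                format="%(asctime)s %(levelname)s %(name)s: %(message)s")
+    getLogger("demo").debug("configured via basicConfig")
+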
+#---------------------------------------------------------------------------
+# Utility functions at module level.
+# Basically delegate everything to the root logger.
+#---------------------------------------------------------------------------
+
+def getLogger(name=None):
+    """
+    Return a logger with the specified name, creating it if necessary.
+
+    If no name is specified, return the root logger.
+    """
+    if name:
+        return Logger.manager.getLogger(name)
+    else:
+        return root
+
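+# Editor's note: the sketch below is illustrative only and is not part of the
+# upstream module.  It shows the dotted-name hierarchy in action: creating
+# "input.csv" after "input" wires the child to its parent, so records
+# propagate up to the parent's handlers.
+def _example_hierarchy():
+    parent = getLogger("input")
+    child = getLogger("input.csv")
+    assert child.parent is parent       # wired up by Manager._fixupParents
+    assert getLogger() is root          # no name returns the root logger
+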
+def critical(msg, *args, **kwargs):
+    """
+    Log a message with severity 'CRITICAL' on the root logger. If the logger
+    has no handlers, call basicConfig() to add a console handler with a
+    pre-defined format.
+    """
+    if len(root.handlers) == 0:
+        basicConfig()
+    root.critical(msg, *args, **kwargs)
+
+fatal = critical
+
+def error(msg, *args, **kwargs):
+    """
+    Log a message with severity 'ERROR' on the root logger. If the logger has
+    no handlers, call basicConfig() to add a console handler with a pre-defined
+    format.
+    """
+    if len(root.handlers) == 0:
+        basicConfig()
+    root.error(msg, *args, **kwargs)
+
+def exception(msg, *args, **kwargs):
+    """
+    Log a message with severity 'ERROR' on the root logger, with exception
+    information. If the logger has no handlers, basicConfig() is called to add
+    a console handler with a pre-defined format.
+    """
+    kwargs['exc_info'] = True
+    error(msg, *args, **kwargs)
+
+def warning(msg, *args, **kwargs):
+    """
+    Log a message with severity 'WARNING' on the root logger. If the logger has
+    no handlers, call basicConfig() to add a console handler with a pre-defined
+    format.
+    """
+    if len(root.handlers) == 0:
+        basicConfig()
+    root.warning(msg, *args, **kwargs)
+
+warn = warning
+
+def info(msg, *args, **kwargs):
+    """
+    Log a message with severity 'INFO' on the root logger. If the logger has
+    no handlers, call basicConfig() to add a console handler with a pre-defined
+    format.
+    """
+    if len(root.handlers) == 0:
+        basicConfig()
+    root.info(msg, *args, **kwargs)
+
+def debug(msg, *args, **kwargs):
+    """
+    Log a message with severity 'DEBUG' on the root logger. If the logger has
+    no handlers, call basicConfig() to add a console handler with a pre-defined
+    format.
+    """
+    if len(root.handlers) == 0:
+        basicConfig()
+    root.debug(msg, *args, **kwargs)
+
+def log(level, msg, *args, **kwargs):
+    """
+    Log 'msg % args' with the integer severity 'level' on the root logger. If
+    the logger has no handlers, call basicConfig() to add a console handler
+    with a pre-defined format.
+    """
+    if len(root.handlers) == 0:
+        basicConfig()
+    root.log(level, msg, *args, **kwargs)
+
+def disable(level):
+    """
+    Disable all logging calls of severity 'level' and below.
+    """
+    root.manager.disable = level
+
+def shutdown(handlerList=_handlerList):
+    """
+    Perform any cleanup actions in the logging system (e.g. flushing
+    buffers).
+
+    Should be called at application exit.
+    """
+    for wr in reversed(handlerList[:]):
+        #errors might occur, for example, if files are locked
+        #we just ignore them if raiseExceptions is not set
+        try:
+            h = wr()
+            if h:
+                try:
+                    h.acquire()
+                    h.flush()
+                    h.close()
+                except (IOError, ValueError):
+                    # Ignore errors which might be caused
+                    # because handlers have been closed but
+                    # references to them are still around at
+                    # application exit.
+                    pass
+                finally:
+                    h.release()
+        except:
+            if raiseExceptions:
+                raise
+            #else, swallow
+
+#Let's try and shutdown automatically on application exit...
+import atexit
+atexit.register(shutdown)
+
+# Null handler
+
+class NullHandler(Handler):
+    """
+    This handler does nothing. It's intended to be used to avoid the
+    "No handlers could be found for logger XXX" one-off warning. This is
+    important for library code, which may contain code to log events. If a user
+    of the library does not configure logging, the one-off warning might be
+    produced; to avoid this, the library developer simply needs to instantiate
+    a NullHandler and add it to the top-level logger of the library module or
+    package.
+    """
+    def handle(self, record):
+        pass
+
+    def emit(self, record):
+        pass
+
+    def createLock(self):
+        self.lock = None
+
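+# Editor's note: an illustrative sketch, not part of the upstream module.
+# Library code can silence the "no handlers" warning for its own loggers
+# without configuring logging on the application's behalf ("mylib" is a
+# made-up name):
+def _example_nullhandler():
+    getLogger("mylib").addHandler(NullHandler())
+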
+# Warnings integration
+
+_warnings_showwarning = None  # type: Any
+
+def _showwarning(message, category, filename, lineno, file=None, line=None):
+    """
+    Implementation of showwarning() which redirects to logging. It will first
+    check to see if the file parameter is None. If a file is specified, it will
+    delegate to the original warnings implementation of showwarning. Otherwise,
+    it will call warnings.formatwarning and will log the resulting string to a
+    warnings logger named "py.warnings" with level logging.WARNING.
+    """
+    if file is not None:
+        if _warnings_showwarning is not None:
+            _warnings_showwarning(message, category, filename, lineno, file, line)
+    else:
+        s = warnings.formatwarning(message, category, filename, lineno, line)
+        logger = getLogger("py.warnings")
+        if not logger.handlers:
+            logger.addHandler(NullHandler())
+        logger.warning("%s", s)
+
+def captureWarnings(capture):
+    """
+    If capture is true, redirect all warnings to the logging package.
+    If capture is False, ensure that warnings are not redirected to logging
+    but to their original destinations.
+    """
+    global _warnings_showwarning
+    if capture:
+        if _warnings_showwarning is None:
+            _warnings_showwarning = warnings.showwarning
+            warnings.showwarning = _showwarning
+    else:
+        if _warnings_showwarning is not None:
+            warnings.showwarning = _warnings_showwarning
+            _warnings_showwarning = None
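+
+# Editor's note: an illustrative sketch, not part of the upstream module.
+# With capture enabled, warnings.warn() calls are routed to the "py.warnings"
+# logger at WARNING level instead of being written to stderr.
+def _example_captureWarnings():
+    captureWarnings(True)
+    warnings.warn("this ends up in the 'py.warnings' logger")
+    captureWarnings(False)              # restore the original behaviour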
diff --git a/test-data/stdlib-samples/3.2/incomplete/urllib/__init__.py b/test-data/stdlib-samples/3.2/incomplete/urllib/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test-data/stdlib-samples/3.2/incomplete/urllib/parse.py b/test-data/stdlib-samples/3.2/incomplete/urllib/parse.py
new file mode 100644
index 0000000..4fa65c4
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/incomplete/urllib/parse.py
@@ -0,0 +1,980 @@
+"""Parse (absolute and relative) URLs.
+
+urlparse module is based upon the following RFC specifications.
+
+RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding
+and L.  Masinter, January 2005.
+
+RFC 2732 : "Format for Literal IPv6 Addresses in URLs" by R.Hinden, B.Carpenter
+and L.Masinter, December 1999.
+
+RFC 2396:  "Uniform Resource Identifiers (URI)": Generic Syntax by T.
+Berners-Lee, R. Fielding, and L. Masinter, August 1998.
+
+RFC 2368: "The mailto URL scheme", by P.Hoffman , L Masinter, J. Zawinski, July 1998.
+
+RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June
+1995.
+
+RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M.
+McCahill, December 1994
+
+RFC 3986 is considered the current standard and any future changes to the
+urlparse module should conform with it.  The urlparse module is currently not
+entirely compliant with this RFC due to de facto scenarios for parsing, and
+for backward compatibility purposes, some parsing quirks from older RFCs are
+retained. The testcases in test_urlparse.py provide a good indicator of
+parsing behavior.
+"""
+
+import sys
+import collections
+
+__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
+           "urlsplit", "urlunsplit", "urlencode", "parse_qs",
+           "parse_qsl", "quote", "quote_plus", "quote_from_bytes",
+           "unquote", "unquote_plus", "unquote_to_bytes"]
+
+# A classification of schemes ('' means apply by default)
+uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap',
+                 'wais', 'file', 'https', 'shttp', 'mms',
+                 'prospero', 'rtsp', 'rtspu', '', 'sftp',
+                 'svn', 'svn+ssh']
+uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet',
+               'imap', 'wais', 'file', 'mms', 'https', 'shttp',
+               'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '',
+               'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh']
+non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
+                    'telnet', 'wais', 'imap', 'snews', 'sip', 'sips']
+uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap',
+               'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips',
+               'mms', '', 'sftp']
+uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms',
+              'gopher', 'rtsp', 'rtspu', 'sip', 'sips', '']
+uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news',
+                 'nntp', 'wais', 'https', 'shttp', 'snews',
+                 'file', 'prospero', '']
+
+# Characters valid in scheme names
+scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
+                'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+                '0123456789'
+                '+-.')
+
+# XXX: Consider replacing with functools.lru_cache
+MAX_CACHE_SIZE = 20
+_parse_cache = {}
+
+def clear_cache():
+    """Clear the parse cache and the quoters cache."""
+    _parse_cache.clear()
+    _safe_quoters.clear()
+
+
+# Helpers for bytes handling
+# For 3.2, we deliberately require applications that
+# handle improperly quoted URLs to do their own
+# decoding and encoding. If valid use cases are
+# presented, we may relax this by using latin-1
+# decoding internally for 3.3
+_implicit_encoding = 'ascii'
+_implicit_errors = 'strict'
+
+def _noop(obj):
+    return obj
+
+def _encode_result(obj, encoding=_implicit_encoding,
+                        errors=_implicit_errors):
+    return obj.encode(encoding, errors)
+
+def _decode_args(args, encoding=_implicit_encoding,
+                       errors=_implicit_errors):
+    return tuple(x.decode(encoding, errors) if x else '' for x in args)
+
+def _coerce_args(*args):
+    # Invokes decode if necessary to create str args
+    # and returns the coerced inputs along with
+    # an appropriate result coercion function
+    #   - noop for str inputs
+    #   - encoding function otherwise
+    str_input = isinstance(args[0], str)
+    for arg in args[1:]:
+        # We special-case the empty string to support the
+        # "scheme=''" default argument to some functions
+        if arg and isinstance(arg, str) != str_input:
+            raise TypeError("Cannot mix str and non-str arguments")
+    if str_input:
+        return args + (_noop,)
+    return _decode_args(args) + (_encode_result,)
+
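+# Editor's note: an illustrative sketch, not part of the upstream module.
+# str input passes through untouched; bytes input is decoded to str for
+# parsing and the returned coercion function re-encodes results to bytes.
+def _example_coerce_args():
+    url, _coerce_result = _coerce_args(b"http://example.com/")
+    assert url == "http://example.com/"         # decoded for parsing
+    assert _coerce_result("path") == b"path"    # results re-encoded
+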
+# Result objects are more helpful than simple tuples
+class _ResultMixinStr(object):
+    """Standard approach to encoding parsed results from str to bytes"""
+    __slots__ = ()
+
+    def encode(self, encoding='ascii', errors='strict'):
+        return self._encoded_counterpart(*(x.encode(encoding, errors) for x in self))
+
+
+class _ResultMixinBytes(object):
+    """Standard approach to decoding parsed results from bytes to str"""
+    __slots__ = ()
+
+    def decode(self, encoding='ascii', errors='strict'):
+        return self._decoded_counterpart(*(x.decode(encoding, errors) for x in self))
+
+
+class _NetlocResultMixinBase(object):
+    """Shared methods for the parsed result objects containing a netloc element"""
+    __slots__ = ()
+
+    @property
+    def username(self):
+        return self._userinfo[0]
+
+    @property
+    def password(self):
+        return self._userinfo[1]
+
+    @property
+    def hostname(self):
+        hostname = self._hostinfo[0]
+        if not hostname:
+            hostname = None
+        elif hostname is not None:
+            hostname = hostname.lower()
+        return hostname
+
+    @property
+    def port(self):
+        port = self._hostinfo[1]
+        if port is not None:
+            port = int(port, 10)
+        return port
+
+
+class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr):
+    __slots__ = ()
+
+    @property
+    def _userinfo(self):
+        netloc = self.netloc
+        userinfo, have_info, hostinfo = netloc.rpartition('@')
+        if have_info:
+            username, have_password, password = userinfo.partition(':')
+            if not have_password:
+                password = None
+        else:
+            username = password = None
+        return username, password
+
+    @property
+    def _hostinfo(self):
+        netloc = self.netloc
+        _, _, hostinfo = netloc.rpartition('@')
+        _, have_open_br, bracketed = hostinfo.partition('[')
+        if have_open_br:
+            hostname, _, port = bracketed.partition(']')
+            _, have_port, port = port.partition(':')
+        else:
+            hostname, have_port, port = hostinfo.partition(':')
+        if not have_port:
+            port = None
+        return hostname, port
+
+
+class _NetlocResultMixinBytes(_NetlocResultMixinBase, _ResultMixinBytes):
+    __slots__ = ()
+
+    @property
+    def _userinfo(self):
+        netloc = self.netloc
+        userinfo, have_info, hostinfo = netloc.rpartition(b'@')
+        if have_info:
+            username, have_password, password = userinfo.partition(b':')
+            if not have_password:
+                password = None
+        else:
+            username = password = None
+        return username, password
+
+    @property
+    def _hostinfo(self):
+        netloc = self.netloc
+        _, _, hostinfo = netloc.rpartition(b'@')
+        _, have_open_br, bracketed = hostinfo.partition(b'[')
+        if have_open_br:
+            hostname, _, port = bracketed.partition(b']')
+            _, have_port, port = port.partition(b':')
+        else:
+            hostname, have_port, port = hostinfo.partition(b':')
+        if not have_port:
+            port = None
+        return hostname, port
+
+
+from collections import namedtuple
+
+_DefragResultBase = namedtuple('DefragResult', 'url fragment')
+_SplitResultBase = namedtuple('SplitResult', 'scheme netloc path query fragment')
+_ParseResultBase = namedtuple('ParseResult', 'scheme netloc path params query fragment')
+
+# For backwards compatibility, alias _NetlocResultMixinStr
+# ResultBase is no longer part of the documented API, but it is
+# retained since deprecating it isn't worth the hassle
+ResultBase = _NetlocResultMixinStr
+
+# Structured result objects for string data
+class DefragResult(_DefragResultBase, _ResultMixinStr):
+    __slots__ = ()
+    def geturl(self):
+        if self.fragment:
+            return self.url + '#' + self.fragment
+        else:
+            return self.url
+
+class SplitResult(_SplitResultBase, _NetlocResultMixinStr):
+    __slots__ = ()
+    def geturl(self):
+        return urlunsplit(self)
+
+class ParseResult(_ParseResultBase, _NetlocResultMixinStr):
+    __slots__ = ()
+    def geturl(self):
+        return urlunparse(self)
+
+# Structured result objects for bytes data
+class DefragResultBytes(_DefragResultBase, _ResultMixinBytes):
+    __slots__ = ()
+    def geturl(self):
+        if self.fragment:
+            return self.url + b'#' + self.fragment
+        else:
+            return self.url
+
+class SplitResultBytes(_SplitResultBase, _NetlocResultMixinBytes):
+    __slots__ = ()
+    def geturl(self):
+        return urlunsplit(self)
+
+class ParseResultBytes(_ParseResultBase, _NetlocResultMixinBytes):
+    __slots__ = ()
+    def geturl(self):
+        return urlunparse(self)
+
+# Set up the encode/decode result pairs
+def _fix_result_transcoding():
+    _result_pairs = (
+        (DefragResult, DefragResultBytes),
+        (SplitResult, SplitResultBytes),
+        (ParseResult, ParseResultBytes),
+    )
+    for _decoded, _encoded in _result_pairs:
+        _decoded._encoded_counterpart = _encoded
+        _encoded._decoded_counterpart = _decoded
+
+_fix_result_transcoding()
+del _fix_result_transcoding
+
+def urlparse(url, scheme='', allow_fragments=True):
+    """Parse a URL into 6 components:
+    <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
+    Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
+    Note that we don't break the components up in smaller bits
+    (e.g. netloc is a single string) and we don't expand % escapes."""
+    url, scheme, _coerce_result = _coerce_args(url, scheme)
+    tuple = urlsplit(url, scheme, allow_fragments)
+    scheme, netloc, url, query, fragment = tuple
+    if scheme in uses_params and ';' in url:
+        url, params = _splitparams(url)
+    else:
+        params = ''
+    result = ParseResult(scheme, netloc, url, params, query, fragment)
+    return _coerce_result(result)
+
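+# Editor's note: an illustrative sketch, not part of the upstream module,
+# showing the six components produced for a fully populated URL:
+def _example_urlparse():
+    p = urlparse("http://example.com/path;params?q=1#frag")
+    assert p.scheme == "http" and p.netloc == "example.com"
+    assert p.path == "/path" and p.params == "params"
+    assert p.query == "q=1" and p.fragment == "frag"
+    assert p.geturl() == "http://example.com/path;params?q=1#frag"
+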
+def _splitparams(url):
+    if '/'  in url:
+        i = url.find(';', url.rfind('/'))
+        if i < 0:
+            return url, ''
+    else:
+        i = url.find(';')
+    return url[:i], url[i+1:]
+
+def _splitnetloc(url, start=0):
+    delim = len(url)   # position of end of domain part of url, default is end
+    for c in '/?#':    # look for delimiters; the order is NOT important
+        wdelim = url.find(c, start)        # find first of this delim
+        if wdelim >= 0:                    # if found
+            delim = min(delim, wdelim)     # use earliest delim position
+    return url[start:delim], url[delim:]   # return (domain, rest)
+
+def urlsplit(url, scheme='', allow_fragments=True):
+    """Parse a URL into 5 components:
+    <scheme>://<netloc>/<path>?<query>#<fragment>
+    Return a 5-tuple: (scheme, netloc, path, query, fragment).
+    Note that we don't break the components up in smaller bits
+    (e.g. netloc is a single string) and we don't expand % escapes."""
+    url, scheme, _coerce_result = _coerce_args(url, scheme)
+    allow_fragments = bool(allow_fragments)
+    key = url, scheme, allow_fragments, type(url), type(scheme)
+    cached = _parse_cache.get(key, None)
+    if cached:
+        return _coerce_result(cached)
+    if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
+        clear_cache()
+    netloc = query = fragment = ''
+    i = url.find(':')
+    if i > 0:
+        if url[:i] == 'http': # optimize the common case
+            scheme = url[:i].lower()
+            url = url[i+1:]
+            if url[:2] == '//':
+                netloc, url = _splitnetloc(url, 2)
+                if (('[' in netloc and ']' not in netloc) or
+                        (']' in netloc and '[' not in netloc)):
+                    raise ValueError("Invalid IPv6 URL")
+            if allow_fragments and '#' in url:
+                url, fragment = url.split('#', 1)
+            if '?' in url:
+                url, query = url.split('?', 1)
+            v = SplitResult(scheme, netloc, url, query, fragment)
+            _parse_cache[key] = v
+            return _coerce_result(v)
+        for c in url[:i]:
+            if c not in scheme_chars:
+                break
+        else:
+            try:
+                # make sure "url" is not actually a port number (in which case
+                # "scheme" is really part of the path
+                _testportnum = int(url[i+1:])
+            except ValueError:
+                scheme, url = url[:i].lower(), url[i+1:]
+
+    if url[:2] == '//':
+        netloc, url = _splitnetloc(url, 2)
+        if (('[' in netloc and ']' not in netloc) or
+                (']' in netloc and '[' not in netloc)):
+            raise ValueError("Invalid IPv6 URL")
+    if allow_fragments and scheme in uses_fragment and '#' in url:
+        url, fragment = url.split('#', 1)
+    if scheme in uses_query and '?' in url:
+        url, query = url.split('?', 1)
+    v = SplitResult(scheme, netloc, url, query, fragment)
+    _parse_cache[key] = v
+    return _coerce_result(v)
+
+def urlunparse(components):
+    """Put a parsed URL back together again.  This may result in a
+    slightly different, but equivalent URL, if the URL that was parsed
+    originally had redundant delimiters, e.g. a ? with an empty query
+    (the draft states that these are equivalent)."""
+    scheme, netloc, url, params, query, fragment, _coerce_result = (
+                                                  _coerce_args(*components))
+    if params:
+        url = "%s;%s" % (url, params)
+    return _coerce_result(urlunsplit((scheme, netloc, url, query, fragment)))
+
+def urlunsplit(components):
+    """Combine the elements of a tuple as returned by urlsplit() into a
+    complete URL as a string. The data argument can be any five-item iterable.
+    This may result in a slightly different, but equivalent URL, if the URL that
+    was parsed originally had unnecessary delimiters (for example, a ? with an
+    empty query; the RFC states that these are equivalent)."""
+    scheme, netloc, url, query, fragment, _coerce_result = (
+                                          _coerce_args(*components))
+    if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'):
+        if url and url[:1] != '/': url = '/' + url
+        url = '//' + (netloc or '') + url
+    if scheme:
+        url = scheme + ':' + url
+    if query:
+        url = url + '?' + query
+    if fragment:
+        url = url + '#' + fragment
+    return _coerce_result(url)
+
+def urljoin(base, url, allow_fragments=True):
+    """Join a base URL and a possibly relative URL to form an absolute
+    interpretation of the latter."""
+    if not base:
+        return url
+    if not url:
+        return base
+    base, url, _coerce_result = _coerce_args(base, url)
+    bscheme, bnetloc, bpath, bparams, bquery, bfragment = \
+            urlparse(base, '', allow_fragments)
+    scheme, netloc, path, params, query, fragment = \
+            urlparse(url, bscheme, allow_fragments)
+    if scheme != bscheme or scheme not in uses_relative:
+        return _coerce_result(url)
+    if scheme in uses_netloc:
+        if netloc:
+            return _coerce_result(urlunparse((scheme, netloc, path,
+                                              params, query, fragment)))
+        netloc = bnetloc
+    if path[:1] == '/':
+        return _coerce_result(urlunparse((scheme, netloc, path,
+                                          params, query, fragment)))
+    if not path and not params:
+        path = bpath
+        params = bparams
+        if not query:
+            query = bquery
+        return _coerce_result(urlunparse((scheme, netloc, path,
+                                          params, query, fragment)))
+    segments = bpath.split('/')[:-1] + path.split('/')
+    # XXX The stuff below is bogus in various ways...
+    if segments[-1] == '.':
+        segments[-1] = ''
+    while '.' in segments:
+        segments.remove('.')
+    while 1:
+        i = 1
+        n = len(segments) - 1
+        while i < n:
+            if (segments[i] == '..'
+                and segments[i-1] not in ('', '..')):
+                del segments[i-1:i+1]
+                break
+            i = i+1
+        else:
+            break
+    if segments == ['', '..']:
+        segments[-1] = ''
+    elif len(segments) >= 2 and segments[-1] == '..':
+        segments[-2:] = ['']
+    return _coerce_result(urlunparse((scheme, netloc, '/'.join(segments),
+                                      params, query, fragment)))
+
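+# Editor's note: an illustrative sketch, not part of the upstream module,
+# showing how relative references are resolved against a base URL:
+def _example_urljoin():
+    assert urljoin("http://a/b/c/d;p?q", "g") == "http://a/b/c/g"
+    assert urljoin("http://a/b/c/d;p?q", "../g") == "http://a/b/g"
+    assert urljoin("http://a/b/c/d;p?q", "//other/x") == "http://other/x"
+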
+def urldefrag(url):
+    """Removes any existing fragment from URL.
+
+    Returns a tuple of the defragmented URL and the fragment.  If
+    the URL contained no fragments, the second element is the
+    empty string.
+    """
+    url, _coerce_result = _coerce_args(url)
+    if '#' in url:
+        s, n, p, a, q, frag = urlparse(url)
+        defrag = urlunparse((s, n, p, a, q, ''))
+    else:
+        frag = ''
+        defrag = url
+    return _coerce_result(DefragResult(defrag, frag))
+
+def unquote_to_bytes(string):
+    """unquote_to_bytes('abc%20def') -> b'abc def'."""
+    # Note: strings are encoded as UTF-8. This is only an issue if it contains
+    # unescaped non-ASCII characters, which URIs should not.
+    if not string:
+        # Is it a string-like object?
+        string.split
+        return b''
+    if isinstance(string, str):
+        string = string.encode('utf-8')
+    res = string.split(b'%')
+    if len(res) == 1:
+        return string
+    string = res[0]
+    for item in res[1:]:
+        try:
+            string += bytes([int(item[:2], 16)]) + item[2:]
+        except ValueError:
+            string += b'%' + item
+    return string
+
+def unquote(string, encoding='utf-8', errors='replace'):
+    """Replace %xx escapes by their single-character equivalent. The optional
+    encoding and errors parameters specify how to decode percent-encoded
+    sequences into Unicode characters, as accepted by the bytes.decode()
+    method.
+    By default, percent-encoded sequences are decoded with UTF-8, and invalid
+    sequences are replaced by a placeholder character.
+
+    unquote('abc%20def') -> 'abc def'.
+    """
+    if string == '':
+        return string
+    res = string.split('%')
+    if len(res) == 1:
+        return string
+    if encoding is None:
+        encoding = 'utf-8'
+    if errors is None:
+        errors = 'replace'
+    # pct_sequence: contiguous sequence of percent-encoded bytes, decoded
+    pct_sequence = b''
+    string = res[0]
+    for item in res[1:]:
+        try:
+            if not item:
+                raise ValueError
+            pct_sequence += bytes.fromhex(item[:2])
+            rest = item[2:]
+            if not rest:
+                # This segment was just a single percent-encoded character.
+                # May be part of a sequence of code units, so delay decoding.
+                # (Stored in pct_sequence).
+                continue
+        except ValueError:
+            rest = '%' + item
+        # Encountered non-percent-encoded characters. Flush the current
+        # pct_sequence.
+        string += pct_sequence.decode(encoding, errors) + rest
+        pct_sequence = b''
+    if pct_sequence:
+        # Flush the final pct_sequence
+        string += pct_sequence.decode(encoding, errors)
+    return string
+
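+# Editor's note: an illustrative sketch, not part of the upstream module.
+# Consecutive percent-escapes are collected and decoded together, so
+# multi-byte UTF-8 sequences round-trip correctly:
+def _example_unquote():
+    assert unquote("abc%20def") == "abc def"
+    assert unquote("%E2%82%AC") == "\u20ac"     # the euro sign
+    assert unquote_to_bytes("abc%20def") == b"abc def"
+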
+def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
+             encoding='utf-8', errors='replace'):
+    """Parse a query given as a string argument.
+
+        Arguments:
+
+        qs: percent-encoded query string to be parsed
+
+        keep_blank_values: flag indicating whether blank values in
+            percent-encoded queries should be treated as blank strings.
+            A true value indicates that blanks should be retained as
+            blank strings.  The default false value indicates that
+            blank values are to be ignored and treated as if they were
+            not included.
+
+        strict_parsing: flag indicating what to do with parsing errors.
+            If false (the default), errors are silently ignored.
+            If true, errors raise a ValueError exception.
+
+        encoding and errors: specify how to decode percent-encoded sequences
+            into Unicode characters, as accepted by the bytes.decode() method.
+    """
+    dict = {}
+    pairs = parse_qsl(qs, keep_blank_values, strict_parsing,
+                      encoding=encoding, errors=errors)
+    for name, value in pairs:
+        if name in dict:
+            dict[name].append(value)
+        else:
+            dict[name] = [value]
+    return dict
+
+def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
+              encoding='utf-8', errors='replace'):
+    """Parse a query given as a string argument.
+
+    Arguments:
+
+    qs: percent-encoded query string to be parsed
+
+    keep_blank_values: flag indicating whether blank values in
+        percent-encoded queries should be treated as blank strings.  A
+        true value indicates that blanks should be retained as blank
+        strings.  The default false value indicates that blank values
+        are to be ignored and treated as if they were  not included.
+
+    strict_parsing: flag indicating what to do with parsing errors. If
+        false (the default), errors are silently ignored. If true,
+        errors raise a ValueError exception.
+
+    encoding and errors: specify how to decode percent-encoded sequences
+        into Unicode characters, as accepted by the bytes.decode() method.
+
+    Returns a list, as G-d intended.
+    """
+    qs, _coerce_result = _coerce_args(qs)
+    pairs = []
+    for s1 in qs.split('&'):
+        for s2 in s1.split(';'):
+            pairs.append(s2)
+    r = []
+    for name_value in pairs:
+        if not name_value and not strict_parsing:
+            continue
+        nv = name_value.split('=', 1)
+        if len(nv) != 2:
+            if strict_parsing:
+                raise ValueError("bad query field: %r" % (name_value,))
+            # Handle case of a control-name with no equal sign
+            if keep_blank_values:
+                nv.append('')
+            else:
+                continue
+        if len(nv[1]) or keep_blank_values:
+            name = nv[0].replace('+', ' ')
+            name = unquote(name, encoding=encoding, errors=errors)
+            name = _coerce_result(name)
+            value = nv[1].replace('+', ' ')
+            value = unquote(value, encoding=encoding, errors=errors)
+            value = _coerce_result(value)
+            r.append((name, value))
+    return r
+
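+# Editor's note: an illustrative sketch, not part of the upstream module.
+# parse_qsl() preserves order and repeated names; parse_qs() groups the
+# values for each name into a list:
+def _example_parse_qs():
+    assert parse_qsl("a=1&b=2&a=3") == [("a", "1"), ("b", "2"), ("a", "3")]
+    assert parse_qs("a=1&b=2&a=3") == {"a": ["1", "3"], "b": ["2"]}
+    assert parse_qs("a=&b=2") == {"b": ["2"]}   # blank values dropped by default
+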
+def unquote_plus(string, encoding='utf-8', errors='replace'):
+    """Like unquote(), but also replace plus signs by spaces, as required for
+    unquoting HTML form values.
+
+    unquote_plus('%7e/abc+def') -> '~/abc def'
+    """
+    string = string.replace('+', ' ')
+    return unquote(string, encoding, errors)
+
+_ALWAYS_SAFE = frozenset(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+                         b'abcdefghijklmnopqrstuvwxyz'
+                         b'0123456789'
+                         b'_.-')
+_ALWAYS_SAFE_BYTES = bytes(_ALWAYS_SAFE)
+_safe_quoters = {}
+
+class Quoter(collections.defaultdict):
+    """A mapping from bytes (in range(0,256)) to strings.
+
+    String values are percent-encoded byte values, unless the key is < 128 and
+    in the "safe" set (either the specified safe set, or the default set).
+    """
+    # Keeps a cache internally, using defaultdict, for efficiency (lookups
+    # of cached keys don't call Python code at all).
+    def __init__(self, safe):
+        """safe: bytes object."""
+        self.safe = _ALWAYS_SAFE.union(safe)
+
+    def __repr__(self):
+        # Without this, will just display as a defaultdict
+        return "<Quoter %r>" % dict(self)
+
+    def __missing__(self, b):
+        # Handle a cache miss. Store quoted string in cache and return.
+        res = chr(b) if b in self.safe else '%{:02X}'.format(b)
+        self[b] = res
+        return res
+
+def quote(string, safe='/', encoding=None, errors=None):
+    """quote('abc def') -> 'abc%20def'
+
+    Each part of a URL, e.g. the path info, the query, etc., has a
+    different set of reserved characters that must be quoted.
+
+    RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists
+    the following reserved characters.
+
+    reserved    = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
+                  "$" | ","
+
+    Each of these characters is reserved in some component of a URL,
+    but not necessarily in all of them.
+
+    By default, the quote function is intended for quoting the path
+    section of a URL.  Thus, it will not encode '/'.  This character
+    is reserved, but in typical usage the quote function is being
+    called on a path where the existing slash characters are used as
+    reserved characters.
+
+    string and safe may be either str or bytes objects. encoding must
+    not be specified if string is a str.
+
+    The optional encoding and errors parameters specify how to deal with
+    non-ASCII characters, as accepted by the str.encode method.
+    By default, encoding='utf-8' (characters are encoded with UTF-8), and
+    errors='strict' (unsupported characters raise a UnicodeEncodeError).
+    """
+    if isinstance(string, str):
+        if not string:
+            return string
+        if encoding is None:
+            encoding = 'utf-8'
+        if errors is None:
+            errors = 'strict'
+        string = string.encode(encoding, errors)
+    else:
+        if encoding is not None:
+            raise TypeError("quote() doesn't support 'encoding' for bytes")
+        if errors is not None:
+            raise TypeError("quote() doesn't support 'errors' for bytes")
+    return quote_from_bytes(string, safe)
+
+def quote_plus(string, safe='', encoding=None, errors=None):
+    """Like quote(), but also replace ' ' with '+', as required for quoting
+    HTML form values. Plus signs in the original string are escaped unless
+    they are included in safe. Also, unlike quote(), safe does not default to '/'.
+    """
+    # Check if ' ' in string, where string may either be a str or bytes.  If
+    # there are no spaces, the regular quote will produce the right answer.
+    if ((isinstance(string, str) and ' ' not in string) or
+        (isinstance(string, bytes) and b' ' not in string)):
+        return quote(string, safe, encoding, errors)
+    if isinstance(safe, str):
+        space = ' '
+    else:
+        space = b' '
+    string = quote(string, safe + space, encoding, errors)
+    return string.replace(' ', '+')
+
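+# Editor's note: an illustrative sketch, not part of the upstream module.
+# quote() leaves '/' alone by default (path quoting); quote_plus() encodes it
+# and turns spaces into '+' as used in HTML form data:
+def _example_quote():
+    assert quote("/El Ni\u00f1o/") == "/El%20Ni%C3%B1o/"
+    assert quote_plus("/El Ni\u00f1o/") == "%2FEl+Ni%C3%B1o%2F"
+    assert unquote_plus("%2FEl+Ni%C3%B1o%2F") == "/El Ni\u00f1o/"
+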
+def quote_from_bytes(bs, safe='/'):
+    """Like quote(), but accepts a bytes object rather than a str, and does
+    not perform string-to-bytes encoding.  It always returns an ASCII string.
+    quote_from_bytes(b'abc def\xab') -> 'abc%20def%AB'
+    """
+    if not isinstance(bs, (bytes, bytearray)):
+        raise TypeError("quote_from_bytes() expected bytes")
+    if not bs:
+        return ''
+    if isinstance(safe, str):
+        # Normalize 'safe' by converting to bytes and removing non-ASCII chars
+        safe = safe.encode('ascii', 'ignore')
+    else:
+        safe = bytes([c for c in safe if c < 128])
+    if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe):
+        return bs.decode()
+    try:
+        quoter = _safe_quoters[safe]
+    except KeyError:
+        _safe_quoters[safe] = quoter = Quoter(safe).__getitem__
+    return ''.join([quoter(char) for char in bs])
+
+def urlencode(query, doseq=False, safe='', encoding=None, errors=None):
+    """Encode a sequence of two-element tuples or dictionary into a URL query string.
+
+    If any values in the query arg are sequences and doseq is true, each
+    sequence element is converted to a separate parameter.
+
+    If the query arg is a sequence of two-element tuples, the order of the
+    parameters in the output will match the order of parameters in the
+    input.
+
+    The query arg may be either a string or a bytes type. When the query arg is
+    a string, the safe, encoding and errors parameters are passed to quote_plus
+    for encoding.
+    """
+
+    if hasattr(query, "items"):
+        query = query.items()
+    else:
+        # It's a bother at times that strings and string-like objects are
+        # sequences.
+        try:
+            # non-sequence items should not work with len()
+            # non-empty strings will fail this
+            if len(query) and not isinstance(query[0], tuple):
+                raise TypeError
+            # Zero-length sequences of all types will get here and succeed,
+            # but that's a minor nit.  Since the original implementation
+            # allowed empty dicts that type of behavior probably should be
+            # preserved for consistency
+        except TypeError:
+            ty, va, tb = sys.exc_info()
+            raise TypeError("not a valid non-string sequence "
+                            "or mapping object").with_traceback(tb)
+
+    l = []
+    if not doseq:
+        for k, v in query:
+            if isinstance(k, bytes):
+                k = quote_plus(k, safe)
+            else:
+                k = quote_plus(str(k), safe, encoding, errors)
+
+            if isinstance(v, bytes):
+                v = quote_plus(v, safe)
+            else:
+                v = quote_plus(str(v), safe, encoding, errors)
+            l.append(k + '=' + v)
+    else:
+        for k, v in query:
+            if isinstance(k, bytes):
+                k = quote_plus(k, safe)
+            else:
+                k = quote_plus(str(k), safe, encoding, errors)
+
+            if isinstance(v, bytes):
+                v = quote_plus(v, safe)
+                l.append(k + '=' + v)
+            elif isinstance(v, str):
+                v = quote_plus(v, safe, encoding, errors)
+                l.append(k + '=' + v)
+            else:
+                try:
+                    # Is this a sufficient test for sequence-ness?
+                    x = len(v)
+                except TypeError:
+                    # not a sequence
+                    v = quote_plus(str(v), safe, encoding, errors)
+                    l.append(k + '=' + v)
+                else:
+                    # loop over the sequence
+                    for elt in v:
+                        if isinstance(elt, bytes):
+                            elt = quote_plus(elt, safe)
+                        else:
+                            elt = quote_plus(str(elt), safe, encoding, errors)
+                        l.append(k + '=' + elt)
+    return '&'.join(l)
+
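+# Editor's note: an illustrative sketch, not part of the upstream module.
+# A sequence of pairs keeps its order; with doseq=True each element of a
+# sequence value becomes its own parameter:
+def _example_urlencode():
+    assert urlencode([("q", "a b"), ("n", 1)]) == "q=a+b&n=1"
+    assert urlencode([("tag", ["x", "y"]), ("q", "a b")], doseq=True) == \
+        "tag=x&tag=y&q=a+b"
+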
+# Utilities to parse URLs (most of these return None for missing parts):
+# unwrap('<URL:type://host/path>') --> 'type://host/path'
+# splittype('type:opaquestring') --> 'type', 'opaquestring'
+# splithost('//host[:port]/path') --> 'host[:port]', '/path'
+# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'
+# splitpasswd('user:passwd') -> 'user', 'passwd'
+# splitport('host:port') --> 'host', 'port'
+# splitquery('/path?query') --> '/path', 'query'
+# splittag('/path#tag') --> '/path', 'tag'
+# splitattr('/path;attr1=value1;attr2=value2;...') ->
+#   '/path', ['attr1=value1', 'attr2=value2', ...]
+# splitvalue('attr=value') --> 'attr', 'value'
+# urllib.parse.unquote('abc%20def') -> 'abc def'
+# quote('abc def') -> 'abc%20def')
+
+def to_bytes(url):
+    """to_bytes(u"URL") --> 'URL'."""
+    # Most URL schemes require ASCII. If that changes, the conversion
+    # can be relaxed.
+    # XXX get rid of to_bytes()
+    if isinstance(url, str):
+        try:
+            url = url.encode("ASCII").decode()
+        except UnicodeError:
+            raise UnicodeError("URL " + repr(url) +
+                               " contains non-ASCII characters")
+    return url
+
+def unwrap(url):
+    """unwrap('<URL:type://host/path>') --> 'type://host/path'."""
+    url = str(url).strip()
+    if url[:1] == '<' and url[-1:] == '>':
+        url = url[1:-1].strip()
+    if url[:4] == 'URL:': url = url[4:].strip()
+    return url
+
+_typeprog = None
+def splittype(url):
+    """splittype('type:opaquestring') --> 'type', 'opaquestring'."""
+    global _typeprog
+    if _typeprog is None:
+        import re
+        _typeprog = re.compile('^([^/:]+):')
+
+    match = _typeprog.match(url)
+    if match:
+        scheme = match.group(1)
+        return scheme.lower(), url[len(scheme) + 1:]
+    return None, url
+
+_hostprog = None
+def splithost(url):
+    """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
+    global _hostprog
+    if _hostprog is None:
+        import re
+        _hostprog = re.compile('^//([^/?]*)(.*)$')
+
+    match = _hostprog.match(url)
+    if match:
+        host_port = match.group(1)
+        path = match.group(2)
+        if path and not path.startswith('/'):
+            path = '/' + path
+        return host_port, path
+    return None, url
+
+_userprog = None
+def splituser(host):
+    """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
+    global _userprog
+    if _userprog is None:
+        import re
+        _userprog = re.compile('^(.*)@(.*)$')
+
+    match = _userprog.match(host)
+    if match: return match.group(1, 2)
+    return None, host
+
+_passwdprog = None
+def splitpasswd(user):
+    """splitpasswd('user:passwd') -> 'user', 'passwd'."""
+    global _passwdprog
+    if _passwdprog is None:
+        import re
+        _passwdprog = re.compile('^([^:]*):(.*)$',re.S)
+
+    match = _passwdprog.match(user)
+    if match: return match.group(1, 2)
+    return user, None
+
+# splittag('/path#tag') --> '/path', 'tag'
+_portprog = None
+def splitport(host):
+    """splitport('host:port') --> 'host', 'port'."""
+    global _portprog
+    if _portprog is None:
+        import re
+        _portprog = re.compile('^(.*):([0-9]+)$')
+
+    match = _portprog.match(host)
+    if match: return match.group(1, 2)
+    return host, None
+
+_nportprog = None
+def splitnport(host, defport=-1):
+    """Split host and port, returning numeric port.
+    Return given default port if no ':' found; defaults to -1.
+    Return numerical port if a valid number are found after ':'.
+    Return None if ':' but not a valid number."""
+    global _nportprog
+    if _nportprog is None:
+        import re
+        _nportprog = re.compile('^(.*):(.*)$')
+
+    match = _nportprog.match(host)
+    if match:
+        host, port = match.group(1, 2)
+        try:
+            if not port: raise ValueError("no digits")
+            nport = int(port)
+        except ValueError:
+            nport = None
+        return host, nport
+    return host, defport
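+
+# Illustrative examples (a sketch of the expected results, not an exhaustive
+# specification):
+#   splitnport('www.example.com:80')  --> 'www.example.com', 80
+#   splitnport('www.example.com:foo') --> 'www.example.com', None
+#   splitnport('www.example.com')     --> 'www.example.com', -1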
+
+_queryprog = None
+def splitquery(url):
+    """splitquery('/path?query') --> '/path', 'query'."""
+    global _queryprog
+    if _queryprog is None:
+        import re
+        _queryprog = re.compile('^(.*)\?([^?]*)$')
+
+    match = _queryprog.match(url)
+    if match: return match.group(1, 2)
+    return url, None
+
+_tagprog = None
+def splittag(url):
+    """splittag('/path#tag') --> '/path', 'tag'."""
+    global _tagprog
+    if _tagprog is None:
+        import re
+        _tagprog = re.compile('^(.*)#([^#]*)$')
+
+    match = _tagprog.match(url)
+    if match: return match.group(1, 2)
+    return url, None
+
+def splitattr(url):
+    """splitattr('/path;attr1=value1;attr2=value2;...') ->
+        '/path', ['attr1=value1', 'attr2=value2', ...]."""
+    words = url.split(';')
+    return words[0], words[1:]
+
+_valueprog = None
+def splitvalue(attr):
+    """splitvalue('attr=value') --> 'attr', 'value'."""
+    global _valueprog
+    if _valueprog is None:
+        import re
+        _valueprog = re.compile('^([^=]*)=(.*)$')
+
+    match = _valueprog.match(attr)
+    if match: return match.group(1, 2)
+    return attr, None
diff --git a/test-data/stdlib-samples/3.2/posixpath.py b/test-data/stdlib-samples/3.2/posixpath.py
new file mode 100644
index 0000000..cf5d59e
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/posixpath.py
@@ -0,0 +1,466 @@
+"""Common operations on Posix pathnames.
+
+Instead of importing this module directly, import os and refer to
+this module as os.path.  The "os.path" name is an alias for this
+module on Posix systems; on other systems (e.g. Mac, Windows),
+os.path provides the same operations in a manner specific to that
+platform, and is an alias to another module (e.g. macpath, ntpath).
+
+Some of this can actually be useful on non-Posix systems too, e.g.
+for manipulation of the pathname component of URLs.
+"""
+
+import os
+import sys
+import stat
+import genericpath
+from genericpath import *
+
+from typing import (
+    Tuple, BinaryIO, TextIO, Pattern, AnyStr, List, Set, Any, Union, cast
+)
+
+__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
+           "basename","dirname","commonprefix","getsize","getmtime",
+           "getatime","getctime","islink","exists","lexists","isdir","isfile",
+           "ismount", "expanduser","expandvars","normpath","abspath",
+           "samefile","sameopenfile","samestat",
+           "curdir","pardir","sep","pathsep","defpath","altsep","extsep",
+           "devnull","realpath","supports_unicode_filenames","relpath"]
+
+# Strings representing various path-related bits and pieces.
+# These are primarily for export; internally, they are hardcoded.
+curdir = '.'
+pardir = '..'
+extsep = '.'
+sep = '/'
+pathsep = ':'
+defpath = ':/bin:/usr/bin'
+altsep = None # type: str
+devnull = '/dev/null'
+
+def _get_sep(path: AnyStr) -> AnyStr:
+    if isinstance(path, bytes):
+        return b'/'
+    else:
+        return '/'
+
+# Normalize the case of a pathname.  Trivial in Posix, string.lower on Mac.
+# On MS-DOS this may also turn slashes into backslashes; however, other
+# normalizations (such as optimizing '../' away) are not allowed
+# (another function should be defined to do that).
+
+def normcase(s: AnyStr) -> AnyStr:
+    """Normalize case of pathname.  Has no effect under Posix"""
+    # TODO: on Mac OS X, this should really return s.lower().
+    if not isinstance(s, (bytes, str)):
+        raise TypeError("normcase() argument must be str or bytes, "
+                        "not '{}'".format(s.__class__.__name__))
+    return cast(AnyStr, s)
+
+
+# Return whether a path is absolute.
+# Trivial in Posix, harder on the Mac or MS-DOS.
+
+def isabs(s: AnyStr) -> bool:
+    """Test whether a path is absolute"""
+    sep = _get_sep(s)
+    return s.startswith(sep)
+
+
+# Join pathnames.
+# Ignore the previous parts if a part is absolute.
+# Insert a '/' unless the first part is empty or already ends in '/'.
+
+def join(a: AnyStr, *p: AnyStr) -> AnyStr:
+    """Join two or more pathname components, inserting '/' as needed.
+    If any component is an absolute path, all previous path components
+    will be discarded."""
+    sep = _get_sep(a)
+    path = a
+    for b in p:
+        if b.startswith(sep):
+            path = b
+        elif not path or path.endswith(sep):
+            path += b
+        else:
+            path += sep + b
+    return path
+
+
+# Split a path in head (everything up to the last '/') and tail (the
+# rest).  If the path ends in '/', tail will be empty.  If there is no
+# '/' in the path, head  will be empty.
+# Trailing '/'es are stripped from head unless it is the root.
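+# For example (illustrative):
+#   split('/usr/bin/python') --> '/usr/bin', 'python'
+#   split('/usr/bin/')       --> '/usr/bin', ''
+#   split('///')             --> '///', ''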
+
+def split(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
+    """Split a pathname.  Returns tuple "(head, tail)" where "tail" is
+    everything after the final slash.  Either part may be empty."""
+    sep = _get_sep(p)
+    i = p.rfind(sep) + 1
+    head, tail = p[:i], p[i:]
+    if head and head != sep*len(head):
+        head = head.rstrip(sep)
+    return head, tail
+
+
+# Split a path in root and extension.
+# The extension is everything starting at the last dot in the last
+# pathname component; the root is everything before that.
+# It is always true that root + ext == p.
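+# For example (illustrative):
+#   splitext('/a/b.c/d.e') --> '/a/b.c/d', '.e'
+#   splitext('.cshrc')     --> '.cshrc', ''   (leading dots start no extension)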
+
+def splitext(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
+    if isinstance(p, bytes):
+        sep = b'/'
+        extsep = b'.'
+    else:
+        sep = '/'
+        extsep = '.'
+    return genericpath._splitext(p, sep, None, extsep)
+splitext.__doc__ = genericpath._splitext.__doc__
+
+# Split a pathname into a drive specification and the rest of the
+# path.  Useful on DOS/Windows/NT; on Unix, the drive is always empty.
+
+def splitdrive(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
+    """Split a pathname into drive and path. On Posix, drive is always
+    empty."""
+    return p[:0], p
+
+
+# Return the tail (basename) part of a path, same as split(path)[1].
+
+def basename(p: AnyStr) -> AnyStr:
+    """Returns the final component of a pathname"""
+    sep = _get_sep(p)
+    i = p.rfind(sep) + 1
+    return p[i:]
+
+
+# Return the head (dirname) part of a path, same as split(path)[0].
+
+def dirname(p: AnyStr) -> AnyStr:
+    """Returns the directory component of a pathname"""
+    sep = _get_sep(p)
+    i = p.rfind(sep) + 1
+    head = p[:i]
+    if head and head != sep*len(head):
+        head = head.rstrip(sep)
+    return head
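+
+# Illustrative: basename('/usr/lib/python') --> 'python' and
+# dirname('/usr/lib/python') --> '/usr/lib'.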
+
+
+# Is a path a symbolic link?
+# This will always return false on systems where os.lstat doesn't exist.
+
+def islink(path: AnyStr) -> bool:
+    """Test whether a path is a symbolic link"""
+    try:
+        st = os.lstat(path)
+    except (os.error, AttributeError):
+        return False
+    return stat.S_ISLNK(st.st_mode)
+
+# Being true for dangling symbolic links is also useful.
+
+def lexists(path: AnyStr) -> bool:
+    """Test whether a path exists.  Returns True for broken symbolic links"""
+    try:
+        os.lstat(path)
+    except os.error:
+        return False
+    return True
+
+
+# Are two filenames really pointing to the same file?
+
+def samefile(f1: AnyStr, f2: AnyStr) -> bool:
+    """Test whether two pathnames reference the same actual file"""
+    s1 = os.stat(f1)
+    s2 = os.stat(f2)
+    return samestat(s1, s2)
+
+
+# Are two open files really referencing the same file?
+# (Not necessarily the same file descriptor!)
+
+def sameopenfile(fp1: int, fp2: int) -> bool:
+    """Test whether two open file objects reference the same file"""
+    s1 = os.fstat(fp1)
+    s2 = os.fstat(fp2)
+    return samestat(s1, s2)
+
+
+# Are two stat buffers (obtained from stat, fstat or lstat)
+# describing the same file?
+
+def samestat(s1: os.stat_result, s2: os.stat_result) -> bool:
+    """Test whether two stat buffers reference the same file"""
+    return s1.st_ino == s2.st_ino and \
+           s1.st_dev == s2.st_dev
+
+
+# Is a path a mount point?
+# (Does this work for all UNIXes?  Is it even guaranteed to work by Posix?)
+
+def ismount(path: AnyStr) -> bool:
+    """Test whether a path is a mount point"""
+    if islink(path):
+        # A symlink can never be a mount point
+        return False
+    try:
+        s1 = os.lstat(path)
+        if isinstance(path, bytes):
+            parent = join(path, b'..')
+        else:
+            parent = join(path, '..')
+        s2 = os.lstat(parent)
+    except os.error:
+        return False # It doesn't exist -- so not a mount point :-)
+    dev1 = s1.st_dev
+    dev2 = s2.st_dev
+    if dev1 != dev2:
+        return True     # path/.. is on a different device than path
+    ino1 = s1.st_ino
+    ino2 = s2.st_ino
+    if ino1 == ino2:
+        return True     # path/.. is the same i-node as path
+    return False
+
+
+# Expand paths beginning with '~' or '~user'.
+# '~' means $HOME; '~user' means that user's home directory.
+# If the path doesn't begin with '~', or if the user or $HOME is unknown,
+# the path is returned unchanged (leaving error reporting to whatever
+# function is called with the expanded path as argument).
+# See also module 'glob' for expansion of *, ? and [...] in pathnames.
+# (A function should also be defined to do full *sh-style environment
+# variable expansion.)
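+# For instance (a hedged illustration; the results depend on $HOME and the
+# password database, so the home directories below are only assumed values):
+#   expanduser('~/data')    --> '/home/alice/data'  (if $HOME == '/home/alice')
+#   expanduser('~bob/data') --> '/home/bob/data'    (if user 'bob' has that home)
+#   expanduser('data/file') --> 'data/file'         (no leading '~', unchanged)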
+
+def expanduser(path: AnyStr) -> AnyStr:
+    """Expand ~ and ~user constructions.  If user or $HOME is unknown,
+    do nothing."""
+    if isinstance(path, bytes):
+        tilde = b'~'
+    else:
+        tilde = '~'
+    if not path.startswith(tilde):
+        return path
+    sep = _get_sep(path)
+    i = path.find(sep, 1)
+    if i < 0:
+        i = len(path)
+    if i == 1:
+        userhome = None  # type: Union[str, bytes]
+        if 'HOME' not in os.environ:
+            import pwd
+            userhome = pwd.getpwuid(os.getuid()).pw_dir
+        else:
+            userhome = os.environ['HOME']
+    else:
+        import pwd
+        name = path[1:i]  # type: Union[str, bytes]
+        if isinstance(name, bytes):
+            name = str(name, 'ASCII')
+        try:
+            pwent = pwd.getpwnam(name)
+        except KeyError:
+            return path
+        userhome = pwent.pw_dir
+    if isinstance(path, bytes):
+        userhome = os.fsencode(userhome)
+        root = b'/'
+    else:
+        root = '/'
+    userhome = userhome.rstrip(root)
+    return (userhome + path[i:]) or root
+
+
+# Expand paths containing shell variable substitutions.
+# This expands the forms $variable and ${variable} only.
+# Non-existent variables are left unchanged.
+
+_varprog = None # type: Pattern[str]
+_varprogb = None # type: Pattern[bytes]
+
+def expandvars(path: AnyStr) -> AnyStr:
+    """Expand shell variables of form $var and ${var}.  Unknown variables
+    are left unchanged."""
+    global _varprog, _varprogb
+    if isinstance(path, bytes):
+        if b'$' not in path:
+            return path
+        if not _varprogb:
+            import re
+            _varprogb = re.compile(br'\$(\w+|\{[^}]*\})', re.ASCII)
+        search = _varprogb.search
+        start = b'{'
+        end = b'}'
+    else:
+        if '$' not in path:
+            return path
+        if not _varprog:
+            import re
+            _varprog = re.compile(r'\$(\w+|\{[^}]*\})', re.ASCII)
+        search = _varprog.search
+        start = '{'
+        end = '}'
+    i = 0
+    while True:
+        m = search(path, i)
+        if not m:
+            break
+        i, j = m.span(0)
+        name = None  # type: Union[str, bytes]
+        name = m.group(1)
+        if name.startswith(start) and name.endswith(end):
+            name = name[1:-1]
+        if isinstance(name, bytes):
+            name = str(name, 'ASCII')
+        if name in os.environ:
+            tail = path[j:]
+            value = None  # type: Union[str, bytes]
+            value = os.environ[name]
+            if isinstance(path, bytes):
+                value = value.encode('ASCII')
+            path = path[:i] + value
+            i = len(path)
+            path += tail
+        else:
+            i = j
+    return path
+
+
+# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
+# It should be understood that this may change the meaning of the path
+# if it contains symbolic links!
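+# For example (illustrative):
+#   normpath('A//B')       --> 'A/B'
+#   normpath('A/./B')      --> 'A/B'
+#   normpath('A/foo/../B') --> 'A/B'
+#   normpath('//A')        --> '//A'  (two leading slashes survive; three or more become one)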
+
+def normpath(path: AnyStr) -> AnyStr:
+    """Normalize path, eliminating double slashes, etc."""
+    if isinstance(path, bytes):
+        sep = b'/'
+        empty = b''
+        dot = b'.'
+        dotdot = b'..'
+    else:
+        sep = '/'
+        empty = ''
+        dot = '.'
+        dotdot = '..'
+    if path == empty:
+        return dot
+    initial_slashes = path.startswith(sep) # type: int
+    # POSIX allows one or two initial slashes, but treats three or more
+    # as a single slash.
+    if (initial_slashes and
+        path.startswith(sep*2) and not path.startswith(sep*3)):
+        initial_slashes = 2
+    comps = path.split(sep)
+    new_comps = []  # type: List[AnyStr]
+    for comp in comps:
+        if comp in (empty, dot):
+            continue
+        if (comp != dotdot or (not initial_slashes and not new_comps) or
+             (new_comps and new_comps[-1] == dotdot)):
+            new_comps.append(comp)
+        elif new_comps:
+            new_comps.pop()
+    comps = new_comps
+    path = sep.join(comps)
+    if initial_slashes:
+        path = sep*initial_slashes + path
+    return path or dot
+
+
+def abspath(path: AnyStr) -> AnyStr:
+    """Return an absolute path."""
+    if not isabs(path):
+        if isinstance(path, bytes):
+            cwd = os.getcwdb()
+        else:
+            cwd = os.getcwd()
+        path = join(cwd, path)
+    return normpath(path)
+
+
+# Return a canonical path (i.e. the absolute location of a file on the
+# filesystem).
+
+def realpath(filename: AnyStr) -> AnyStr:
+    """Return the canonical path of the specified filename, eliminating any
+symbolic links encountered in the path."""
+    if isinstance(filename, bytes):
+        sep = b'/'
+        empty = b''
+    else:
+        sep = '/'
+        empty = ''
+    if isabs(filename):
+        bits = [sep] + filename.split(sep)[1:]
+    else:
+        bits = [empty] + filename.split(sep)
+
+    for i in range(2, len(bits)+1):
+        component = join(*bits[0:i])
+        # Resolve symbolic links.
+        if islink(component):
+            resolved = _resolve_link(component)
+            if resolved is None:
+                # Infinite loop -- return original component + rest of the path
+                return abspath(join(*([component] + bits[i:])))
+            else:
+                newpath = join(*([resolved] + bits[i:]))
+                return realpath(newpath)
+
+    return abspath(filename)
+
+
+def _resolve_link(path: AnyStr) -> AnyStr:
+    """Internal helper function.  Takes a path and follows symlinks
+    until we either arrive at something that isn't a symlink, or
+    encounter a path we've seen before (meaning that there's a loop).
+    """
+    paths_seen = set()  # type: Set[AnyStr]
+    while islink(path):
+        if path in paths_seen:
+            # Already seen this path, so we must have a symlink loop
+            return None
+        paths_seen.add(path)
+        # Resolve where the link points to
+        resolved = os.readlink(path)
+        if not isabs(resolved):
+            dir = dirname(path)
+            path = normpath(join(dir, resolved))
+        else:
+            path = normpath(resolved)
+    return path
+
+supports_unicode_filenames = (sys.platform == 'darwin')
+
+def relpath(path: AnyStr, start: AnyStr = None) -> AnyStr:
+    """Return a relative version of a path"""
+
+    if not path:
+        raise ValueError("no path specified")
+
+    if isinstance(path, bytes):
+        curdir = b'.'
+        sep = b'/'
+        pardir = b'..'
+    else:
+        curdir = '.'
+        sep = '/'
+        pardir = '..'
+
+    if start is None:
+        start = curdir
+
+    start_list = [x for x in abspath(start).split(sep) if x]
+    path_list = [x for x in abspath(path).split(sep) if x]
+
+    # Work out how much of the filepath is shared by start and path.
+    i = len(commonprefix([start_list, path_list]))
+
+    rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
+    if not rel_list:
+        return curdir
+    return join(*rel_list)
diff --git a/test-data/stdlib-samples/3.2/pprint.py b/test-data/stdlib-samples/3.2/pprint.py
new file mode 100644
index 0000000..650c1a3
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/pprint.py
@@ -0,0 +1,380 @@
+#  Author:      Fred L. Drake, Jr.
+#               fdrake at acm.org
+#
+#  This is a simple little module I wrote to make life easier.  I didn't
+#  see anything quite like it in the library, though I may have overlooked
+#  something.  I wrote this when I was trying to read some heavily nested
+#  tuples with fairly non-descriptive content.  This is modeled very much
+#  after Lisp/Scheme - style pretty-printing of lists.  If you find it
+#  useful, thank small children who sleep at night.
+
+"""Support to pretty-print lists, tuples, & dictionaries recursively.
+
+Very simple, but useful, especially in debugging data structures.
+
+Classes
+-------
+
+PrettyPrinter()
+    Handle pretty-printing operations onto a stream using a configured
+    set of formatting parameters.
+
+Functions
+---------
+
+pformat()
+    Format a Python object into a pretty-printed representation.
+
+pprint()
+    Pretty-print a Python object to a stream [default is sys.stdout].
+
+saferepr()
+    Generate a 'standard' repr()-like value, but protect against recursive
+    data structures.
+
+"""
+
+import sys as _sys
+from collections import OrderedDict as _OrderedDict
+from io import StringIO as _StringIO
+
+from typing import Any, Tuple, Dict, TextIO, cast, List
+
+__all__ = ["pprint","pformat","isreadable","isrecursive","saferepr",
+           "PrettyPrinter"]
+
+# cache these for faster access:
+_commajoin = ", ".join
+_id = id
+_len = len
+_type = type
+
+
+def pprint(object: object, stream: TextIO = None, indent: int = 1,
+           width: int = 80, depth: int = None) -> None:
+    """Pretty-print a Python object to a stream [default is sys.stdout]."""
+    printer = PrettyPrinter(
+        stream=stream, indent=indent, width=width, depth=depth)
+    printer.pprint(object)
+
+def pformat(object: object, indent: int = 1, width: int = 80,
+            depth: int = None) -> str:
+    """Format a Python object into a pretty-printed representation."""
+    return PrettyPrinter(indent=indent, width=width, depth=depth).pformat(object)
+
+def saferepr(object: object) -> str:
+    """Version of repr() which can handle recursive data structures."""
+    return _safe_repr(object, {}, None, 0)[0]
+
+def isreadable(object: object) -> bool:
+    """Determine if saferepr(object) is readable by eval()."""
+    return _safe_repr(object, {}, None, 0)[1]
+
+def isrecursive(object: object) -> bool:
+    """Determine if object requires a recursive representation."""
+    return _safe_repr(object, {}, None, 0)[2]
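+
+# A quick illustrative example of the helpers above:
+#   saferepr([1, [2, [3]]])    --> '[1, [2, [3]]]'
+#   isreadable([1, [2, [3]]])  --> True
+#   isrecursive([1, [2, [3]]]) --> False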
+
+class _safe_key:
+    """Helper function for key functions when sorting unorderable objects.
+
+    The wrapped object will fall back to a Py2.x-style comparison for
+    unorderable types (sorting first by the type name and then by
+    the object ids).  Does not work recursively, so dict.items() must have
+    _safe_key applied to both the key and the value.
+
+    """
+
+    __slots__ = ['obj']
+
+    def __init__(self, obj: Any) -> None:
+        self.obj = obj
+
+    def __lt__(self, other: Any) -> Any:
+        rv = self.obj.__lt__(other.obj) # type: Any
+        if rv is NotImplemented:
+            rv = (str(type(self.obj)), id(self.obj)) < \
+                 (str(type(other.obj)), id(other.obj))
+        return rv
+
+def _safe_tuple(t: Tuple[Any, Any]) -> Tuple[_safe_key, _safe_key]:
+    "Helper function for comparing 2-tuples"
+    return _safe_key(t[0]), _safe_key(t[1])
+
+class PrettyPrinter:
+    def __init__(self, indent: int = 1, width: int = 80, depth: int = None,
+                 stream: TextIO = None) -> None:
+        """Handle pretty printing operations onto a stream using a set of
+        configured parameters.
+
+        indent
+            Number of spaces to indent for each level of nesting.
+
+        width
+            Attempted maximum number of columns in the output.
+
+        depth
+            The maximum depth to print out nested structures.
+
+        stream
+            The desired output stream.  If omitted (or false), the standard
+            output stream available at construction will be used.
+
+        """
+        indent = int(indent)
+        width = int(width)
+        assert indent >= 0, "indent must be >= 0"
+        assert depth is None or depth > 0, "depth must be > 0"
+        assert width, "width must be != 0"
+        self._depth = depth
+        self._indent_per_level = indent
+        self._width = width
+        if stream is not None:
+            self._stream = stream
+        else:
+            self._stream = _sys.stdout
+
+    def pprint(self, object: object) -> None:
+        self._format(object, self._stream, 0, 0, {}, 0)
+        self._stream.write("\n")
+
+    def pformat(self, object: object) -> str:
+        sio = _StringIO()
+        self._format(object, sio, 0, 0, {}, 0)
+        return sio.getvalue()
+
+    def isrecursive(self, object: object) -> int:
+        return self.format(object, {}, 0, 0)[2]
+
+    def isreadable(self, object: object) -> int:
+        s, readable, recursive = self.format(object, {}, 0, 0)
+        return readable and not recursive
+
+    def _format(self, object: object, stream: TextIO, indent: int,
+                allowance: int, context: Dict[int, int], level: int) -> None:
+        level = level + 1
+        objid = _id(object)
+        if objid in context:
+            stream.write(_recursion(object))
+            self._recursive = True
+            self._readable = False
+            return
+        rep = self._repr(object, context, level - 1)
+        typ = _type(object)
+        sepLines = _len(rep) > (self._width - 1 - indent - allowance)
+        write = stream.write
+
+        if self._depth and level > self._depth:
+            write(rep)
+            return
+
+        if sepLines:
+            r = getattr(typ, "__repr__", None)
+            if isinstance(object, dict):
+                write('{')
+                if self._indent_per_level > 1:
+                    write((self._indent_per_level - 1) * ' ')
+                length = _len(object)
+                if length:
+                    context[objid] = 1
+                    indent = indent + self._indent_per_level
+                    if issubclass(typ, _OrderedDict):
+                        items = list(object.items())
+                    else:
+                        items = sorted(object.items(), key=_safe_tuple)
+                    key, ent = items[0]
+                    rep = self._repr(key, context, level)
+                    write(rep)
+                    write(': ')
+                    self._format(ent, stream, indent + _len(rep) + 2,
+                                  allowance + 1, context, level)
+                    if length > 1:
+                        for key, ent in items[1:]:
+                            rep = self._repr(key, context, level)
+                            write(',\n%s%s: ' % (' '*indent, rep))
+                            self._format(ent, stream, indent + _len(rep) + 2,
+                                          allowance + 1, context, level)
+                    indent = indent - self._indent_per_level
+                    del context[objid]
+                write('}')
+                return
+
+            if ((issubclass(typ, list) and r is list.__repr__) or
+                (issubclass(typ, tuple) and r is tuple.__repr__) or
+                (issubclass(typ, set) and r is set.__repr__) or
+                (issubclass(typ, frozenset) and r is frozenset.__repr__)
+               ):
+                anyobj = cast(Any, object) # TODO Collection?
+                length = _len(anyobj)
+                if issubclass(typ, list):
+                    write('[')
+                    endchar = ']'
+                    lst = anyobj
+                elif issubclass(typ, set):
+                    if not length:
+                        write('set()')
+                        return
+                    write('{')
+                    endchar = '}'
+                    lst = sorted(anyobj, key=_safe_key)
+                elif issubclass(typ, frozenset):
+                    if not length:
+                        write('frozenset()')
+                        return
+                    write('frozenset({')
+                    endchar = '})'
+                    lst = sorted(anyobj, key=_safe_key)
+                    indent += 10
+                else:
+                    write('(')
+                    endchar = ')'
+                    lst = list(anyobj)
+                if self._indent_per_level > 1:
+                    write((self._indent_per_level - 1) * ' ')
+                if length:
+                    context[objid] = 1
+                    indent = indent + self._indent_per_level
+                    self._format(lst[0], stream, indent, allowance + 1,
+                                 context, level)
+                    if length > 1:
+                        for ent in lst[1:]:
+                            write(',\n' + ' '*indent)
+                            self._format(ent, stream, indent,
+                                          allowance + 1, context, level)
+                    indent = indent - self._indent_per_level
+                    del context[objid]
+                if issubclass(typ, tuple) and length == 1:
+                    write(',')
+                write(endchar)
+                return
+
+        write(rep)
+
+    def _repr(self, object: object, context: Dict[int, int],
+              level: int) -> str:
+        repr, readable, recursive = self.format(object, context.copy(),
+                                                self._depth, level)
+        if not readable:
+            self._readable = False
+        if recursive:
+            self._recursive = True
+        return repr
+
+    def format(self, object: object, context: Dict[int, int],
+               maxlevels: int, level: int) -> Tuple[str, int, int]:
+        """Format object for a specific context, returning a string
+        and flags indicating whether the representation is 'readable'
+        and whether the object represents a recursive construct.
+        """
+        return _safe_repr(object, context, maxlevels, level)
+
+
+# Return triple (repr_string, isreadable, isrecursive).
+
+def _safe_repr(object: object, context: Dict[int, int],
+               maxlevels: int, level: int) -> Tuple[str, bool, bool]:
+    typ = _type(object)
+    if typ is str:
+        s = cast(str, object)
+        if 'locale' not in _sys.modules:
+            return repr(object), True, False
+        if "'" in s and '"' not in s:
+            closure = '"'
+            quotes = {'"': '\\"'}
+        else:
+            closure = "'"
+            quotes = {"'": "\\'"}
+        qget = quotes.get
+        sio = _StringIO()
+        write = sio.write
+        for char in s:
+            if char.isalpha():
+                write(char)
+            else:
+                write(qget(char, repr(char)[1:-1]))
+        return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False
+
+    r = getattr(typ, "__repr__", None)
+    if issubclass(typ, dict) and r is dict.__repr__:
+        if not object:
+            return "{}", True, False
+        objid = _id(object)
+        if maxlevels and level >= maxlevels:
+            return "{...}", False, objid in context
+        if objid in context:
+            return _recursion(object), False, True
+        context[objid] = 1
+        readable = True
+        recursive = False
+        components = []  # type: List[str]
+        append = components.append
+        level += 1
+        saferepr = _safe_repr
+        items = sorted((cast(dict, object)).items(), key=_safe_tuple)
+        for k, v in items:
+            krepr, kreadable, krecur = saferepr(k, context, maxlevels, level)
+            vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level)
+            append("%s: %s" % (krepr, vrepr))
+            readable = readable and kreadable and vreadable
+            if krecur or vrecur:
+                recursive = True
+        del context[objid]
+        return "{%s}" % _commajoin(components), readable, recursive
+
+    if (issubclass(typ, list) and r is list.__repr__) or \
+       (issubclass(typ, tuple) and r is tuple.__repr__):
+        anyobj = cast(Any, object) # TODO Sequence?
+        if issubclass(typ, list):
+            if not object:
+                return "[]", True, False
+            format = "[%s]"
+        elif _len(anyobj) == 1:
+            format = "(%s,)"
+        else:
+            if not object:
+                return "()", True, False
+            format = "(%s)"
+        objid = _id(object)
+        if maxlevels and level >= maxlevels:
+            return format % "...", False, objid in context
+        if objid in context:
+            return _recursion(object), False, True
+        context[objid] = 1
+        readable = True
+        recursive = False
+        components = []
+        append = components.append
+        level += 1
+        for o in anyobj:
+            orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level)
+            append(orepr)
+            if not oreadable:
+                readable = False
+            if orecur:
+                recursive = True
+        del context[objid]
+        return format % _commajoin(components), readable, recursive
+
+    rep = repr(object)
+    return rep, bool(rep and not rep.startswith('<')), False
+
+
+def _recursion(object: object) -> str:
+    return ("<Recursion on %s with id=%s>"
+            % (_type(object).__name__, _id(object)))
+
+
+def _perfcheck(object: object = None) -> None:
+    import time
+    if object is None:
+        object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000
+    p = PrettyPrinter()
+    t1 = time.time()
+    _safe_repr(object, {}, None, 0)
+    t2 = time.time()
+    p.pformat(object)
+    t3 = time.time()
+    print("_safe_repr:", t2 - t1)
+    print("pformat:", t3 - t2)
+
+if __name__ == "__main__":
+    _perfcheck()
diff --git a/test-data/stdlib-samples/3.2/random.py b/test-data/stdlib-samples/3.2/random.py
new file mode 100644
index 0000000..5cb579e
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/random.py
@@ -0,0 +1,743 @@
+"""Random variable generators.
+
+    integers
+    --------
+           uniform within range
+
+    sequences
+    ---------
+           pick random element
+           pick random sample
+           generate random permutation
+
+    distributions on the real line:
+    ------------------------------
+           uniform
+           triangular
+           normal (Gaussian)
+           lognormal
+           negative exponential
+           gamma
+           beta
+           pareto
+           Weibull
+
+    distributions on the circle (angles 0 to 2pi)
+    ---------------------------------------------
+           circular uniform
+           von Mises
+
+General notes on the underlying Mersenne Twister core generator:
+
+* The period is 2**19937-1.
+* It is one of the most extensively tested generators in existence.
+* The random() method is implemented in C, executes in a single Python step,
+  and is, therefore, threadsafe.
+
+"""
+
+from warnings import warn as _warn
+from types import MethodType as _MethodType, BuiltinMethodType as _BuiltinMethodType
+from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
+from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
+from os import urandom as _urandom
+from collections import Set as _Set, Sequence as _Sequence
+from hashlib import sha512 as _sha512
+
+from typing import (
+    Any, TypeVar, Iterable, Sequence, List, Callable, Set, cast, SupportsInt, Union
+)
+
+__all__ = ["Random","seed","random","uniform","randint","choice","sample",
+           "randrange","shuffle","normalvariate","lognormvariate",
+           "expovariate","vonmisesvariate","gammavariate","triangular",
+           "gauss","betavariate","paretovariate","weibullvariate",
+           "getstate","setstate", "getrandbits",
+           "SystemRandom"]
+
+NV_MAGICCONST = 4 * _exp(-0.5)/_sqrt(2.0)
+TWOPI = 2.0*_pi
+LOG4 = _log(4.0)
+SG_MAGICCONST = 1.0 + _log(4.5)
+BPF = 53        # Number of bits in a float
+RECIP_BPF = 2**-BPF # type: float
+
+
+# Translated by Guido van Rossum from C source provided by
+# Adrian Baddeley.  Adapted by Raymond Hettinger for use with
+# the Mersenne Twister  and os.urandom() core generators.
+
+import _random
+
+T = TypeVar('T')
+
+class Random(_random.Random):
+    """Random number generator base class used by bound module functions.
+
+    Used to instantiate instances of Random to get generators that don't
+    share state.
+
+    Class Random can also be subclassed if you want to use a different basic
+    generator of your own devising: in that case, override the following
+    methods:  random(), seed(), getstate(), and setstate().
+    Optionally, implement a getrandbits() method so that randrange()
+    can cover arbitrarily large ranges.
+
+    """
+
+    VERSION = 3     # used by getstate/setstate
+    gauss_next = 0.0
+
+    def __init__(self, x: object = None) -> None:
+        """Initialize an instance.
+
+        Optional argument x controls seeding, as for Random.seed().
+        """
+
+        self.seed(x)
+        self.gauss_next = None
+
+    def seed(self, a: Any = None, version: int = 2) -> None:
+        """Initialize internal state from hashable object.
+
+        None or no argument seeds from current time or from an operating
+        system specific randomness source if available.
+
+        For version 2 (the default), all of the bits are used if *a* is a str,
+        bytes, or bytearray.  For version 1, the hash() of *a* is used instead.
+
+        If *a* is an int, all bits are used.
+
+        """
+
+        if a is None:
+            try:
+                a = int.from_bytes(_urandom(32), 'big')
+            except NotImplementedError:
+                import time
+                a = int(time.time() * 256) # use fractional seconds
+
+        if version == 2:
+            if isinstance(a, (str, bytes, bytearray)):
+                if isinstance(a, str):
+                    a = a.encode()
+                a += _sha512(a).digest()
+                a = int.from_bytes(a, 'big')
+
+        super().seed(a)
+        self.gauss_next = None
+
+    def getstate(self) -> tuple:
+        """Return internal state; can be passed to setstate() later."""
+        return self.VERSION, super().getstate(), self.gauss_next
+
+    def setstate(self, state: tuple) -> None:
+        """Restore internal state from object returned by getstate()."""
+        version = state[0]
+        if version == 3:
+            version, internalstate, self.gauss_next = state
+            super().setstate(internalstate)
+        elif version == 2:
+            version, internalstate, self.gauss_next = state
+            # In version 2, the state was saved as signed ints, which causes
+            #   inconsistencies between 32/64-bit systems. The state is
+            #   really unsigned 32-bit ints, so we convert negative ints from
+            #   version 2 to positive longs for version 3.
+            try:
+                internalstate = tuple(x % (2**32) for x in internalstate)
+            except ValueError as e:
+                raise TypeError()
+            super().setstate(internalstate)
+        else:
+            raise ValueError("state with version %s passed to "
+                             "Random.setstate() of version %s" %
+                             (version, self.VERSION))
+
+## ---- Methods below this point do not need to be overridden when
+## ---- subclassing for the purpose of using a different core generator.
+
+## -------------------- pickle support  -------------------
+
+    def __getstate__(self) -> object: # for pickle
+        return self.getstate()
+
+    def __setstate__(self, state: Any) -> None:  # for pickle
+        self.setstate(state)
+
+    def __reduce__(self) -> tuple:
+        return self.__class__, (), self.getstate()
+
+## -------------------- integer methods  -------------------
+
+    def randrange(self, start: SupportsInt, stop: SupportsInt = None,
+                  step: int = 1, int: Callable[[SupportsInt],
+                                               int] = int) -> int:
+        """Choose a random item from range(start, stop[, step]).
+
+        This fixes the problem with randint() which includes the
+        endpoint; in Python this is usually not what you want.
+
+        Do not supply the 'int' argument.
+        """
+
+        # This code is a bit messy to make it fast for the
+        # common case while still doing adequate error checking.
+        istart = int(start)
+        if istart != start:
+            raise ValueError("non-integer arg 1 for randrange()")
+        if stop is None:
+            if istart > 0:
+                return self._randbelow(istart)
+            raise ValueError("empty range for randrange()")
+
+        # stop argument supplied.
+        istop = int(stop)
+        if istop != stop:
+            raise ValueError("non-integer stop for randrange()")
+        width = istop - istart
+        if step == 1 and width > 0:
+            return istart + self._randbelow(width)
+        if step == 1:
+            raise ValueError("empty range for randrange() (%d,%d, %d)" % (istart, istop, width))
+
+        # Non-unit step argument supplied.
+        istep = int(step)
+        if istep != step:
+            raise ValueError("non-integer step for randrange()")
+        if istep > 0:
+            n = (width + istep - 1) // istep
+        elif istep < 0:
+            n = (width + istep + 1) // istep
+        else:
+            raise ValueError("zero step for randrange()")
+
+        if n <= 0:
+            raise ValueError("empty range for randrange()")
+
+        return istart + istep*self._randbelow(n)
+
+    def randint(self, a: int, b: int) -> int:
+        """Return random integer in range [a, b], including both end points.
+        """
+
+        return self.randrange(a, b+1)
+
+    def _randbelow(self, n: int, int: Callable[[float], int] = int,
+                   maxsize: int = 1<<BPF,
+                   type: Callable[[object], type] = type,
+                   Method: type = _MethodType,
+                   BuiltinMethod: type = _BuiltinMethodType) -> int:
+        "Return a random int in the range [0,n).  Raises ValueError if n==0."
+
+        getrandbits = self.getrandbits
+        # Only call self.getrandbits if the original random() builtin method
+        # has not been overridden or if a new getrandbits() was supplied.
+        if type(self.random) is BuiltinMethod or type(getrandbits) is Method:
+            k = n.bit_length()  # don't use (n-1) here because n can be 1
+            r = getrandbits(k)          # 0 <= r < 2**k
+            while r >= n:
+                r = getrandbits(k)
+            return r
+        # There's an overridden random() method but no new getrandbits() method,
+        # so we can only use random() from here.
+        random = self.random
+        if n >= maxsize:
+            _warn("Underlying random() generator does not supply \n"
+                "enough bits to choose from a population range this large.\n"
+                "To remove the range limitation, add a getrandbits() method.")
+            return int(random() * n)
+        rem = maxsize % n
+        limit = (maxsize - rem) / maxsize   # int(limit * maxsize) % n == 0
+        s = random()
+        while s >= limit:
+            s = random()
+        return int(s*maxsize) % n
+
+## -------------------- sequence methods  -------------------
+
+    def choice(self, seq: Sequence[T]) -> T:
+        """Choose a random element from a non-empty sequence."""
+        try:
+            i = self._randbelow(len(seq))
+        except ValueError:
+            raise IndexError('Cannot choose from an empty sequence')
+        return seq[i]
+
+    def shuffle(self, x: List[T],
+                random: Callable[[], float] = None,
+                int: Callable[[float], int] = int) -> None:
+        """x, random=random.random -> shuffle list x in place; return None.
+
+        Optional arg random is a 0-argument function returning a random
+        float in [0.0, 1.0); by default, the standard random.random.
+        """
+
+        randbelow = self._randbelow
+        for i in reversed(range(1, len(x))):
+            # pick an element in x[:i+1] with which to exchange x[i]
+            j = randbelow(i+1) if random is None else int(random() * (i+1))
+            x[i], x[j] = x[j], x[i]
+
+    def sample(self, population: Union[_Set[T], _Sequence[T]], k: int) -> List[T]:
+        """Chooses k unique random elements from a population sequence or set.
+
+        Returns a new list containing elements from the population while
+        leaving the original population unchanged.  The resulting list is
+        in selection order so that all sub-slices will also be valid random
+        samples.  This allows raffle winners (the sample) to be partitioned
+        into grand prize and second place winners (the subslices).
+
+        Members of the population need not be hashable or unique.  If the
+        population contains repeats, then each occurrence is a possible
+        selection in the sample.
+
+        To choose a sample in a range of integers, use range as an argument.
+        This is especially fast and space efficient for sampling from a
+        large population:   sample(range(10000000), 60)
+        """
+
+        # Sampling without replacement entails tracking either potential
+        # selections (the pool) in a list or previous selections in a set.
+
+        # When the number of selections is small compared to the
+        # population, then tracking selections is efficient, requiring
+        # only a small set and an occasional reselection.  For
+        # a larger number of selections, the pool tracking method is
+        # preferred since the list takes less space than the
+        # set and it doesn't suffer from frequent reselections.
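+        #
+        # (Illustrative: sample(range(100), 5) takes the selection-set branch
+        # below, while sample(range(10), 9) copies the pool instead.)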
+
+        if isinstance(population, _Set):
+            population = list(population)
+        if not isinstance(population, _Sequence):
+            raise TypeError("Population must be a sequence or set.  For dicts, use list(d).")
+        randbelow = self._randbelow
+        n = len(population)
+        if not (0 <= k and k <= n):
+            raise ValueError("Sample larger than population")
+        result = [cast(T, None)] * k
+        setsize = 21        # size of a small set minus size of an empty list
+        if k > 5:
+            setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets
+        if n <= setsize:
+            # An n-length list is smaller than a k-length set
+            pool = list(population)
+            for i in range(k):         # invariant:  non-selected at [0,n-i)
+                j = randbelow(n-i)
+                result[i] = pool[j]
+                pool[j] = pool[n-i-1]   # move non-selected item into vacancy
+        else:
+            selected = set()  # type: Set[int]
+            selected_add = selected.add
+            for i in range(k):
+                j = randbelow(n)
+                while j in selected:
+                    j = randbelow(n)
+                selected_add(j)
+                result[i] = population[j]
+        return result
+
+## -------------------- real-valued distributions  -------------------
+
+## -------------------- uniform distribution -------------------
+
+    def uniform(self, a: float, b: float) -> float:
+        "Get a random number in the range [a, b) or [a, b] depending on rounding."
+        return a + (b-a) * self.random()
+
+## -------------------- triangular --------------------
+
+    def triangular(self, low: float = 0.0, high: float = 1.0,
+                   mode: float = None) -> float:
+        """Triangular distribution.
+
+        Continuous distribution bounded by given lower and upper limits,
+        and having a given mode value in-between.
+
+        http://en.wikipedia.org/wiki/Triangular_distribution
+
+        """
+        u = self.random()
+        c = 0.5 if mode is None else (mode - low) / (high - low)
+        if u > c:
+            u = 1.0 - u
+            c = 1.0 - c
+            low, high = high, low
+        return low + (high - low) * (u * c) ** 0.5
+
+## -------------------- normal distribution --------------------
+
+    def normalvariate(self, mu: float, sigma: float) -> float:
+        """Normal distribution.
+
+        mu is the mean, and sigma is the standard deviation.
+
+        """
+        # mu = mean, sigma = standard deviation
+
+        # Uses Kinderman and Monahan method. Reference: Kinderman,
+        # A.J. and Monahan, J.F., "Computer generation of random
+        # variables using the ratio of uniform deviates", ACM Trans
+        # Math Software, 3, (1977), pp257-260.
+
+        random = self.random
+        while 1:
+            u1 = random()
+            u2 = 1.0 - random()
+            z = NV_MAGICCONST*(u1-0.5)/u2
+            zz = z*z/4.0
+            if zz <= -_log(u2):
+                break
+        return mu + z*sigma
+
+## -------------------- lognormal distribution --------------------
+
+    def lognormvariate(self, mu: float, sigma: float) -> float:
+        """Log normal distribution.
+
+        If you take the natural logarithm of this distribution, you'll get a
+        normal distribution with mean mu and standard deviation sigma.
+        mu can have any value, and sigma must be greater than zero.
+
+        """
+        return _exp(self.normalvariate(mu, sigma))
+
+## -------------------- exponential distribution --------------------
+
+    def expovariate(self, lambd: float) -> float:
+        """Exponential distribution.
+
+        lambd is 1.0 divided by the desired mean.  It should be
+        nonzero.  (The parameter would be called "lambda", but that is
+        a reserved word in Python.)  Returned values range from 0 to
+        positive infinity if lambd is positive, and from negative
+        infinity to 0 if lambd is negative.
+
+        """
+        # lambd: rate lambd = 1/mean
+        # ('lambda' is a Python reserved word)
+
+        # we use 1-random() instead of random() to preclude the
+        # possibility of taking the log of zero.
+        return -_log(1.0 - self.random())/lambd
+
+## -------------------- von Mises distribution --------------------
+
+    def vonmisesvariate(self, mu: float, kappa: float) -> float:
+        """Circular data distribution.
+
+        mu is the mean angle, expressed in radians between 0 and 2*pi, and
+        kappa is the concentration parameter, which must be greater than or
+        equal to zero.  If kappa is equal to zero, this distribution reduces
+        to a uniform random angle over the range 0 to 2*pi.
+
+        """
+        # mu:    mean angle (in radians between 0 and 2*pi)
+        # kappa: concentration parameter kappa (>= 0)
+        # if kappa = 0 generate uniform random angle
+
+        # Based upon an algorithm published in: Fisher, N.I.,
+        # "Statistical Analysis of Circular Data", Cambridge
+        # University Press, 1993.
+
+        # Thanks to Magnus Kessler for a correction to the
+        # implementation of step 4.
+
+        random = self.random
+        if kappa <= 1e-6:
+            return TWOPI * random()
+
+        a = 1.0 + _sqrt(1.0 + 4.0 * kappa * kappa)
+        b = (a - _sqrt(2.0 * a))/(2.0 * kappa)
+        r = (1.0 + b * b)/(2.0 * b)
+
+        while 1:
+            u1 = random()
+
+            z = _cos(_pi * u1)
+            f = (1.0 + r * z)/(r + z)
+            c = kappa * (r - f)
+
+            u2 = random()
+
+            if u2 < c * (2.0 - c) or u2 <= c * _exp(1.0 - c):
+                break
+
+        u3 = random()
+        if u3 > 0.5:
+            theta = (mu % TWOPI) + _acos(f)
+        else:
+            theta = (mu % TWOPI) - _acos(f)
+
+        return theta
+
+## -------------------- gamma distribution --------------------
+
+    def gammavariate(self, alpha: float, beta: float) -> float:
+        """Gamma distribution.  Not the gamma function!
+
+        Conditions on the parameters are alpha > 0 and beta > 0.
+
+        The probability distribution function is:
+
+                    x ** (alpha - 1) * math.exp(-x / beta)
+          pdf(x) =  --------------------------------------
+                      math.gamma(alpha) * beta ** alpha
+
+        """
+
+        # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2
+
+        # Warning: a few older sources define the gamma distribution in terms
+        # of alpha > -1.0
+        if alpha <= 0.0 or beta <= 0.0:
+            raise ValueError('gammavariate: alpha and beta must be > 0.0')
+
+        random = self.random
+        if alpha > 1.0:
+
+            # Uses R.C.H. Cheng, "The generation of Gamma
+            # variables with non-integral shape parameters",
+            # Applied Statistics, (1977), 26, No. 1, p71-74
+
+            ainv = _sqrt(2.0 * alpha - 1.0)
+            bbb = alpha - LOG4
+            ccc = alpha + ainv
+
+            while 1:
+                u1 = random()
+                if not (1e-7 < u1 and u1 < .9999999):
+                    continue
+                u2 = 1.0 - random()
+                v = _log(u1/(1.0-u1))/ainv
+                x = alpha*_exp(v)
+                z = u1*u1*u2
+                r = bbb+ccc*v-x
+                if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z):
+                    return x * beta
+
+        elif alpha == 1.0:
+            # expovariate(1)
+            u = random()
+            while u <= 1e-7:
+                u = random()
+            return -_log(u) * beta
+
+        else:   # alpha is between 0 and 1 (exclusive)
+
+            # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle
+
+            while 1:
+                u = random()
+                b = (_e + alpha)/_e
+                p = b*u
+                if p <= 1.0:
+                    x = p ** (1.0/alpha)
+                else:
+                    x = -_log((b-p)/alpha)
+                u1 = random()
+                if p > 1.0:
+                    if u1 <= x ** (alpha - 1.0):
+                        break
+                elif u1 <= _exp(-x):
+                    break
+            return x * beta
+
+## -------------------- Gauss (faster alternative) --------------------
+
+    def gauss(self, mu: float, sigma: float) -> float:
+        """Gaussian distribution.
+
+        mu is the mean, and sigma is the standard deviation.  This is
+        slightly faster than the normalvariate() function.
+
+        Not thread-safe without a lock around calls.
+
+        """
+
+        # When x and y are two variables from [0, 1), uniformly
+        # distributed, then
+        #
+        #    cos(2*pi*x)*sqrt(-2*log(1-y))
+        #    sin(2*pi*x)*sqrt(-2*log(1-y))
+        #
+        # are two *independent* variables with normal distribution
+        # (mu = 0, sigma = 1).
+        # (Lambert Meertens)
+        # (corrected version; bug discovered by Mike Miller, fixed by LM)
+
+        # Multithreading note: When two threads call this function
+        # simultaneously, it is possible that they will receive the
+        # same return value.  The window is very small though.  To
+        # avoid this, you have to use a lock around all calls.  (I
+        # didn't want to slow this down in the serial case by using a
+        # lock here.)
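+        #
+        # (A sketch of the flow below: each pass through the trig branch
+        # produces two independent normals; one is returned immediately and
+        # the other is cached in gauss_next so the next call can skip the work.)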
+
+        random = self.random
+        z = self.gauss_next
+        self.gauss_next = None
+        if z is None:
+            x2pi = random() * TWOPI
+            g2rad = _sqrt(-2.0 * _log(1.0 - random()))
+            z = _cos(x2pi) * g2rad
+            self.gauss_next = _sin(x2pi) * g2rad
+
+        return mu + z*sigma
+
+## -------------------- beta --------------------
+## See
+## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html
+## for Ivan Frohne's insightful analysis of why the original implementation:
+##
+##    def betavariate(self, alpha, beta):
+##        # Discrete Event Simulation in C, pp 87-88.
+##
+##        y = self.expovariate(alpha)
+##        z = self.expovariate(1.0/beta)
+##        return z/(y+z)
+##
+## was dead wrong, and how it probably got that way.
+
+    def betavariate(self, alpha: float, beta: float) -> 'float':
+        """Beta distribution.
+
+        Conditions on the parameters are alpha > 0 and beta > 0.
+        Returned values range between 0 and 1.
+
+        """
+
+        # This version due to Janne Sinkkonen, and matches all the std
+        # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution").
+        y = self.gammavariate(alpha, 1.)
+        if y == 0:
+            return 0.0
+        else:
+            return y / (y + self.gammavariate(beta, 1.))
+
+## -------------------- Pareto --------------------
+
+    def paretovariate(self, alpha: float) -> float:
+        """Pareto distribution.  alpha is the shape parameter."""
+        # Jain, pg. 495
+
+        u = 1.0 - self.random()
+        return 1.0 / u ** (1.0/alpha)
+
+## -------------------- Weibull --------------------
+
+    def weibullvariate(self, alpha: float, beta: float) -> float:
+        """Weibull distribution.
+
+        alpha is the scale parameter and beta is the shape parameter.
+
+        """
+        # Jain, pg. 499; bug fix courtesy Bill Arms
+
+        u = 1.0 - self.random()
+        return alpha * (-_log(u)) ** (1.0/beta)
+
+## --------------- Operating System Random Source  ------------------
+
+class SystemRandom(Random):
+    """Alternate random number generator using sources provided
+    by the operating system (such as /dev/urandom on Unix or
+    CryptGenRandom on Windows).
+
+    Not available on all systems (see os.urandom() for details).
+    """
+
+    def random(self) -> float:
+        """Get the next random number in the range [0.0, 1.0)."""
+        return (int.from_bytes(_urandom(7), 'big') >> 3) * RECIP_BPF
+
+    def getrandbits(self, k: int) -> int:
+        """getrandbits(k) -> x.  Generates a long int with k random bits."""
+        if k <= 0:
+            raise ValueError('number of bits must be greater than zero')
+        if k != int(k):
+            raise TypeError('number of bits should be an integer')
+        numbytes = (k + 7) // 8                       # bits / 8 and rounded up
+        x = int.from_bytes(_urandom(numbytes), 'big')
+        return x >> (numbytes * 8 - k)                # trim excess bits
+
+    def seed(self, a: object = None, version: int = None) -> None:
+        "Stub method.  Not used for a system random number generator."
+        return
+
+    def _notimplemented(self, *args: Any, **kwds: Any) -> Any:
+        "Method should not be called for a system random number generator."
+        raise NotImplementedError('System entropy source does not have state.')
+    getstate = setstate = _notimplemented
+
+# Create one instance, seeded from current time, and export its methods
+# as module-level functions.  The functions share state across all uses
+# (both in the user's code and in the Python libraries), but that's fine
+# for most programs and is easier for the casual user than making them
+# instantiate their own Random() instance.
+
+_inst = Random()
+seed = _inst.seed
+random = _inst.random
+uniform = _inst.uniform
+triangular = _inst.triangular
+randint = _inst.randint
+choice = _inst.choice
+randrange = _inst.randrange
+sample = _inst.sample
+shuffle = _inst.shuffle
+normalvariate = _inst.normalvariate
+lognormvariate = _inst.lognormvariate
+expovariate = _inst.expovariate
+vonmisesvariate = _inst.vonmisesvariate
+gammavariate = _inst.gammavariate
+gauss = _inst.gauss
+betavariate = _inst.betavariate
+paretovariate = _inst.paretovariate
+weibullvariate = _inst.weibullvariate
+getstate = _inst.getstate
+setstate = _inst.setstate
+getrandbits = _inst.getrandbits
+
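+# A minimal usage sketch (editor's illustration, not part of the upstream
+# module).  The module-level functions above all share the state of the
+# single _inst instance, while SystemRandom draws from OS entropy:
+#
+#     import random
+#     random.seed(12345)            # seeds the shared Random() instance
+#     x = random.betavariate(2.0, 5.0)
+#     rng = random.SystemRandom()
+#     bits = rng.getrandbits(64)    # OS entropy; seed() is a no-op and
+#                                   # getstate()/setstate() raise
+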
+## -------------------- test program --------------------
+
+def _test_generator(n: int, func: Any, args: tuple) -> None:
+    import time
+    print(n, 'times', func.__name__)
+    total = 0.0
+    sqsum = 0.0
+    smallest = 1e10
+    largest = -1e10
+    t0 = time.time()
+    for i in range(n):
+        x = func(*args) # type: float
+        total += x
+        sqsum = sqsum + x*x
+        smallest = min(x, smallest)
+        largest = max(x, largest)
+    t1 = time.time()
+    print(round(t1-t0, 3), 'sec,', end=' ')
+    avg = total/n
+    stddev = _sqrt(sqsum/n - avg*avg)
+    print('avg %g, stddev %g, min %g, max %g' % \
+              (avg, stddev, smallest, largest))
+
+
+def _test(N: int = 2000) -> None:
+    _test_generator(N, random, ())
+    _test_generator(N, normalvariate, (0.0, 1.0))
+    _test_generator(N, lognormvariate, (0.0, 1.0))
+    _test_generator(N, vonmisesvariate, (0.0, 1.0))
+    _test_generator(N, gammavariate, (0.01, 1.0))
+    _test_generator(N, gammavariate, (0.1, 1.0))
+    _test_generator(N, gammavariate, (0.1, 2.0))
+    _test_generator(N, gammavariate, (0.5, 1.0))
+    _test_generator(N, gammavariate, (0.9, 1.0))
+    _test_generator(N, gammavariate, (1.0, 1.0))
+    _test_generator(N, gammavariate, (2.0, 1.0))
+    _test_generator(N, gammavariate, (20.0, 1.0))
+    _test_generator(N, gammavariate, (200.0, 1.0))
+    _test_generator(N, gauss, (0.0, 1.0))
+    _test_generator(N, betavariate, (3.0, 3.0))
+    _test_generator(N, triangular, (0.0, 1.0, 1.0/3.0))
+
+if __name__ == '__main__':
+    _test()
diff --git a/test-data/stdlib-samples/3.2/shutil.py b/test-data/stdlib-samples/3.2/shutil.py
new file mode 100644
index 0000000..7204a4d
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/shutil.py
@@ -0,0 +1,790 @@
+"""Utility functions for copying and archiving files and directory trees.
+
+XXX The functions here don't copy the resource fork or other metadata on Mac.
+
+"""
+
+import os
+import sys
+import stat
+from os.path import abspath
+import fnmatch
+import collections
+import errno
+import tarfile
+import builtins
+
+from typing import (
+    Any, AnyStr, IO, List, Iterable, Callable, Tuple, Dict, Sequence, cast
+)
+from types import TracebackType
+
+try:
+    import bz2
+    _BZ2_SUPPORTED = True
+except ImportError:
+    _BZ2_SUPPORTED = False
+
+try:
+    from pwd import getpwnam as _getpwnam
+    getpwnam = _getpwnam
+except ImportError:
+    getpwnam = None
+
+try:
+    from grp import getgrnam as _getgrnam
+    getgrnam = _getgrnam
+except ImportError:
+    getgrnam = None
+
+__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
+           "copytree", "move", "rmtree", "Error", "SpecialFileError",
+           "ExecError", "make_archive", "get_archive_formats",
+           "register_archive_format", "unregister_archive_format",
+           "get_unpack_formats", "register_unpack_format",
+           "unregister_unpack_format", "unpack_archive", "ignore_patterns"]
+
+class Error(EnvironmentError):
+    pass
+
+class SpecialFileError(EnvironmentError):
+    """Raised when trying to do a kind of operation (e.g. copying) which is
+    not supported on a special file (e.g. a named pipe)"""
+
+class ExecError(EnvironmentError):
+    """Raised when a command could not be executed"""
+
+class ReadError(EnvironmentError):
+    """Raised when an archive cannot be read"""
+
+class RegistryError(Exception):
+    """Raised when a registery operation with the archiving
+    and unpacking registeries fails"""
+
+
+try:
+    _WindowsError = WindowsError # type: type
+except NameError:
+    _WindowsError = None
+
+
+# Function aliases to be patched in test cases
+rename = os.rename
+open = builtins.open
+
+
+def copyfileobj(fsrc: IO[AnyStr], fdst: IO[AnyStr],
+                length: int = 16*1024) -> None:
+    """copy data from file-like object fsrc to file-like object fdst"""
+    while 1:
+        buf = fsrc.read(length)
+        if not buf:
+            break
+        fdst.write(buf)
+
+def _samefile(src: str, dst: str) -> bool:
+    # Macintosh, Unix.
+    if hasattr(os.path, 'samefile'):
+        try:
+            return os.path.samefile(src, dst)
+        except OSError:
+            return False
+
+    # All other platforms: check for same pathname.
+    return (os.path.normcase(os.path.abspath(src)) ==
+            os.path.normcase(os.path.abspath(dst)))
+
+def copyfile(src: str, dst: str) -> None:
+    """Copy data from src to dst"""
+    if _samefile(src, dst):
+        raise Error("`%s` and `%s` are the same file" % (src, dst))
+
+    for fn in [src, dst]:
+        try:
+            st = os.stat(fn)
+        except OSError:
+            # File most likely does not exist
+            pass
+        else:
+            # XXX What about other special files? (sockets, devices...)
+            if stat.S_ISFIFO(st.st_mode):
+                raise SpecialFileError("`%s` is a named pipe" % fn)
+
+    with open(src, 'rb') as fsrc:
+        with open(dst, 'wb') as fdst:
+            copyfileobj(fsrc, fdst)
+
+def copymode(src: str, dst: str) -> None:
+    """Copy mode bits from src to dst"""
+    if hasattr(os, 'chmod'):
+        st = os.stat(src)
+        mode = stat.S_IMODE(st.st_mode)
+        os.chmod(dst, mode)
+
+def copystat(src: str, dst: str) -> None:
+    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
+    st = os.stat(src)
+    mode = stat.S_IMODE(st.st_mode)
+    if hasattr(os, 'utime'):
+        os.utime(dst, (st.st_atime, st.st_mtime))
+    if hasattr(os, 'chmod'):
+        os.chmod(dst, mode)
+    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
+        try:
+            os.chflags(dst, st.st_flags)
+        except OSError as why:
+            if (not hasattr(errno, 'EOPNOTSUPP') or
+                why.errno != errno.EOPNOTSUPP):
+                raise
+
+def copy(src: str, dst: str) -> None:
+    """Copy data and mode bits ("cp src dst").
+
+    The destination may be a directory.
+
+    """
+    if os.path.isdir(dst):
+        dst = os.path.join(dst, os.path.basename(src))
+    copyfile(src, dst)
+    copymode(src, dst)
+
+def copy2(src: str, dst: str) -> None:
+    """Copy data and all stat info ("cp -p src dst").
+
+    The destination may be a directory.
+
+    """
+    if os.path.isdir(dst):
+        dst = os.path.join(dst, os.path.basename(src))
+    copyfile(src, dst)
+    copystat(src, dst)
+
+def ignore_patterns(*patterns: str) -> Callable[[str, List[str]],
+                                                Iterable[str]]:
+    """Function that can be used as copytree() ignore parameter.
+
+    Patterns is a sequence of glob-style patterns
+    that are used to exclude files"""
+    def _ignore_patterns(path: str, names: List[str]) -> Iterable[str]:
+        ignored_names = []  # type: List[str]
+        for pattern in patterns:
+            ignored_names.extend(fnmatch.filter(names, pattern))
+        return set(ignored_names)
+    return _ignore_patterns
+
+def copytree(src: str, dst: str, symlinks: bool = False,
+             ignore: Callable[[str, List[str]], Iterable[str]] = None,
+             copy_function: Callable[[str, str], None] = copy2,
+             ignore_dangling_symlinks: bool = False) -> None:
+    """Recursively copy a directory tree.
+
+    The destination directory must not already exist.
+    If exception(s) occur, an Error is raised with a list of reasons.
+
+    If the optional symlinks flag is true, symbolic links in the
+    source tree result in symbolic links in the destination tree; if
+    it is false, the contents of the files pointed to by symbolic
+    links are copied. If the file pointed to by the symlink doesn't
+    exist, an exception will be added to the list of errors raised in
+    an Error exception at the end of the copy process.
+
+    You can set the optional ignore_dangling_symlinks flag to true if you
+    want to silence this exception. Notice that this has no effect on
+    platforms that don't support os.symlink.
+
+    The optional ignore argument is a callable. If given, it
+    is called with the `src` parameter, which is the directory
+    being visited by copytree(), and `names` which is the list of
+    `src` contents, as returned by os.listdir():
+
+        callable(src, names) -> ignored_names
+
+    Since copytree() is called recursively, the callable will be
+    called once for each directory that is copied. It returns a
+    list of names relative to the `src` directory that should
+    not be copied.
+
+    The optional copy_function argument is a callable that will be used
+    to copy each file. It will be called with the source path and the
+    destination path as arguments. By default, copy2() is used, but any
+    function that supports the same signature (like copy()) can be used.
+
+    """
+    names = os.listdir(src)
+    if ignore is not None:
+        ignored_names = ignore(src, names)
+    else:
+        ignored_names = set()
+
+    os.makedirs(dst)
+    errors = []  # type: List[Tuple[str, str, str]]
+    for name in names:
+        if name in ignored_names:
+            continue
+        srcname = os.path.join(src, name)
+        dstname = os.path.join(dst, name)
+        try:
+            if os.path.islink(srcname):
+                linkto = os.readlink(srcname)
+                if symlinks:
+                    os.symlink(linkto, dstname)
+                else:
+                    # ignore dangling symlink if the flag is on
+                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
+                        continue
+                    # otherwise let the copy occur; copy2 will raise an error
+                    copy_function(srcname, dstname)
+            elif os.path.isdir(srcname):
+                copytree(srcname, dstname, symlinks, ignore, copy_function)
+            else:
+                # Will raise a SpecialFileError for unsupported file types
+                copy_function(srcname, dstname)
+        # catch the Error from the recursive copytree so that we can
+        # continue with other files
+        except Error as err:
+            errors.extend(err.args[0])
+        except EnvironmentError as why:
+            errors.append((srcname, dstname, str(why)))
+    try:
+        copystat(src, dst)
+    except OSError as why:
+        if _WindowsError is not None and isinstance(why, _WindowsError):
+            # Copying file access times may fail on Windows
+            pass
+        else:
+            errors.append((src, dst, str(why)))
+    if errors:
+        raise Error(errors)
+
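+# Illustrative sketch (editor's addition, not part of the upstream module):
+# ignore_patterns() builds the `ignore` callable described in the copytree()
+# docstring above; the directory names here are hypothetical.
+#
+#     copytree('project', 'backup/project',
+#              ignore=ignore_patterns('*.pyc', 'tmp*'))
+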
+def rmtree(path: str, ignore_errors: bool = False,
+           onerror: Callable[[Any, str, Tuple[type, BaseException, TracebackType]],
+                              None] = None) -> None:
+    """Recursively delete a directory tree.
+
+    If ignore_errors is set, errors are ignored; otherwise, if onerror
+    is set, it is called to handle the error with arguments (func,
+    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
+    path is the argument to that function that caused it to fail; and
+    exc_info is a tuple returned by sys.exc_info().  If ignore_errors
+    is false and onerror is None, an exception is raised.
+
+    """
+    if ignore_errors:
+        def _onerror(x: Any, y: str,
+                     z: Tuple[type, BaseException, TracebackType]) -> None:
+            pass
+        onerror = _onerror
+    elif onerror is None:
+        def __onerror(x: Any, y: str,
+                      z: Tuple[type, BaseException, TracebackType]) -> None:
+            raise
+        onerror = __onerror
+    try:
+        if os.path.islink(path):
+            # symlinks to directories are forbidden, see bug #1669
+            raise OSError("Cannot call rmtree on a symbolic link")
+    except OSError:
+        onerror(os.path.islink, path, sys.exc_info())
+        # can't continue even if onerror hook returns
+        return
+    names = []  # type: List[str]
+    try:
+        names = os.listdir(path)
+    except os.error as err:
+        onerror(os.listdir, path, sys.exc_info())
+    for name in names:
+        fullname = os.path.join(path, name)
+        try:
+            mode = os.lstat(fullname).st_mode
+        except os.error:
+            mode = 0
+        if stat.S_ISDIR(mode):
+            rmtree(fullname, ignore_errors, onerror)
+        else:
+            try:
+                os.remove(fullname)
+            except os.error as err:
+                onerror(os.remove, fullname, sys.exc_info())
+    try:
+        os.rmdir(path)
+    except os.error:
+        onerror(os.rmdir, path, sys.exc_info())
+
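+# Illustrative sketch (editor's addition, not part of the upstream module):
+# an onerror handler receives (func, path, exc_info) as described in the
+# rmtree() docstring above; the path is hypothetical.
+#
+#     def _log_and_continue(func, path, exc_info):
+#         print('%s failed on %r: %s' % (func.__name__, path, exc_info[1]))
+#
+#     rmtree('/tmp/scratch', onerror=_log_and_continue)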
+
+def _basename(path: str) -> str:
+    # A basename() variant which first strips the trailing slash, if present.
+    # Thus we always get the last component of the path, even for directories.
+    return os.path.basename(path.rstrip(os.path.sep))
+
+def move(src: str, dst: str) -> None:
+    """Recursively move a file or directory to another location. This is
+    similar to the Unix "mv" command.
+
+    If the destination is a directory or a symlink to a directory, the source
+    is moved inside the directory. The destination path must not already
+    exist.
+
+    If the destination already exists but is not a directory, it may be
+    overwritten depending on os.rename() semantics.
+
+    If the destination is on our current filesystem, then rename() is used.
+    Otherwise, src is copied to the destination and then removed.
+    A lot more could be done here...  A look at mv.c shows a lot of
+    the issues this implementation glosses over.
+
+    """
+    real_dst = dst
+    if os.path.isdir(dst):
+        if _samefile(src, dst):
+            # We might be on a case insensitive filesystem,
+            # perform the rename anyway.
+            os.rename(src, dst)
+            return
+
+        real_dst = os.path.join(dst, _basename(src))
+        if os.path.exists(real_dst):
+            raise Error("Destination path '%s' already exists" % real_dst)
+    try:
+        os.rename(src, real_dst)
+    except OSError as exc:
+        if os.path.isdir(src):
+            if _destinsrc(src, dst):
+                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
+            copytree(src, real_dst, symlinks=True)
+            rmtree(src)
+        else:
+            copy2(src, real_dst)
+            os.unlink(src)
+
+def _destinsrc(src: str, dst: str) -> bool:
+    src = abspath(src)
+    dst = abspath(dst)
+    if not src.endswith(os.path.sep):
+        src += os.path.sep
+    if not dst.endswith(os.path.sep):
+        dst += os.path.sep
+    return dst.startswith(src)
+
+def _get_gid(name: str) -> int:
+    """Returns a gid, given a group name."""
+    if getgrnam is None or name is None:
+        return None
+    try:
+        result = getgrnam(name)
+    except KeyError:
+        result = None
+    if result is not None:
+        return result.gr_gid
+    return None
+
+def _get_uid(name: str) -> int:
+    """Returns an uid, given a user name."""
+    if getpwnam is None or name is None:
+        return None
+    try:
+        result = getpwnam(name)
+    except KeyError:
+        result = None
+    if result is not None:
+        return result.pw_uid
+    return None
+
+def _make_tarball(base_name: str, base_dir: str, compress: str = "gzip",
+                  verbose: bool = False, dry_run: bool = False,
+                  owner: str = None, group: str = None,
+                  logger: Any = None) -> str:
+    """Create a (possibly compressed) tar file from all the files under
+    'base_dir'.
+
+    'compress' must be "gzip" (the default), "bzip2", or None.
+
+    'owner' and 'group' can be used to define an owner and a group for the
+    archive that is being built. If not provided, the current owner and group
+    will be used.
+
+    The output tar file will be named 'base_name' +  ".tar", possibly plus
+    the appropriate compression extension (".gz", or ".bz2").
+
+    Returns the output filename.
+    """
+    tar_compression = {'gzip': 'gz', None: ''}
+    compress_ext = {'gzip': '.gz'}
+
+    if _BZ2_SUPPORTED:
+        tar_compression['bzip2'] = 'bz2'
+        compress_ext['bzip2'] = '.bz2'
+
+    # flags for compression program, each element of list will be an argument
+    if compress is not None and compress not in compress_ext.keys():
+        raise ValueError("bad value for 'compress', or compression format not "
+                         "supported : {0}".format(compress))
+
+    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
+    archive_dir = os.path.dirname(archive_name)
+
+    if not os.path.exists(archive_dir):
+        if logger is not None:
+            logger.info("creating %s", archive_dir)
+        if not dry_run:
+            os.makedirs(archive_dir)
+
+    # creating the tarball
+    if logger is not None:
+        logger.info('Creating tar archive')
+
+    uid = _get_uid(owner)
+    gid = _get_gid(group)
+
+    def _set_uid_gid(tarinfo):
+        if gid is not None:
+            tarinfo.gid = gid
+            tarinfo.gname = group
+        if uid is not None:
+            tarinfo.uid = uid
+            tarinfo.uname = owner
+        return tarinfo
+
+    if not dry_run:
+        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
+        try:
+            tar.add(base_dir, filter=_set_uid_gid)
+        finally:
+            tar.close()
+
+    return archive_name
+
+def _call_external_zip(base_dir: str, zip_filename: str, verbose: bool = False,
+                       dry_run: bool = False) -> None:
+    # XXX see if we want to keep an external call here
+    if verbose:
+        zipoptions = "-r"
+    else:
+        zipoptions = "-rq"
+    from distutils.errors import DistutilsExecError
+    from distutils.spawn import spawn
+    try:
+        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
+    except DistutilsExecError:
+        # XXX really should distinguish between "couldn't find
+        # external 'zip' command" and "zip failed".
+        raise ExecError(("unable to create zip file '%s': "
+            "could neither import the 'zipfile' module nor "
+            "find a standalone zip utility") % zip_filename)
+
+def _make_zipfile(base_name: str, base_dir: str, verbose: bool = False,
+                  dry_run: bool = False, logger: Any = None) -> str:
+    """Create a zip file from all the files under 'base_dir'.
+
+    The output zip file will be named 'base_name' + ".zip".  Uses either the
+    "zipfile" Python module (if available) or the InfoZIP "zip" utility
+    (if installed and found on the default search path).  If neither tool is
+    available, raises ExecError.  Returns the name of the output zip
+    file.
+    """
+    zip_filename = base_name + ".zip"
+    archive_dir = os.path.dirname(base_name)
+
+    if not os.path.exists(archive_dir):
+        if logger is not None:
+            logger.info("creating %s", archive_dir)
+        if not dry_run:
+            os.makedirs(archive_dir)
+
+    # If zipfile module is not available, try spawning an external 'zip'
+    # command.
+    try:
+        import zipfile
+    except ImportError:
+        zipfile = None
+
+    if zipfile is None:
+        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
+    else:
+        if logger is not None:
+            logger.info("creating '%s' and adding '%s' to it",
+                        zip_filename, base_dir)
+
+        if not dry_run:
+            zip = zipfile.ZipFile(zip_filename, "w",
+                                  compression=zipfile.ZIP_DEFLATED)
+
+            for dirpath, dirnames, filenames in os.walk(base_dir):
+                for name in filenames:
+                    path = os.path.normpath(os.path.join(dirpath, name))
+                    if os.path.isfile(path):
+                        zip.write(path, path)
+                        if logger is not None:
+                            logger.info("adding '%s'", path)
+            zip.close()
+
+    return zip_filename
+
+_ARCHIVE_FORMATS = {
+    'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
+    'tar':   (_make_tarball, [('compress', None)], "uncompressed tar file"),
+    'zip':   (_make_zipfile, [], "ZIP file")
+    } # type: Dict[str, Tuple[Any, Sequence[Tuple[str, str]], str]]
+
+if _BZ2_SUPPORTED:
+    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
+                                "bzip2'ed tar-file")
+
+def get_archive_formats() -> List[Tuple[str, str]]:
+    """Returns a list of supported formats for archiving and unarchiving.
+
+    Each element of the returned sequence is a tuple (name, description)
+    """
+    formats = [(name, registry[2]) for name, registry in
+               _ARCHIVE_FORMATS.items()]
+    formats.sort()
+    return formats
+
+def register_archive_format(name: str, function: Any,
+                            extra_args: Sequence[Tuple[str, Any]] = None,
+                            description: str = '') -> None:
+    """Registers an archive format.
+
+    name is the name of the format. function is the callable that will be
+    used to create archives. If provided, extra_args is a sequence of
+    (name, value) tuples that will be passed as arguments to the callable.
+    description can be provided to describe the format, and will be returned
+    by the get_archive_formats() function.
+    """
+    if extra_args is None:
+        extra_args = []
+    if not callable(function):
+        raise TypeError('The %s object is not callable' % function)
+    if not isinstance(extra_args, (tuple, list)):
+        raise TypeError('extra_args needs to be a sequence')
+    for element in extra_args:
+        if not isinstance(element, (tuple, list)) or len(cast(tuple, element)) != 2:
+            raise TypeError('extra_args elements must be (arg_name, value) tuples')
+
+    _ARCHIVE_FORMATS[name] = (function, extra_args, description)
+
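+# Illustrative sketch (editor's addition, not part of the upstream module):
+# registering a custom archiver.  _make_xzfile is a hypothetical callable
+# with the same (base_name, base_dir, **kwargs) signature as _make_tarball.
+#
+#     register_archive_format('xztar', _make_xzfile,
+#                             extra_args=[('compress', 'xz')],
+#                             description="xz'ed tar-file")
+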
+def unregister_archive_format(name: str) -> None:
+    del _ARCHIVE_FORMATS[name]
+
+def make_archive(base_name: str, format: str, root_dir: str = None,
+                 base_dir: str = None, verbose: bool = False,
+                 dry_run: bool = False, owner: str = None,
+                 group: str = None, logger: Any = None) -> str:
+    """Create an archive file (eg. zip or tar).
+
+    'base_name' is the name of the file to create, minus any format-specific
+    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
+    or "gztar".
+
+    'root_dir' is a directory that will be the root directory of the
+    archive; ie. we typically chdir into 'root_dir' before creating the
+    archive.  'base_dir' is the directory where we start archiving from;
+    ie. 'base_dir' will be the common prefix of all files and
+    directories in the archive.  'root_dir' and 'base_dir' both default
+    to the current directory.  Returns the name of the archive file.
+
+    'owner' and 'group' are used when creating a tar archive. By default,
+    uses the current owner and group.
+    """
+    save_cwd = os.getcwd()
+    if root_dir is not None:
+        if logger is not None:
+            logger.debug("changing into '%s'", root_dir)
+        base_name = os.path.abspath(base_name)
+        if not dry_run:
+            os.chdir(root_dir)
+
+    if base_dir is None:
+        base_dir = os.curdir
+
+    kwargs = {'dry_run': dry_run, 'logger': logger}
+
+    try:
+        format_info = _ARCHIVE_FORMATS[format]
+    except KeyError:
+        raise ValueError("unknown archive format '%s'" % format)
+
+    func = format_info[0]
+    for arg, val in format_info[1]:
+        kwargs[arg] = val
+
+    if format != 'zip':
+        kwargs['owner'] = owner
+        kwargs['group'] = group
+
+    try:
+        filename = func(base_name, base_dir, **kwargs)
+    finally:
+        if root_dir is not None:
+            if logger is not None:
+                logger.debug("changing back to '%s'", save_cwd)
+            os.chdir(save_cwd)
+
+    return filename
+
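+# Illustrative sketch (editor's addition, not part of the upstream module);
+# the directory names are hypothetical.  This creates /tmp/project.tar.gz
+# containing the 'project' tree found under /home/user:
+#
+#     make_archive('/tmp/project', 'gztar',
+#                  root_dir='/home/user', base_dir='project')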
+
+def get_unpack_formats() -> List[Tuple[str, List[str], str]]:
+    """Returns a list of supported formats for unpacking.
+
+    Each element of the returned sequence is a tuple
+    (name, extensions, description)
+    """
+    formats = [(name, info[0], info[3]) for name, info in
+               _UNPACK_FORMATS.items()]
+    formats.sort()
+    return formats
+
+def _check_unpack_options(extensions: List[str], function: Any,
+                          extra_args: Sequence[Tuple[str, Any]]) -> None:
+    """Checks what gets registered as an unpacker."""
+    # first make sure no other unpacker is registered for this extension
+    existing_extensions = {}  # type: Dict[str, str]
+    for name, info in _UNPACK_FORMATS.items():
+        for ext in info[0]:
+            existing_extensions[ext] = name
+
+    for extension in extensions:
+        if extension in existing_extensions:
+            msg = '%s is already registered for "%s"'
+            raise RegistryError(msg % (extension,
+                                       existing_extensions[extension]))
+
+    if not callable(function):
+        raise TypeError('The registered function must be a callable')
+
+
+def register_unpack_format(name: str, extensions: List[str], function: Any,
+                           extra_args: Sequence[Tuple[str, Any]] = None,
+                           description: str = '') -> None:
+    """Registers an unpack format.
+
+    `name` is the name of the format. `extensions` is a list of extensions
+    corresponding to the format.
+
+    `function` is the callable that will be
+    used to unpack archives. The callable will receive archives to unpack.
+    If it's unable to handle an archive, it needs to raise a ReadError
+    exception.
+
+    If provided, `extra_args` is a sequence of
+    (name, value) tuples that will be passed as arguments to the callable.
+    description can be provided to describe the format, and will be returned
+    by the get_unpack_formats() function.
+    """
+    if extra_args is None:
+        extra_args = []
+    _check_unpack_options(extensions, function, extra_args)
+    _UNPACK_FORMATS[name] = extensions, function, extra_args, description
+
+def unregister_unpack_format(name: str) -> None:
+    """Removes the pack format from the registery."""
+    del _UNPACK_FORMATS[name]
+
+def _ensure_directory(path: str) -> None:
+    """Ensure that the parent directory of `path` exists"""
+    dirname = os.path.dirname(path)
+    if not os.path.isdir(dirname):
+        os.makedirs(dirname)
+
+def _unpack_zipfile(filename: str, extract_dir: str) -> None:
+    """Unpack zip `filename` to `extract_dir`
+    """
+    try:
+        import zipfile
+    except ImportError:
+        raise ReadError('zlib not supported, cannot unpack this archive.')
+
+    if not zipfile.is_zipfile(filename):
+        raise ReadError("%s is not a zip file" % filename)
+
+    zip = zipfile.ZipFile(filename)
+    try:
+        for info in zip.infolist():
+            name = info.filename
+
+            # don't extract absolute paths or ones with .. in them
+            if name.startswith('/') or '..' in name:
+                continue
+
+            target = os.path.join(extract_dir, *name.split('/'))
+            if not target:
+                continue
+
+            _ensure_directory(target)
+            if not name.endswith('/'):
+                # file
+                data = zip.read(info.filename)
+                f = open(target,'wb')
+                try:
+                    f.write(data)
+                finally:
+                    f.close()
+                    del data
+    finally:
+        zip.close()
+
+def _unpack_tarfile(filename: str, extract_dir: str) -> None:
+    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
+    """
+    try:
+        tarobj = tarfile.open(filename)
+    except tarfile.TarError:
+        raise ReadError(
+            "%s is not a compressed or uncompressed tar file" % filename)
+    try:
+        tarobj.extractall(extract_dir)
+    finally:
+        tarobj.close()
+
+_UNPACK_FORMATS = {
+    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
+    'tar':   (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
+    'zip':   (['.zip'], _unpack_zipfile, [], "ZIP file")
+    } # type: Dict[str, Tuple[List[str], Any, Sequence[Tuple[str, Any]], str]]
+
+if _BZ2_SUPPORTED:
+    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
+                                "bzip2'ed tar-file")
+
+def _find_unpack_format(filename: str) -> str:
+    for name, info in _UNPACK_FORMATS.items():
+        for extension in info[0]:
+            if filename.endswith(extension):
+                return name
+    return None
+
+def unpack_archive(filename: str, extract_dir: str = None,
+                   format: str = None) -> None:
+    """Unpack an archive.
+
+    `filename` is the name of the archive.
+
+    `extract_dir` is the name of the target directory, where the archive
+    is unpacked. If not provided, the current working directory is used.
+
+    `format` is the archive format: one of "zip", "tar", or "gztar", or any
+    other registered format. If not provided, unpack_archive will use the
+    filename extension and see if an unpacker was registered for that
+    extension.
+
+    In case none is found, a ReadError is raised.
+    """
+    if extract_dir is None:
+        extract_dir = os.getcwd()
+
+    if format is not None:
+        try:
+            format_info = _UNPACK_FORMATS[format]
+        except KeyError:
+            raise ValueError("Unknown unpack format '{0}'".format(format))
+
+        func = format_info[1]
+        func(filename, extract_dir, **dict(format_info[2]))
+    else:
+        # we need to look at the registered unpackers supported extensions
+        format = _find_unpack_format(filename)
+        if format is None:
+            raise ReadError("Unknown archive format '{0}'".format(filename))
+
+        func = _UNPACK_FORMATS[format][1]
+        kwargs = dict(_UNPACK_FORMATS[format][2])
+        func(filename, extract_dir, **kwargs)
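+
+# Illustrative sketch (editor's addition, not part of the upstream module);
+# the file and directory names are hypothetical.  The format is inferred
+# from the '.tar.gz' extension via _find_unpack_format():
+#
+#     unpack_archive('/tmp/project.tar.gz', extract_dir='/tmp/restore')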
diff --git a/test-data/stdlib-samples/3.2/subprocess.py b/test-data/stdlib-samples/3.2/subprocess.py
new file mode 100644
index 0000000..70f5c8d
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/subprocess.py
@@ -0,0 +1,1704 @@
+# subprocess - Subprocesses with accessible I/O streams
+#
+# For more information about this module, see PEP 324.
+#
+# Copyright (c) 2003-2005 by Peter Astrand <astrand at lysator.liu.se>
+#
+# Licensed to PSF under a Contributor Agreement.
+# See http://www.python.org/2.4/license for licensing details.
+
+r"""subprocess - Subprocesses with accessible I/O streams
+
+This module allows you to spawn processes, connect to their
+input/output/error pipes, and obtain their return codes.  This module
+intends to replace several other, older modules and functions, like:
+
+os.system
+os.spawn*
+
+Information about how the subprocess module can be used to replace these
+modules and functions can be found below.
+
+
+
+Using the subprocess module
+===========================
+This module defines one class called Popen:
+
+class Popen(args, bufsize=0, executable=None,
+            stdin=None, stdout=None, stderr=None,
+            preexec_fn=None, close_fds=True, shell=False,
+            cwd=None, env=None, universal_newlines=False,
+            startupinfo=None, creationflags=0,
+            restore_signals=True, start_new_session=False, pass_fds=()):
+
+
+Arguments are:
+
+args should be a string, or a sequence of program arguments.  The
+program to execute is normally the first item in the args sequence or
+string, but can be explicitly set by using the executable argument.
+
+On POSIX, with shell=False (default): In this case, the Popen class
+uses os.execvp() to execute the child program.  args should normally
+be a sequence.  A string will be treated as a sequence with the string
+as the only item (the program to execute).
+
+On POSIX, with shell=True: If args is a string, it specifies the
+command string to execute through the shell.  If args is a sequence,
+the first item specifies the command string, and any additional items
+will be treated as additional shell arguments.
+
+On Windows: the Popen class uses CreateProcess() to execute the child
+program, which operates on strings.  If args is a sequence, it will be
+converted to a string using the list2cmdline method.  Please note that
+not all MS Windows applications interpret the command line the same
+way: The list2cmdline is designed for applications using the same
+rules as the MS C runtime.
+
+bufsize, if given, has the same meaning as the corresponding argument
+to the built-in open() function: 0 means unbuffered, 1 means line
+buffered, any other positive value means use a buffer of
+(approximately) that size.  A negative bufsize means to use the system
+default, which usually means fully buffered.  The default value for
+bufsize is 0 (unbuffered).
+
+stdin, stdout and stderr specify the executed programs' standard
+input, standard output and standard error file handles, respectively.
+Valid values are PIPE, an existing file descriptor (a positive
+integer), an existing file object, and None.  PIPE indicates that a
+new pipe to the child should be created.  With None, no redirection
+will occur; the child's file handles will be inherited from the
+parent.  Additionally, stderr can be STDOUT, which indicates that the
+stderr data from the applications should be captured into the same
+file handle as for stdout.
+
+On POSIX, if preexec_fn is set to a callable object, this object will be
+called in the child process just before the child is executed.  The use
+of preexec_fn is not thread safe, using it in the presence of threads
+could lead to a deadlock in the child process before the new executable
+is executed.
+
+If close_fds is true, all file descriptors except 0, 1 and 2 will be
+closed before the child process is executed.  The default for close_fds
+varies by platform:  Always true on POSIX.  True when stdin/stdout/stderr
+are None on Windows, false otherwise.
+
+pass_fds is an optional sequence of file descriptors to keep open between the
+parent and child.  Providing any pass_fds implicitly sets close_fds to true.
+
+if shell is true, the specified command will be executed through the
+shell.
+
+If cwd is not None, the current directory will be changed to cwd
+before the child is executed.
+
+On POSIX, if restore_signals is True all signals that Python sets to
+SIG_IGN are restored to SIG_DFL in the child process before the exec.
+Currently this includes the SIGPIPE, SIGXFZ and SIGXFSZ signals.  This
+parameter does nothing on Windows.
+
+On POSIX, if start_new_session is True, the setsid() system call will be made
+in the child process prior to executing the command.
+
+If env is not None, it defines the environment variables for the new
+process.
+
+If universal_newlines is true, the file objects stdout and stderr are
+opened as text files, but lines may be terminated by any of '\n',
+the Unix end-of-line convention, '\r', the old Macintosh convention or
+'\r\n', the Windows convention.  All of these external representations
+are seen as '\n' by the Python program.  Note: This feature is only
+available if Python is built with universal newline support (the
+default).  Also, the newlines attributes of the file objects stdout,
+stdin and stderr are not updated by the communicate() method.
+
+The startupinfo and creationflags, if given, will be passed to the
+underlying CreateProcess() function.  They can specify things such as
+appearance of the main window and priority for the new process.
+(Windows only)
+
+
+This module also defines some shortcut functions:
+
+call(*popenargs, **kwargs):
+    Run command with arguments.  Wait for command to complete, then
+    return the returncode attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    >>> retcode = subprocess.call(["ls", "-l"])
+
+check_call(*popenargs, **kwargs):
+    Run command with arguments.  Wait for command to complete.  If the
+    exit code was zero then return, otherwise raise
+    CalledProcessError.  The CalledProcessError object will have the
+    return code in the returncode attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    >>> subprocess.check_call(["ls", "-l"])
+    0
+
+getstatusoutput(cmd):
+    Return (status, output) of executing cmd in a shell.
+
+    Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple
+    (status, output).  cmd is actually run as '{ cmd ; } 2>&1', so that the
+    returned output will contain output or error messages. A trailing newline
+    is stripped from the output. The exit status for the command can be
+    interpreted according to the rules for the C function wait().  Example:
+
+    >>> subprocess.getstatusoutput('ls /bin/ls')
+    (0, '/bin/ls')
+    >>> subprocess.getstatusoutput('cat /bin/junk')
+    (256, 'cat: /bin/junk: No such file or directory')
+    >>> subprocess.getstatusoutput('/bin/junk')
+    (256, 'sh: /bin/junk: not found')
+
+getoutput(cmd):
+    Return output (stdout or stderr) of executing cmd in a shell.
+
+    Like getstatusoutput(), except the exit status is ignored and the return
+    value is a string containing the command's output.  Example:
+
+    >>> subprocess.getoutput('ls /bin/ls')
+    '/bin/ls'
+
+check_output(*popenargs, **kwargs):
+    Run command with arguments and return its output as a byte string.
+
+    If the exit code was non-zero it raises a CalledProcessError.  The
+    CalledProcessError object will have the return code in the returncode
+    attribute and output in the output attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    >>> output = subprocess.check_output(["ls", "-l", "/dev/null"])
+
+
+Exceptions
+----------
+Exceptions raised in the child process, before the new program has
+started to execute, will be re-raised in the parent.  Additionally,
+the exception object will have one extra attribute called
+'child_traceback', which is a string containing traceback information
+from the child's point of view.
+
+The most common exception raised is OSError.  This occurs, for
+example, when trying to execute a non-existent file.  Applications
+should prepare for OSErrors.
+
+A ValueError will be raised if Popen is called with invalid arguments.
+
+check_call() and check_output() will raise CalledProcessError, if the
+called process returns a non-zero return code.
+
+
+Security
+--------
+Unlike some other popen functions, this implementation will never call
+/bin/sh implicitly.  This means that all characters, including shell
+metacharacters, can safely be passed to child processes.
+
+
+Popen objects
+=============
+Instances of the Popen class have the following methods:
+
+poll()
+    Check if child process has terminated.  Returns returncode
+    attribute.
+
+wait()
+    Wait for child process to terminate.  Returns returncode attribute.
+
+communicate(input=None)
+    Interact with process: Send data to stdin.  Read data from stdout
+    and stderr, until end-of-file is reached.  Wait for process to
+    terminate.  The optional input argument should be a string to be
+    sent to the child process, or None, if no data should be sent to
+    the child.
+
+    communicate() returns a tuple (stdout, stderr).
+
+    Note: The data read is buffered in memory, so do not use this
+    method if the data size is large or unlimited.
+
+The following attributes are also available:
+
+stdin
+    If the stdin argument is PIPE, this attribute is a file object
+    that provides input to the child process.  Otherwise, it is None.
+
+stdout
+    If the stdout argument is PIPE, this attribute is a file object
+    that provides output from the child process.  Otherwise, it is
+    None.
+
+stderr
+    If the stderr argument is PIPE, this attribute is a file object that
+    provides error output from the child process.  Otherwise, it is
+    None.
+
+pid
+    The process ID of the child process.
+
+returncode
+    The child return code.  A None value indicates that the process
+    hasn't terminated yet.  A negative value -N indicates that the
+    child was terminated by signal N (POSIX only).
+
+
+Replacing older functions with the subprocess module
+====================================================
+In this section, "a ==> b" means that b can be used as a replacement
+for a.
+
+Note: All of the functions being replaced in this section fail (more
+or less) silently if the executed program cannot be found; this module
+raises an OSError exception instead.
+
+In the following examples, we assume that the subprocess module is
+imported with "from subprocess import *".
+
+
+Replacing /bin/sh shell backquote
+---------------------------------
+output=`mycmd myarg`
+==>
+output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0]
+
+
+Replacing shell pipe line
+-------------------------
+output=`dmesg | grep hda`
+==>
+p1 = Popen(["dmesg"], stdout=PIPE)
+p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
+output = p2.communicate()[0]
+
+
+Replacing os.system()
+---------------------
+sts = os.system("mycmd" + " myarg")
+==>
+p = Popen("mycmd" + " myarg", shell=True)
+pid, sts = os.waitpid(p.pid, 0)
+
+Note:
+
+* Calling the program through the shell is usually not required.
+
+* It's easier to look at the returncode attribute than the
+  exitstatus.
+
+A more real-world example would look like this:
+
+try:
+    retcode = call("mycmd" + " myarg", shell=True)
+    if retcode < 0:
+        print("Child was terminated by signal", -retcode, file=sys.stderr)
+    else:
+        print("Child returned", retcode, file=sys.stderr)
+except OSError as e:
+    print("Execution failed:", e, file=sys.stderr)
+
+
+Replacing os.spawn*
+-------------------
+P_NOWAIT example:
+
+pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg")
+==>
+pid = Popen(["/bin/mycmd", "myarg"]).pid
+
+
+P_WAIT example:
+
+retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg")
+==>
+retcode = call(["/bin/mycmd", "myarg"])
+
+
+Vector example:
+
+os.spawnvp(os.P_NOWAIT, path, args)
+==>
+Popen([path] + args[1:])
+
+
+Environment example:
+
+os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env)
+==>
+Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"})
+"""
+
+import sys
+mswindows = (sys.platform == "win32")
+
+import io
+import os
+import traceback
+import gc
+import signal
+import builtins
+import warnings
+import errno
+
+from typing import (
+    Any, Tuple, List, Sequence, Callable, Mapping, cast, Set, Dict, IO,
+    TextIO, AnyStr
+)
+from types import TracebackType
+
+# Exception classes used by this module.
+class CalledProcessError(Exception):
+    """This exception is raised when a process run by check_call() or
+    check_output() returns a non-zero exit status.
+    The exit status will be stored in the returncode attribute;
+    check_output() will also store the output in the output attribute.
+    """
+    def __init__(self, returncode: int, cmd: str, output: Any = None) -> None:
+        self.returncode = returncode
+        self.cmd = cmd
+        self.output = output
+    def __str__(self) -> str:
+        return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
+
+
+if mswindows:
+    import threading
+    import msvcrt
+    import _subprocess
+    class STARTUPINFO:
+        dwFlags = 0
+        hStdInput = cast(Any, None)
+        hStdOutput = cast(Any, None)
+        hStdError = cast(Any, None)
+        wShowWindow = 0
+    class pywintypes:
+        error = IOError
+else:
+    import select
+    _has_poll = hasattr(select, 'poll')
+    import fcntl
+    import pickle
+
+    try:
+        import _posixsubprocess
+        have_posixsubprocess = True
+    except ImportError:
+        have_posixsubprocess = False
+        warnings.warn("The _posixsubprocess module is not being used. "
+                      "Child process reliability may suffer if your "
+                      "program uses threads.", RuntimeWarning)
+
+    # When select or poll has indicated that the file is writable,
+    # we can write up to _PIPE_BUF bytes without risk of blocking.
+    # POSIX defines PIPE_BUF as >= 512.
+    _PIPE_BUF = getattr(select, 'PIPE_BUF', 512) # type: int
+
+    _FD_CLOEXEC = getattr(fcntl, 'FD_CLOEXEC', 1) # type: int
+
+    def _set_cloexec(fd: int, cloexec: bool) -> None:
+        old = fcntl.fcntl(fd, fcntl.F_GETFD)
+        if cloexec:
+            fcntl.fcntl(fd, fcntl.F_SETFD, old | _FD_CLOEXEC)
+        else:
+            fcntl.fcntl(fd, fcntl.F_SETFD, old & ~_FD_CLOEXEC)
+
+    if have_posixsubprocess:
+        _create_pipe = _posixsubprocess.cloexec_pipe
+    else:
+        def __create_pipe() -> Tuple[int, int]:
+            fds = os.pipe()
+            _set_cloexec(fds[0], True)
+            _set_cloexec(fds[1], True)
+            return fds
+        _create_pipe = __create_pipe
+
+__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
+           "getoutput", "check_output", "CalledProcessError"]
+
+if mswindows:
+    from _subprocess import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
+                             STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
+                             STD_ERROR_HANDLE, SW_HIDE,
+                             STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW)
+
+    __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP",
+                    "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE",
+                    "STD_ERROR_HANDLE", "SW_HIDE",
+                    "STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW"])
+try:
+    MAXFD = os.sysconf("SC_OPEN_MAX")
+except:
+    MAXFD = 256
+
+# This list holds Popen instances for which the underlying process had not
+# exited at the time its __del__ method got called: those processes are wait()ed
+# for synchronously from _cleanup() when a new Popen object is created, to avoid
+# zombie processes.
+_active = []  # type: List[Popen]
+
+def _cleanup() -> None:
+    for inst in _active[:]:
+        res = inst._internal_poll(_deadstate=sys.maxsize)
+        if res is not None:
+            try:
+                _active.remove(inst)
+            except ValueError:
+                # This can happen if two threads create a new Popen instance.
+                # It's harmless that it was already removed, so ignore.
+                pass
+
+PIPE = -1
+STDOUT = -2
+
+
+def _eintr_retry_call(func: Any, *args: Any) -> Any:
+    while True:
+        try:
+            return func(*args)
+        except (OSError, IOError) as e:
+            if e.errno == errno.EINTR:
+                continue
+            raise
+
+
+def call(*popenargs: Any, **kwargs: Any) -> int:
+    """Run command with arguments.  Wait for command to complete, then
+    return the returncode attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    retcode = call(["ls", "-l"])
+    """
+    return Popen(*popenargs, **kwargs).wait()
+
+
+def check_call(*popenargs: Any, **kwargs: Any) -> int:
+    """Run command with arguments.  Wait for command to complete.  If
+    the exit code was zero then return, otherwise raise
+    CalledProcessError.  The CalledProcessError object will have the
+    return code in the returncode attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    check_call(["ls", "-l"])
+    """
+    retcode = call(*popenargs, **kwargs)
+    if retcode:
+        cmd = kwargs.get("args")
+        if cmd is None:
+            cmd = popenargs[0]
+        raise CalledProcessError(retcode, cmd)
+    return 0
+
+
+def check_output(*popenargs: Any, **kwargs: Any) -> bytes:
+    r"""Run command with arguments and return its output as a byte string.
+
+    If the exit code was non-zero it raises a CalledProcessError.  The
+    CalledProcessError object will have the return code in the returncode
+    attribute and output in the output attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    >>> check_output(["ls", "-l", "/dev/null"])
+    b'crw-rw-rw- 1 root root 1, 3 Oct 18  2007 /dev/null\n'
+
+    The stdout argument is not allowed as it is used internally.
+    To capture standard error in the result, use stderr=STDOUT.
+
+    >>> check_output(["/bin/sh", "-c",
+    ...               "ls -l non_existent_file ; exit 0"],
+    ...              stderr=STDOUT)
+    b'ls: non_existent_file: No such file or directory\n'
+    """
+    if 'stdout' in kwargs:
+        raise ValueError('stdout argument not allowed, it will be overridden.')
+    kwargs['stdout'] = PIPE
+    process = Popen(*popenargs, **kwargs)
+    output, unused_err = process.communicate()
+    retcode = process.poll()
+    if retcode:
+        cmd = kwargs.get("args")
+        if cmd is None:
+            cmd = popenargs[0]
+        raise CalledProcessError(retcode, cmd, output=output)
+    return output
+
+
+def list2cmdline(seq: Sequence[str]) -> str:
+    """
+    Translate a sequence of arguments into a command line
+    string, using the same rules as the MS C runtime:
+
+    1) Arguments are delimited by white space, which is either a
+       space or a tab.
+
+    2) A string surrounded by double quotation marks is
+       interpreted as a single argument, regardless of white space
+       contained within.  A quoted string can be embedded in an
+       argument.
+
+    3) A double quotation mark preceded by a backslash is
+       interpreted as a literal double quotation mark.
+
+    4) Backslashes are interpreted literally, unless they
+       immediately precede a double quotation mark.
+
+    5) If backslashes immediately precede a double quotation mark,
+       every pair of backslashes is interpreted as a literal
+       backslash.  If the number of backslashes is odd, the last
+       backslash escapes the next double quotation mark as
+       described in rule 3.
+    """
+
+    # See
+    # http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
+    # or search http://msdn.microsoft.com for
+    # "Parsing C++ Command-Line Arguments"
+    result = []  # type: List[str]
+    needquote = False
+    for arg in seq:
+        bs_buf = []  # type: List[str]
+
+        # Add a space to separate this argument from the others
+        if result:
+            result.append(' ')
+
+        needquote = (" " in arg) or ("\t" in arg) or not arg
+        if needquote:
+            result.append('"')
+
+        for c in arg:
+            if c == '\\':
+                # Don't know if we need to double yet.
+                bs_buf.append(c)
+            elif c == '"':
+                # Double backslashes.
+                result.append('\\' * len(bs_buf)*2)
+                bs_buf = []
+                result.append('\\"')
+            else:
+                # Normal char
+                if bs_buf:
+                    result.extend(bs_buf)
+                    bs_buf = []
+                result.append(c)
+
+        # Add remaining backslashes, if any.
+        if bs_buf:
+            result.extend(bs_buf)
+
+        if needquote:
+            result.extend(bs_buf)
+            result.append('"')
+
+    return ''.join(result)
+
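+# Illustrative sketch (editor's addition, not part of the upstream module):
+# arguments containing whitespace get quoted, while backslashes that do not
+# precede a double quote pass through literally.
+#
+#     >>> list2cmdline(['copy', 'my file.txt', 'D:\\backup'])
+#     'copy "my file.txt" D:\\backup'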
+
+# Various tools for executing commands and looking at their output and status.
+#
+# NB This only works (and is only relevant) for POSIX.
+
+def getstatusoutput(cmd: str) -> Tuple[int, str]:
+    """Return (status, output) of executing cmd in a shell.
+
+    Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple
+    (status, output).  cmd is actually run as '{ cmd ; } 2>&1', so that the
+    returned output will contain output or error messages.  A trailing newline
+    is stripped from the output.  The exit status for the command can be
+    interpreted according to the rules for the C function wait().  Example:
+
+    >>> import subprocess
+    >>> subprocess.getstatusoutput('ls /bin/ls')
+    (0, '/bin/ls')
+    >>> subprocess.getstatusoutput('cat /bin/junk')
+    (256, 'cat: /bin/junk: No such file or directory')
+    >>> subprocess.getstatusoutput('/bin/junk')
+    (256, 'sh: /bin/junk: not found')
+    """
+    pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
+    text = pipe.read()
+    sts = pipe.close()
+    if sts is None: sts = 0
+    if text[-1:] == '\n': text = text[:-1]
+    return sts, text
+
+
+def getoutput(cmd: str) -> str:
+    """Return output (stdout or stderr) of executing cmd in a shell.
+
+    Like getstatusoutput(), except the exit status is ignored and the return
+    value is a string containing the command's output.  Example:
+
+    >>> import subprocess
+    >>> subprocess.getoutput('ls /bin/ls')
+    '/bin/ls'
+    """
+    return getstatusoutput(cmd)[1]
+
+
+_PLATFORM_DEFAULT_CLOSE_FDS = object()
+
+
+class Popen(object):
+    def __init__(self, args: Sequence[Any], bufsize: int = 0,
+                 executable: str = None, stdin: Any = None,
+                 stdout: Any = None, stderr: Any = None,
+                 preexec_fn: Callable[[], Any] = None,
+                 close_fds: Any = _PLATFORM_DEFAULT_CLOSE_FDS,
+                 shell: int = False, cwd: str = None,
+                 env: Mapping[str, str] = None,
+                 universal_newlines: int = False,
+                 startupinfo: 'STARTUPINFO' = None, creationflags: int = 0,
+                 restore_signals: bool = True, start_new_session: bool = False,
+                 pass_fds: Any = ()) -> None:
+        """Create new Popen instance."""
+        _cleanup()
+
+        self._child_created = False
+        if bufsize is None:
+            bufsize = 0  # Restore default
+        if not isinstance(bufsize, int):
+            raise TypeError("bufsize must be an integer")
+
+        if mswindows:
+            if preexec_fn is not None:
+                raise ValueError("preexec_fn is not supported on Windows "
+                                 "platforms")
+            any_stdio_set = (stdin is not None or stdout is not None or
+                             stderr is not None)
+            if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:
+                if any_stdio_set:
+                    close_fds = False
+                else:
+                    close_fds = True
+            elif close_fds and any_stdio_set:
+                raise ValueError(
+                        "close_fds is not supported on Windows platforms"
+                        " if you redirect stdin/stdout/stderr")
+        else:
+            # POSIX
+            if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:
+                close_fds = True
+            if pass_fds and not close_fds:
+                warnings.warn("pass_fds overriding close_fds.", RuntimeWarning)
+                close_fds = True
+            if startupinfo is not None:
+                raise ValueError("startupinfo is only supported on Windows "
+                                 "platforms")
+            if creationflags != 0:
+                raise ValueError("creationflags is only supported on Windows "
+                                 "platforms")
+
+        self.stdin = None # type: IO[Any]
+        self.stdout = None # type: IO[Any]
+        self.stderr = None # type: IO[Any]
+        self.pid = None # type: int
+        self.returncode = None # type: int
+        self.universal_newlines = universal_newlines
+
+        # Input and output objects. The general principle is like
+        # this:
+        #
+        # Parent                   Child
+        # ------                   -----
+        # p2cwrite   ---stdin--->  p2cread
+        # c2pread    <--stdout---  c2pwrite
+        # errread    <--stderr---  errwrite
+        #
+        # On POSIX, the child objects are file descriptors.  On
+        # Windows, these are Windows file handles.  The parent objects
+        # are file descriptors on both platforms.  The parent objects
+        # are -1 when not using PIPEs. The child objects are -1
+        # when not redirecting.
+
+        (p2cread, p2cwrite,
+         c2pread, c2pwrite,
+         errread, errwrite) = self._get_handles(stdin, stdout, stderr)
+
+        # We wrap OS handles *before* launching the child, otherwise a
+        # quickly terminating child could make our fds unwrappable
+        # (see #8458).
+
+        if mswindows:
+            if p2cwrite != -1:
+                p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0)
+            if c2pread != -1:
+                c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0)
+            if errread != -1:
+                errread = msvcrt.open_osfhandle(errread.Detach(), 0)
+
+        if p2cwrite != -1:
+            self.stdin = io.open(p2cwrite, 'wb', bufsize)
+            if self.universal_newlines:
+                self.stdin = io.TextIOWrapper(self.stdin, write_through=True)
+        if c2pread != -1:
+            self.stdout = io.open(c2pread, 'rb', bufsize)
+            if universal_newlines:
+                self.stdout = io.TextIOWrapper(self.stdout)
+        if errread != -1:
+            self.stderr = io.open(errread, 'rb', bufsize)
+            if universal_newlines:
+                self.stderr = io.TextIOWrapper(self.stderr)
+
+        try:
+            self._execute_child(args, executable, preexec_fn, close_fds,
+                                pass_fds, cwd, env, universal_newlines,
+                                startupinfo, creationflags, shell,
+                                p2cread, p2cwrite,
+                                c2pread, c2pwrite,
+                                errread, errwrite,
+                                restore_signals, start_new_session)
+        except:
+            # Cleanup if the child failed starting
+            for f in filter(None, [self.stdin, self.stdout, self.stderr]):
+                try:
+                    f.close()
+                except EnvironmentError:
+                    # Ignore EBADF or other errors
+                    pass
+            raise
+
+
+    def _translate_newlines(self, data: bytes, encoding: str) -> str:
+        data = data.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
+        return data.decode(encoding)
+
+    def __enter__(self) -> 'Popen':
+        return self
+
+    def __exit__(self, type: type, value: BaseException,
+                 traceback: TracebackType) -> bool:
+        if self.stdout:
+            self.stdout.close()
+        if self.stderr:
+            self.stderr.close()
+        if self.stdin:
+            self.stdin.close()
+        # Wait for the process to terminate, to avoid zombies.
+        self.wait()
+        return False
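+    # Illustrative note (editorial addition, not part of the upstream sample):
+    # the __enter__/__exit__ pair above makes Popen usable as a context
+    # manager.  A minimal, hedged sketch of typical use:
+    #
+    #     with Popen(["ls", "-l"], stdout=PIPE) as proc:   # hypothetical command
+    #         data = proc.stdout.read()
+    #
+    # __exit__ closes any open pipes and waits for the child, so no explicit
+    # cleanup is needed on this path.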
+
+    def __del__(self, _maxsize: int = sys.maxsize,
+                _active: List['Popen'] = _active) -> None:
+        # If __init__ hasn't had a chance to execute (e.g. if it
+        # was passed an undeclared keyword argument), we don't
+        # have a _child_created attribute at all.
+        if not getattr(self, '_child_created', False):
+            # We didn't get to successfully create a child process.
+            return
+        # In case the child hasn't been waited on, check if it's done.
+        self._internal_poll(_deadstate=_maxsize)
+        if self.returncode is None and _active is not None:
+            # Child is still running, keep us alive until we can wait on it.
+            _active.append(self)
+
+
+    def communicate(self, input: Any = None) -> Tuple[Any, Any]:
+        """Interact with process: Send data to stdin.  Read data from
+        stdout and stderr, until end-of-file is reached.  Wait for
+        process to terminate.  The optional input argument should be a
+        string to be sent to the child process, or None, if no data
+        should be sent to the child.
+
+        communicate() returns a tuple (stdout, stderr)."""
+
+        # Optimization: If we are only using one pipe, or no pipe at
+        # all, using select() or threads is unnecessary.
+        if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
+            stdout = None # type: IO[Any]
+            stderr = None # type: IO[Any]
+            if self.stdin:
+                if input:
+                    try:
+                        self.stdin.write(input)
+                    except IOError as e:
+                        if e.errno != errno.EPIPE and e.errno != errno.EINVAL:
+                            raise
+                self.stdin.close()
+            elif self.stdout:
+                stdout = _eintr_retry_call(self.stdout.read)
+                self.stdout.close()
+            elif self.stderr:
+                stderr = _eintr_retry_call(self.stderr.read)
+                self.stderr.close()
+            self.wait()
+            return (stdout, stderr)
+
+        return self._communicate(input)
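+    # Illustrative sketch (editorial addition): an assumed, minimal use of
+    # communicate() with both ends piped might look like
+    #
+    #     p = Popen(["cat"], stdin=PIPE, stdout=PIPE)   # hypothetical command
+    #     out, err = p.communicate(b"hello")            # err is None here
+    #
+    # With at most one pipe open the fast path above is taken; otherwise the
+    # platform-specific _communicate() below uses threads (Windows) or
+    # select/poll (POSIX).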
+
+
+    def poll(self) -> int:
+        return self._internal_poll()
+
+
+    if mswindows:
+        #
+        # Windows methods
+        #
+        def _get_handles(self, stdin: Any, stdout: Any,
+                         stderr: Any) -> Tuple[Any, Any, Any, Any, Any, Any]:
+            """Construct and return tuple with IO objects:
+            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
+            """
+            if stdin is None and stdout is None and stderr is None:
+                return (-1, -1, -1, -1, -1, -1)
+
+            p2cread, p2cwrite = -1, -1 # type: (Any, Any)
+            c2pread, c2pwrite = -1, -1 # type: (Any, Any)
+            errread, errwrite = -1, -1 # type: (Any, Any)
+
+            if stdin is None:
+                p2cread = _subprocess.GetStdHandle(_subprocess.STD_INPUT_HANDLE)
+                if p2cread is None:
+                    p2cread, _ = _subprocess.CreatePipe(None, 0)
+            elif stdin == PIPE:
+                p2cread, p2cwrite = _subprocess.CreatePipe(None, 0)
+            elif isinstance(stdin, int):
+                p2cread = msvcrt.get_osfhandle(stdin)
+            else:
+                # Assuming file-like object
+                p2cread = msvcrt.get_osfhandle(stdin.fileno())
+            p2cread = self._make_inheritable(p2cread)
+
+            if stdout is None:
+                c2pwrite = _subprocess.GetStdHandle(_subprocess.STD_OUTPUT_HANDLE)
+                if c2pwrite is None:
+                    _, c2pwrite = _subprocess.CreatePipe(None, 0)
+            elif stdout == PIPE:
+                c2pread, c2pwrite = _subprocess.CreatePipe(None, 0)
+            elif isinstance(stdout, int):
+                c2pwrite = msvcrt.get_osfhandle(stdout)
+            else:
+                # Assuming file-like object
+                c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
+            c2pwrite = self._make_inheritable(c2pwrite)
+
+            if stderr is None:
+                errwrite = _subprocess.GetStdHandle(_subprocess.STD_ERROR_HANDLE)
+                if errwrite is None:
+                    _, errwrite = _subprocess.CreatePipe(None, 0)
+            elif stderr == PIPE:
+                errread, errwrite = _subprocess.CreatePipe(None, 0)
+            elif stderr == STDOUT:
+                errwrite = c2pwrite
+            elif isinstance(stderr, int):
+                errwrite = msvcrt.get_osfhandle(stderr)
+            else:
+                # Assuming file-like object
+                errwrite = msvcrt.get_osfhandle(stderr.fileno())
+            errwrite = self._make_inheritable(errwrite)
+
+            return (p2cread, p2cwrite,
+                    c2pread, c2pwrite,
+                    errread, errwrite)
+
+
+        def _make_inheritable(self, handle: _subprocess.Handle) -> int:
+            """Return a duplicate of handle, which is inheritable"""
+            return _subprocess.DuplicateHandle(_subprocess.GetCurrentProcess(),
+                                handle, _subprocess.GetCurrentProcess(), 0, 1,
+                                _subprocess.DUPLICATE_SAME_ACCESS)
+
+
+        def _find_w9xpopen(self) -> str:
+            """Find and return absolut path to w9xpopen.exe"""
+            w9xpopen = os.path.join(
+                            os.path.dirname(_subprocess.GetModuleFileName(0)),
+                                    "w9xpopen.exe")
+            if not os.path.exists(w9xpopen):
+                # Eeek - file-not-found - possibly an embedding
+                # situation - see if we can locate it in sys.exec_prefix
+                w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix),
+                                        "w9xpopen.exe")
+                if not os.path.exists(w9xpopen):
+                    raise RuntimeError("Cannot locate w9xpopen.exe, which is "
+                                       "needed for Popen to work with your "
+                                       "shell or platform.")
+            return w9xpopen
+
+
+        def _execute_child(self, args: Sequence[str], executable: str,
+                           preexec_fn: Callable[[], Any], close_fds: Any,
+                           pass_fds: Any, cwd: str, env: Mapping[str, str],
+                           universal_newlines: int,
+                           startupinfo: STARTUPINFO, creationflags: int,
+                           shell: int,
+                           p2cread: Any, p2cwrite: Any,
+                           c2pread: Any, c2pwrite: Any,
+                           errread: Any, errwrite: Any,
+                           restore_signals: bool,
+                           start_new_session: bool) -> None:
+            """Execute program (MS Windows version)"""
+
+            assert not pass_fds, "pass_fds not supported on Windows."
+
+            if not isinstance(args, str):
+                args = list2cmdline(args)
+
+            # Process startup details
+            if startupinfo is None:
+                startupinfo = STARTUPINFO()
+            if -1 not in (p2cread, c2pwrite, errwrite):
+                startupinfo.dwFlags |= _subprocess.STARTF_USESTDHANDLES
+                startupinfo.hStdInput = p2cread
+                startupinfo.hStdOutput = c2pwrite
+                startupinfo.hStdError = errwrite
+
+            if shell:
+                startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW
+                startupinfo.wShowWindow = _subprocess.SW_HIDE
+                comspec = os.environ.get("COMSPEC", "cmd.exe")
+                args = '{} /c "{}"'.format(comspec, args)
+                if (_subprocess.GetVersion() >= 0x80000000 or
+                        os.path.basename(comspec).lower() == "command.com"):
+                    # Win9x, or using command.com on NT. We need to
+                    # use the w9xpopen intermediate program. For more
+                    # information, see KB Q150956
+                    # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
+                    w9xpopen = self._find_w9xpopen()
+                    args = '"%s" %s' % (w9xpopen, args)
+                    # Not passing CREATE_NEW_CONSOLE has been known to
+                    # cause random failures on win9x.  Specifically a
+                    # dialog: "Your program accessed mem currently in
+                    # use at xxx" and a hopeful warning about the
+                    # stability of your system.  Cost is Ctrl+C won't
+                    # kill children.
+                    creationflags |= _subprocess.CREATE_NEW_CONSOLE
+
+            # Start the process
+            try:
+                hp, ht, pid, tid = _subprocess.CreateProcess(executable,
+                                         cast(str, args),
+                                         # no special security
+                                         None, None,
+                                         int(not close_fds),
+                                         creationflags,
+                                         env,
+                                         cwd,
+                                         startupinfo)
+            except pywintypes.error as e:
+                # Translate pywintypes.error to WindowsError, which is
+                # a subclass of OSError.  FIXME: We should really
+                # translate errno using _sys_errlist (or similar), but
+                # how can this be done from Python?
+                raise WindowsError(*e.args)
+            finally:
+                # Child is launched. Close the parent's copy of those pipe
+                # handles that only the child should have open.  You need
+                # to make sure that no handles to the write end of the
+                # output pipe are maintained in this process or else the
+                # pipe will not close when the child process exits and the
+                # ReadFile will hang.
+                if p2cread != -1:
+                    p2cread.Close()
+                if c2pwrite != -1:
+                    c2pwrite.Close()
+                if errwrite != -1:
+                    errwrite.Close()
+
+            # Retain the process handle, but close the thread handle
+            self._child_created = True
+            self._handle = hp
+            self.pid = pid
+            ht.Close()
+
+        def _internal_poll(self, _deadstate: int = None) -> int:
+            """Check if child process has terminated.  Returns returncode
+            attribute.
+
+            This method is called by __del__, so it can only refer to objects
+            in its local scope.
+
+            """
+            return self._internal_poll_win(_deadstate)
+
+        from _subprocess import Handle
+
+        def _internal_poll_win(self, _deadstate: int = None,
+                _WaitForSingleObject: Callable[[Handle, int], int] =
+                               _subprocess.WaitForSingleObject,
+                _WAIT_OBJECT_0: int = _subprocess.WAIT_OBJECT_0,
+                _GetExitCodeProcess: Callable[[Handle], int] =
+                                    _subprocess.GetExitCodeProcess) -> int:
+            if self.returncode is None:
+                if _WaitForSingleObject(self._handle, 0) == _WAIT_OBJECT_0:
+                    self.returncode = _GetExitCodeProcess(self._handle)
+            return self.returncode
+
+
+        def wait(self) -> int:
+            """Wait for child process to terminate.  Returns returncode
+            attribute."""
+            if self.returncode is None:
+                _subprocess.WaitForSingleObject(self._handle,
+                                                _subprocess.INFINITE)
+                self.returncode = _subprocess.GetExitCodeProcess(self._handle)
+            return self.returncode
+
+
+        def _readerthread(self, fh: IO[AnyStr], buffer: List[AnyStr]) -> None:
+            buffer.append(fh.read())
+            fh.close()
+
+
+        def _communicate(self, input: Any) -> Tuple[Any, Any]:
+            stdout = cast(Any, None) # Return
+            stderr = cast(Any, None) # Return
+
+            if self.stdout:
+                stdout = []
+                stdout_thread = threading.Thread(target=self._readerthread,
+                                                 args=(self.stdout, stdout))
+                stdout_thread.daemon = True
+                stdout_thread.start()
+            if self.stderr:
+                stderr = []
+                stderr_thread = threading.Thread(target=self._readerthread,
+                                                 args=(self.stderr, stderr))
+                stderr_thread.daemon = True
+                stderr_thread.start()
+
+            if self.stdin:
+                if input is not None:
+                    try:
+                        self.stdin.write(input)
+                    except IOError as e:
+                        if e.errno != errno.EPIPE:
+                            raise
+                self.stdin.close()
+
+            if self.stdout:
+                stdout_thread.join()
+            if self.stderr:
+                stderr_thread.join()
+
+            # All data exchanged.  Translate lists into strings.
+            if stdout is not None:
+                stdout = stdout[0]
+            if stderr is not None:
+                stderr = stderr[0]
+
+            self.wait()
+            return (stdout, stderr)
+
+        def send_signal(self, sig: int) -> None:
+            """Send a signal to the process
+            """
+            if sig == signal.SIGTERM:
+                self.terminate()
+            elif sig == signal.CTRL_C_EVENT:
+                os.kill(self.pid, signal.CTRL_C_EVENT)
+            elif sig == signal.CTRL_BREAK_EVENT:
+                os.kill(self.pid, signal.CTRL_BREAK_EVENT)
+            else:
+                raise ValueError("Unsupported signal: {}".format(sig))
+
+        def terminate(self) -> None:
+            """Terminates the process
+            """
+            _subprocess.TerminateProcess(self._handle, 1)
+
+        def kill(self) -> None:
+            """Terminates the process
+            """
+            self.terminate()
+
+    else:
+        #
+        # POSIX methods
+        #
+        def _get_handles(self, stdin: Any, stdout: Any,
+                         stderr: Any) -> Tuple[Any, Any, Any, Any, Any, Any]:
+            """Construct and return tuple with IO objects:
+            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
+            """
+            p2cread, p2cwrite = -1, -1
+            c2pread, c2pwrite = -1, -1
+            errread, errwrite = -1, -1
+
+            if stdin is None:
+                pass
+            elif stdin == PIPE:
+                p2cread, p2cwrite = _create_pipe()
+            elif isinstance(stdin, int):
+                p2cread = stdin
+            else:
+                # Assuming file-like object
+                p2cread = stdin.fileno()
+
+            if stdout is None:
+                pass
+            elif stdout == PIPE:
+                c2pread, c2pwrite = _create_pipe()
+            elif isinstance(stdout, int):
+                c2pwrite = stdout
+            else:
+                # Assuming file-like object
+                c2pwrite = stdout.fileno()
+
+            if stderr is None:
+                pass
+            elif stderr == PIPE:
+                errread, errwrite = _create_pipe()
+            elif stderr == STDOUT:
+                errwrite = c2pwrite
+            elif isinstance(stderr, int):
+                errwrite = stderr
+            else:
+                # Assuming file-like object
+                errwrite = stderr.fileno()
+
+            return (p2cread, p2cwrite,
+                    c2pread, c2pwrite,
+                    errread, errwrite)
+
+
+        def _close_fds(self, fds_to_keep: Set[int]) -> None:
+            start_fd = 3
+            for fd in sorted(fds_to_keep):
+                if fd >= start_fd:
+                    os.closerange(start_fd, fd)
+                    start_fd = fd + 1
+            if start_fd <= MAXFD:
+                os.closerange(start_fd, MAXFD)
+
+
+        def _execute_child(self, args: Sequence[str], executable: str,
+                           preexec_fn: Callable[[], Any], close_fds: Any,
+                           pass_fds: Any, cwd: str, env: Mapping[str, str],
+                           universal_newlines: int,
+                           startupinfo: 'STARTUPINFO', creationflags: int,
+                           shell: int,
+                           p2cread: Any, p2cwrite: Any,
+                           c2pread: Any, c2pwrite: Any,
+                           errread: Any, errwrite: Any,
+                           restore_signals: bool,
+                           start_new_session: bool) -> None:
+            """Execute program (POSIX version)"""
+
+            if isinstance(args, str):
+                args = [args]
+            else:
+                args = list(args)
+
+            if shell:
+                args = ["/bin/sh", "-c"] + args
+                if executable:
+                    args[0] = executable
+
+            if executable is None:
+                executable = args[0]
+
+            # For transferring possible exec failure from child to parent.
+            # Data format: "exception name:hex errno:description"
+            # Pickle is not used; it is complex and involves memory allocation.
+            errpipe_read, errpipe_write = _create_pipe()
+            try:
+                try:
+
+                    if have_posixsubprocess:
+                        # We must avoid complex work that could involve
+                        # malloc or free in the child process to avoid
+                        # potential deadlocks, thus we do all this here
+                        # and pass it to fork_exec().
+
+                        if env is not None:
+                            env_list = [os.fsencode(k) + b'=' + os.fsencode(v)
+                                        for k, v in env.items()]
+                        else:
+                            env_list = None  # Use execv instead of execve.
+                        executable_enc = os.fsencode(executable)
+                        if os.path.dirname(executable_enc):
+                            executable_list = (executable_enc,) # type: tuple
+                        else:
+                            # This matches the behavior of os._execvpe().
+                            executable_list = tuple(
+                                os.path.join(os.fsencode(dir), executable_enc)
+                                for dir in os.get_exec_path(env))
+                        fds_to_keep = set(pass_fds)
+                        fds_to_keep.add(errpipe_write)
+                        self.pid = _posixsubprocess.fork_exec(
+                                args, executable_list,
+                                close_fds, sorted(fds_to_keep), cwd, env_list,
+                                p2cread, p2cwrite, c2pread, c2pwrite,
+                                errread, errwrite,
+                                errpipe_read, errpipe_write,
+                                restore_signals, start_new_session, preexec_fn)
+                        self._child_created = True
+                    else:
+                        # Pure Python implementation: It is not thread safe.
+                        # This implementation may deadlock in the child if your
+                        # parent process has any other threads running.
+
+                        gc_was_enabled = gc.isenabled()
+                        # Disable gc to avoid bug where gc -> file_dealloc ->
+                        # write to stderr -> hang.  See issue1336
+                        gc.disable()
+                        try:
+                            self.pid = os.fork()
+                        except:
+                            if gc_was_enabled:
+                                gc.enable()
+                            raise
+                        self._child_created = True
+                        if self.pid == 0:
+                            # Child
+                            try:
+                                # Close parent's pipe ends
+                                if p2cwrite != -1:
+                                    os.close(p2cwrite)
+                                if c2pread != -1:
+                                    os.close(c2pread)
+                                if errread != -1:
+                                    os.close(errread)
+                                os.close(errpipe_read)
+
+                                # When duping fds, if there arises a situation
+                                # where one of the fds is either 0, 1 or 2, it
+                                # is possible that it is overwritten (#12607).
+                                if c2pwrite == 0:
+                                    c2pwrite = os.dup(c2pwrite)
+                                if errwrite == 0 or errwrite == 1:
+                                    errwrite = os.dup(errwrite)
+
+                                # Dup fds for child
+                                def _dup2(a: int, b: int) -> None:
+                                    # dup2() removes the CLOEXEC flag but
+                                    # we must do it ourselves if dup2()
+                                    # would be a no-op (issue #10806).
+                                    if a == b:
+                                        _set_cloexec(a, False)
+                                    elif a != -1:
+                                        os.dup2(a, b)
+                                _dup2(p2cread, 0)
+                                _dup2(c2pwrite, 1)
+                                _dup2(errwrite, 2)
+
+                                # Close pipe fds.  Make sure we don't close the
+                                # same fd more than once, or standard fds.
+                                closed = set()  # type: Set[int]
+                                for fd in [p2cread, c2pwrite, errwrite]:
+                                    if fd > 2 and fd not in closed:
+                                        os.close(fd)
+                                        closed.add(fd)
+
+                                # Close all other fds, if asked for
+                                if close_fds:
+                                    fds_to_keep = set(pass_fds)
+                                    fds_to_keep.add(errpipe_write)
+                                    self._close_fds(fds_to_keep)
+
+
+                                if cwd is not None:
+                                    os.chdir(cwd)
+
+                                # This is a copy of Python/pythonrun.c
+                                # _Py_RestoreSignals().  If that were exposed
+                                # as a sys._py_restoresignals func it would be
+                                # better, but this pure Python implementation
+                                # isn't likely to be used much anymore.
+                                if restore_signals:
+                                    signals = ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ')
+                                    for sig in signals:
+                                        if hasattr(signal, sig):
+                                            signal.signal(getattr(signal, sig),
+                                                          signal.SIG_DFL)
+
+                                if start_new_session and hasattr(os, 'setsid'):
+                                    os.setsid()
+
+                                if preexec_fn:
+                                    preexec_fn()
+
+                                if env is None:
+                                    os.execvp(executable, args)
+                                else:
+                                    os.execvpe(executable, args, env)
+
+                            except:
+                                try:
+                                    exc_type, exc_value = sys.exc_info()[:2]
+                                    if isinstance(exc_value, OSError):
+                                        errno_num = exc_value.errno
+                                    else:
+                                        errno_num = 0
+                                    message = '%s:%x:%s' % (exc_type.__name__,
+                                                            errno_num, exc_value)
+                                    messageb = message.encode(errors="surrogatepass")
+                                    os.write(errpipe_write, messageb)
+                                except Exception:
+                                    # We MUST not allow anything odd happening
+                                    # above to prevent us from exiting below.
+                                    pass
+
+                            # This exitcode won't be reported to applications
+                            # so it really doesn't matter what we return.
+                            os._exit(255)
+
+                        # Parent
+                        if gc_was_enabled:
+                            gc.enable()
+                finally:
+                    # be sure the FD is closed no matter what
+                    os.close(errpipe_write)
+
+                if p2cread != -1 and p2cwrite != -1:
+                    os.close(p2cread)
+                if c2pwrite != -1 and c2pread != -1:
+                    os.close(c2pwrite)
+                if errwrite != -1 and errread != -1:
+                    os.close(errwrite)
+
+                # Wait for exec to fail or succeed; possibly raising an
+                # exception (limited in size)
+                data = bytearray()
+                while True:
+                    part = _eintr_retry_call(os.read, errpipe_read, 50000)
+                    data += part
+                    if not part or len(data) > 50000:
+                        break
+            finally:
+                # be sure the FD is closed no matter what
+                os.close(errpipe_read)
+
+            if data:
+                try:
+                    _eintr_retry_call(os.waitpid, self.pid, 0)
+                except OSError as e:
+                    if e.errno != errno.ECHILD:
+                        raise
+                try:
+                    (exception_name, hex_errno,
+                     err_msg_b) = bytes(data).split(b':', 2)
+                except ValueError:
+                    print('Bad exception data:', repr(data))
+                    exception_name = b'RuntimeError'
+                    hex_errno = b'0'
+                    err_msg_b = b'Unknown'
+                child_exception_type = getattr(
+                        builtins, exception_name.decode('ascii'),
+                        RuntimeError)
+                for fd in (p2cwrite, c2pread, errread):
+                    if fd != -1:
+                        os.close(fd)
+                err_msg = err_msg_b.decode(errors="surrogatepass")
+                if issubclass(child_exception_type, OSError) and hex_errno:
+                    errno_num = int(hex_errno, 16)
+                    if errno_num != 0:
+                        err_msg = os.strerror(errno_num)
+                        if errno_num == errno.ENOENT:
+                            err_msg += ': ' + repr(args[0])
+                    raise child_exception_type(errno_num, err_msg)
+                raise child_exception_type(err_msg)
+
+
+        def _handle_exitstatus(
+                self, sts: int,
+                _WIFSIGNALED: Callable[[int], bool] = os.WIFSIGNALED,
+                _WTERMSIG: Callable[[int], int] = os.WTERMSIG,
+                _WIFEXITED: Callable[[int], bool] = os.WIFEXITED,
+                _WEXITSTATUS: Callable[[int], int] = os.WEXITSTATUS) -> None:
+            # This method is called (indirectly) by __del__, so it cannot
+            # refer to anything outside of its local scope.
+            if _WIFSIGNALED(sts):
+                self.returncode = -_WTERMSIG(sts)
+            elif _WIFEXITED(sts):
+                self.returncode = _WEXITSTATUS(sts)
+            else:
+                # Should never happen
+                raise RuntimeError("Unknown child exit status!")
+
+
+        def _internal_poll(self, _deadstate: int = None) -> int:
+            """Check if child process has terminated.  Returns returncode
+            attribute.
+
+            This method is called by __del__, so it cannot reference anything
+            outside of the local scope (nor can any methods it calls).
+
+            """
+            return self._internal_poll_posix(_deadstate)
+
+        def _internal_poll_posix(self, _deadstate: int = None,
+                                 _waitpid: Callable[[int, int],
+                                                 Tuple[int, int]] = os.waitpid,
+                                 _WNOHANG: int = os.WNOHANG,
+                                 _os_error: Any = os.error) -> int:
+            if self.returncode is None:
+                try:
+                    pid, sts = _waitpid(self.pid, _WNOHANG)
+                    if pid == self.pid:
+                        self._handle_exitstatus(sts)
+                except _os_error:
+                    if _deadstate is not None:
+                        self.returncode = _deadstate
+            return self.returncode
+
+
+        def wait(self) -> int:
+            """Wait for child process to terminate.  Returns returncode
+            attribute."""
+            if self.returncode is None:
+                try:
+                    pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
+                except OSError as e:
+                    if e.errno != errno.ECHILD:
+                        raise
+                    # This happens if SIGCLD is set to be ignored or waiting
+                    # for child processes has otherwise been disabled for our
+                    # process.  This child is dead, we can't get the status.
+                    sts = 0
+                self._handle_exitstatus(sts)
+            return self.returncode
+
+
+        def _communicate(self, input: Any) -> Tuple[Any, Any]:
+            if self.stdin:
+                # Flush stdio buffer.  This might block, if the user has
+                # been writing to .stdin in an uncontrolled fashion.
+                self.stdin.flush()
+                if not input:
+                    self.stdin.close()
+
+            if _has_poll:
+                stdout, stderr = self._communicate_with_poll(input)
+            else:
+                stdout, stderr = self._communicate_with_select(input)
+
+            # All data exchanged.  Translate lists into strings.
+            if stdout is not None:
+                stdout2 = b''.join(stdout)
+            else:
+                stdout2 = None
+            if stderr is not None:
+                stderr2 = b''.join(stderr)
+            else:
+                stderr2 = None
+
+            # Translate newlines, if requested.
+            # This also turns bytes into strings.
+            stdout3 = cast(Any, stdout2)
+            stderr3 = cast(Any, stderr2)
+            if self.universal_newlines:
+                if stdout is not None:
+                    stdout3 = self._translate_newlines(
+                        stdout2, cast(TextIO, self.stdout).encoding)
+                if stderr is not None:
+                    stderr3 = self._translate_newlines(
+                        stderr2, cast(TextIO, self.stderr).encoding)
+
+            self.wait()
+            return (stdout3, stderr3)
+
+
+        def _communicate_with_poll(self, input: Any) -> Tuple[List[bytes],
+                                                              List[bytes]]:
+            stdout = None # type: List[bytes] # Return
+            stderr = None # type: List[bytes] # Return
+            fd2file = {}  # type: Dict[int, Any]
+            fd2output = {}  # type: Dict[int, List[bytes]]
+
+            poller = select.poll()
+            def register_and_append(file_obj: IO[Any], eventmask: int) -> None:
+                poller.register(file_obj.fileno(), eventmask)
+                fd2file[file_obj.fileno()] = file_obj
+
+            def close_unregister_and_remove(fd: int) -> None:
+                poller.unregister(fd)
+                fd2file[fd].close()
+                fd2file.pop(fd)
+
+            if self.stdin and input:
+                register_and_append(self.stdin, select.POLLOUT)
+
+            select_POLLIN_POLLPRI = select.POLLIN | select.POLLPRI
+            if self.stdout:
+                register_and_append(self.stdout, select_POLLIN_POLLPRI)
+                fd2output[self.stdout.fileno()] = stdout = []
+            if self.stderr:
+                register_and_append(self.stderr, select_POLLIN_POLLPRI)
+                fd2output[self.stderr.fileno()] = stderr = []
+
+            input_offset = 0
+            while fd2file:
+                try:
+                    ready = poller.poll()
+                except select.error as e:
+                    if e.args[0] == errno.EINTR:
+                        continue
+                    raise
+
+                # XXX Rewrite these to use non-blocking I/O on the
+                # file objects; they are no longer using C stdio!
+
+                for fd, mode in ready:
+                    if mode & select.POLLOUT:
+                        chunk = input[input_offset : input_offset + _PIPE_BUF]
+                        try:
+                            input_offset += os.write(fd, chunk)
+                        except OSError as e2:
+                            if e2.errno == errno.EPIPE:
+                                close_unregister_and_remove(fd)
+                            else:
+                                raise
+                        else:
+                            if input_offset >= len(input):
+                                close_unregister_and_remove(fd)
+                    elif mode & select_POLLIN_POLLPRI:
+                        data = os.read(fd, 4096)
+                        if not data:
+                            close_unregister_and_remove(fd)
+                        fd2output[fd].append(data)
+                    else:
+                        # Ignore hang up or errors.
+                        close_unregister_and_remove(fd)
+
+            return (stdout, stderr)
+
+
+        def _communicate_with_select(self, input: Any) -> Tuple[List[bytes],
+                                                                List[bytes]]:
+            read_set = []  # type: List[IO[Any]]
+            write_set = []  # type: List[IO[Any]]
+            stdout = None # type: List[bytes] # Return
+            stderr = None # type: List[bytes] # Return
+
+            if self.stdin and input:
+                write_set.append(self.stdin)
+            if self.stdout:
+                read_set.append(self.stdout)
+                stdout = []
+            if self.stderr:
+                read_set.append(self.stderr)
+                stderr = []
+
+            input_offset = 0
+            while read_set or write_set:
+                try:
+                    rlist, wlist, xlist = select.select(read_set, write_set, [])
+                except select.error as e:
+                    if e.args[0] == errno.EINTR:
+                        continue
+                    raise
+
+                # XXX Rewrite these to use non-blocking I/O on the
+                # file objects; they are no longer using C stdio!
+
+                if self.stdin in wlist:
+                    chunk = input[input_offset : input_offset + _PIPE_BUF]
+                    try:
+                        bytes_written = os.write(self.stdin.fileno(), chunk)
+                    except OSError as oe:
+                        if oe.errno == errno.EPIPE:
+                            self.stdin.close()
+                            write_set.remove(self.stdin)
+                        else:
+                            raise
+                    else:
+                        input_offset += bytes_written
+                        if input_offset >= len(input):
+                            self.stdin.close()
+                            write_set.remove(self.stdin)
+
+                if self.stdout in rlist:
+                    data = os.read(self.stdout.fileno(), 1024)
+                    if not data:
+                        self.stdout.close()
+                        read_set.remove(self.stdout)
+                    stdout.append(data)
+
+                if self.stderr in rlist:
+                    data = os.read(self.stderr.fileno(), 1024)
+                    if not data:
+                        self.stderr.close()
+                        read_set.remove(self.stderr)
+                    stderr.append(data)
+
+            return (stdout, stderr)
+
+
+        def send_signal(self, sig: int) -> None:
+            """Send a signal to the process
+            """
+            os.kill(self.pid, sig)
+
+        def terminate(self) -> None:
+            """Terminate the process with SIGTERM
+            """
+            self.send_signal(signal.SIGTERM)
+
+        def kill(self) -> None:
+            """Kill the process with SIGKILL
+            """
+            self.send_signal(signal.SIGKILL)
+
+
+def _demo_posix() -> None:
+    #
+    # Example 1: Simple redirection: Get process list
+    #
+    plist = Popen(["ps"], stdout=PIPE).communicate()[0]
+    print("Process list:")
+    print(plist)
+
+    #
+    # Example 2: Change uid before executing child
+    #
+    if os.getuid() == 0:
+        p = Popen(["id"], preexec_fn=lambda: os.setuid(100))
+        p.wait()
+
+    #
+    # Example 3: Connecting several subprocesses
+    #
+    print("Looking for 'hda'...")
+    p1 = Popen(["dmesg"], stdout=PIPE)
+    p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
+    print(repr(p2.communicate()[0]))
+
+    #
+    # Example 4: Catch execution error
+    #
+    print()
+    print("Trying a weird file...")
+    try:
+        print(Popen(["/this/path/does/not/exist"]).communicate())
+    except OSError as e:
+        if e.errno == errno.ENOENT:
+            print("The file didn't exist.  I thought so...")
+        else:
+            print("Error", e.errno)
+    else:
+        print("Gosh.  No error.", file=sys.stderr)
+
+
+def _demo_windows() -> None:
+    #
+    # Example 1: Connecting several subprocesses
+    #
+    print("Looking for 'PROMPT' in set output...")
+    p1 = Popen("set", stdout=PIPE, shell=True)
+    p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE)
+    print(repr(p2.communicate()[0]))
+
+    #
+    # Example 2: Simple execution of program
+    #
+    print("Executing calc...")
+    p = Popen("calc")
+    p.wait()
+
+
+if __name__ == "__main__":
+    if mswindows:
+        _demo_windows()
+    else:
+        _demo_posix()
diff --git a/test-data/stdlib-samples/3.2/tempfile.py b/test-data/stdlib-samples/3.2/tempfile.py
new file mode 100644
index 0000000..f8deef0
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/tempfile.py
@@ -0,0 +1,721 @@
+"""Temporary files.
+
+This module provides generic, low- and high-level interfaces for
+creating temporary files and directories.  The interfaces listed
+as "safe" just below can be used without fear of race conditions.
+Those listed as "unsafe" cannot, and are provided for backward
+compatibility only.
+
+This module also provides some data items to the user:
+
+  TMP_MAX  - maximum number of names that will be tried before
+             giving up.
+  template - the default prefix for all temporary names.
+             You may change this to control the default prefix.
+  tempdir  - If this is set to a string before the first use of
+             any routine from this module, it will be considered as
+             another candidate location to store temporary files.
+"""
+
+__all__ = [
+    "NamedTemporaryFile", "TemporaryFile", # high level safe interfaces
+    "SpooledTemporaryFile", "TemporaryDirectory",
+    "mkstemp", "mkdtemp",                  # low level safe interfaces
+    "mktemp",                              # deprecated unsafe interface
+    "TMP_MAX", "gettempprefix",            # constants
+    "tempdir", "gettempdir"
+   ]
+
+
+# Imports.
+
+import warnings as _warnings
+import sys as _sys
+import io as _io
+import os as _os
+import errno as _errno
+from random import Random as _Random
+
+from typing import (
+    Any as _Any, Callable as _Callable, Iterator as _Iterator,
+    List as _List, Tuple as _Tuple, Dict as _Dict, Iterable as _Iterable,
+    IO as _IO, cast as _cast, Optional as _Optional, Type as _Type,
+)
+from types import TracebackType as _TracebackType
+
+try:
+    import fcntl as _fcntl
+except ImportError:
+    def _set_cloexec(fd: int) -> None:
+        pass
+else:
+    def _set_cloexec(fd: int) -> None:
+        try:
+            flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0)
+        except IOError:
+            pass
+        else:
+            # flags read successfully, modify
+            flags |= _fcntl.FD_CLOEXEC
+            _fcntl.fcntl(fd, _fcntl.F_SETFD, flags)
+
+
+try:
+    import _thread
+    _allocate_lock = _thread.allocate_lock # type: _Callable[[], _Any]
+except ImportError:
+    import _dummy_thread
+    _allocate_lock = _dummy_thread.allocate_lock
+
+_text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL
+if hasattr(_os, 'O_NOINHERIT'):
+    _text_openflags |= _os.O_NOINHERIT
+if hasattr(_os, 'O_NOFOLLOW'):
+    _text_openflags |= _os.O_NOFOLLOW
+
+_bin_openflags = _text_openflags
+if hasattr(_os, 'O_BINARY'):
+    _bin_openflags |= _os.O_BINARY
+
+if hasattr(_os, 'TMP_MAX'):
+    TMP_MAX = _os.TMP_MAX
+else:
+    TMP_MAX = 10000
+
+template = "tmp"
+
+# Internal routines.
+
+_once_lock = _allocate_lock()
+
+if hasattr(_os, "lstat"):
+    _stat = _os.lstat # type: _Callable[[str], object]
+elif hasattr(_os, "stat"):
+    _stat = _os.stat
+else:
+    # Fallback.  All we need is something that raises os.error if the
+    # file doesn't exist.
+    def __stat(fn: str) -> object:
+        try:
+            f = open(fn)
+        except IOError:
+            raise _os.error()
+        f.close()
+        return None
+    _stat = __stat
+
+def _exists(fn: str) -> bool:
+    try:
+        _stat(fn)
+    except _os.error:
+        return False
+    else:
+        return True
+
+class _RandomNameSequence(_Iterator[str]):
+    """An instance of _RandomNameSequence generates an endless
+    sequence of unpredictable strings which can safely be incorporated
+    into file names.  Each string is six characters long.  Multiple
+    threads can safely use the same instance at the same time.
+
+    _RandomNameSequence is an iterator."""
+
+    characters = "abcdefghijklmnopqrstuvwxyz0123456789_"
+
+    @property
+    def rng(self) -> _Random:
+        cur_pid = _os.getpid()
+        if cur_pid != getattr(self, '_rng_pid', None):
+            self._rng = _Random()
+            self._rng_pid = cur_pid
+        return self._rng
+
+    def __iter__(self) -> _Iterator[str]:
+        return self
+
+    def __next__(self) -> str:
+        c = self.characters
+        choose = self.rng.choice
+        letters = [choose(c) for dummy in "123456"]
+        return ''.join(letters)
+
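+# Illustrative note (editorial addition): _RandomNameSequence is consumed
+# internally via next(); a hedged example of what it yields:
+#
+#     namer = _RandomNameSequence()
+#     next(namer)        # e.g. 'k3v_0a' -- six characters drawn from `characters`
+#
+# The per-PID rng property above keeps a forked child from repeating its
+# parent's sequence.
+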
+def _candidate_tempdir_list() -> _List[str]:
+    """Generate a list of candidate temporary directories which
+    _get_default_tempdir will try."""
+
+    dirlist = [] # type: _List[str]
+
+    # First, try the environment.
+    for envname in 'TMPDIR', 'TEMP', 'TMP':
+        dirname = _os.getenv(envname)
+        if dirname: dirlist.append(dirname)
+
+    # Failing that, try OS-specific locations.
+    if _os.name == 'nt':
+        dirlist.extend([ r'c:\temp', r'c:\tmp', r'\temp', r'\tmp' ])
+    else:
+        dirlist.extend([ '/tmp', '/var/tmp', '/usr/tmp' ])
+
+    # As a last resort, the current directory.
+    try:
+        dirlist.append(_os.getcwd())
+    except (AttributeError, _os.error):
+        dirlist.append(_os.curdir)
+
+    return dirlist
+
+def _get_default_tempdir() -> str:
+    """Calculate the default directory to use for temporary files.
+    This routine should be called exactly once.
+
+    We determine whether or not a candidate temp dir is usable by
+    trying to create and write to a file in that directory.  If this
+    is successful, the test file is deleted.  To prevent denial of
+    service, the name of the test file must be randomized."""
+
+    namer = _RandomNameSequence()
+    dirlist = _candidate_tempdir_list()
+
+    for dir in dirlist:
+        if dir != _os.curdir:
+            dir = _os.path.normcase(_os.path.abspath(dir))
+        # Try only a few names per directory.
+        for seq in range(100):
+            name = next(namer)
+            filename = _os.path.join(dir, name)
+            try:
+                fd = _os.open(filename, _bin_openflags, 0o600)
+                fp = _io.open(fd, 'wb')
+                fp.write(b'blat')
+                fp.close()
+                _os.unlink(filename)
+                fp = fd = None
+                return dir
+            except (OSError, IOError) as e:
+                if e.args[0] != _errno.EEXIST:
+                    break # no point trying more names in this directory
+                pass
+    raise IOError(_errno.ENOENT,
+                  "No usable temporary directory found in %s" % dirlist)
+
+_name_sequence = None # type: _RandomNameSequence
+
+def _get_candidate_names() -> _RandomNameSequence:
+    """Common setup sequence for all user-callable interfaces."""
+
+    global _name_sequence
+    if _name_sequence is None:
+        _once_lock.acquire()
+        try:
+            if _name_sequence is None:
+                _name_sequence = _RandomNameSequence()
+        finally:
+            _once_lock.release()
+    return _name_sequence
+
+
+def _mkstemp_inner(dir: str, pre: str, suf: str,
+                   flags: int) -> _Tuple[int, str]:
+    """Code common to mkstemp, TemporaryFile, and NamedTemporaryFile."""
+
+    names = _get_candidate_names()
+
+    for seq in range(TMP_MAX):
+        name = next(names)
+        file = _os.path.join(dir, pre + name + suf)
+        try:
+            fd = _os.open(file, flags, 0o600)
+            _set_cloexec(fd)
+            return (fd, _os.path.abspath(file))
+        except OSError as e:
+            if e.errno == _errno.EEXIST:
+                continue # try again
+            raise
+
+    raise IOError(_errno.EEXIST, "No usable temporary file name found")
+
+
+# User visible interfaces.
+
+def gettempprefix() -> str:
+    """Accessor for tempdir.template."""
+    return template
+
+tempdir = None # type: str
+
+def gettempdir() -> str:
+    """Accessor for tempfile.tempdir."""
+    global tempdir
+    if tempdir is None:
+        _once_lock.acquire()
+        try:
+            if tempdir is None:
+                tempdir = _get_default_tempdir()
+        finally:
+            _once_lock.release()
+    return tempdir
+
+def mkstemp(suffix: str = "", prefix: str = template, dir: str = None,
+            text: bool = False) -> _Tuple[int, str]:
+    """User-callable function to create and return a unique temporary
+    file.  The return value is a pair (fd, name) where fd is the
+    file descriptor returned by os.open, and name is the filename.
+
+    If 'suffix' is specified, the file name will end with that suffix,
+    otherwise there will be no suffix.
+
+    If 'prefix' is specified, the file name will begin with that prefix,
+    otherwise a default prefix is used.
+
+    If 'dir' is specified, the file will be created in that directory,
+    otherwise a default directory is used.
+
+    If 'text' is specified and true, the file is opened in text
+    mode.  Else (the default) the file is opened in binary mode.  On
+    some operating systems, this makes no difference.
+
+    The file is readable and writable only by the creating user ID.
+    If the operating system uses permission bits to indicate whether a
+    file is executable, the file is executable by no one. The file
+    descriptor is not inherited by children of this process.
+
+    Caller is responsible for deleting the file when done with it.
+    """
+
+    if dir is None:
+        dir = gettempdir()
+
+    if text:
+        flags = _text_openflags
+    else:
+        flags = _bin_openflags
+
+    return _mkstemp_inner(dir, prefix, suffix, flags)
+
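+# Usage sketch (editorial addition; arguments are assumptions): mkstemp()
+# returns an open descriptor plus the path, and the caller owns both:
+#
+#     fd, path = mkstemp(suffix=".log", prefix="app-")
+#     try:
+#         _os.write(fd, b"data")
+#     finally:
+#         _os.close(fd)
+#         _os.unlink(path)
+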
+
+def mkdtemp(suffix: str = "", prefix: str = template, dir: str = None) -> str:
+    """User-callable function to create and return a unique temporary
+    directory.  The return value is the pathname of the directory.
+
+    Arguments are as for mkstemp, except that the 'text' argument is
+    not accepted.
+
+    The directory is readable, writable, and searchable only by the
+    creating user.
+
+    Caller is responsible for deleting the directory when done with it.
+    """
+
+    if dir is None:
+        dir = gettempdir()
+
+    names = _get_candidate_names()
+
+    for seq in range(TMP_MAX):
+        name = next(names)
+        file = _os.path.join(dir, prefix + name + suffix)
+        try:
+            _os.mkdir(file, 0o700)
+            return file
+        except OSError as e:
+            if e.errno == _errno.EEXIST:
+                continue # try again
+            raise
+
+    raise IOError(_errno.EEXIST, "No usable temporary directory name found")
+
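+# Usage sketch (editorial addition): the caller removes the directory created
+# by mkdtemp() when done with it, e.g.
+#
+#     path = mkdtemp(prefix="build-")
+#     ...                      # work inside the directory
+#     _os.rmdir(path)          # or shutil.rmtree() for non-empty trees
+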
+def mktemp(suffix: str = "", prefix: str = template, dir: str = None) -> str:
+    """User-callable function to return a unique temporary file name.  The
+    file is not created.
+
+    Arguments are as for mkstemp, except that the 'text' argument is
+    not accepted.
+
+    This function is unsafe and should not be used.  The file name
+    refers to a file that did not exist at some point, but by the time
+    you get around to creating it, someone else may have beaten you to
+    the punch.
+    """
+
+##    from warnings import warn as _warn
+##    _warn("mktemp is a potential security risk to your program",
+##          RuntimeWarning, stacklevel=2)
+
+    if dir is None:
+        dir = gettempdir()
+
+    names = _get_candidate_names()
+    for seq in range(TMP_MAX):
+        name = next(names)
+        file = _os.path.join(dir, prefix + name + suffix)
+        if not _exists(file):
+            return file
+
+    raise IOError(_errno.EEXIST, "No usable temporary filename found")
+
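+# Editorial note (not upstream code): because mktemp() only returns a name,
+# another process can create that path between the _exists() check and the
+# caller's own open().  Prefer mkstemp()/NamedTemporaryFile(), e.g.
+#
+#     fd, path = mkstemp()     # creates the file atomically with O_EXCL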
+
+class _TemporaryFileWrapper:
+    """Temporary file wrapper
+
+    This class provides a wrapper around files opened for
+    temporary use.  In particular, it seeks to automatically
+    remove the file when it is no longer needed.
+    """
+
+    def __init__(self, file: _IO[_Any], name: str,
+                 delete: bool = True) -> None:
+        self.file = file
+        self.name = name
+        self.close_called = False
+        self.delete = delete
+
+        if _os.name != 'nt':
+            # Cache the unlinker so we don't get spurious errors at
+            # shutdown when the module-level "os" is None'd out.  Note
+            # that this must be referenced as self.unlink, because the
+            # name TemporaryFileWrapper may also get None'd out before
+            # __del__ is called.
+            self.unlink = _os.unlink
+
+    def __getattr__(self, name: str) -> _Any:
+        # Attribute lookups are delegated to the underlying file
+        # and cached for non-numeric results
+        # (i.e. methods are cached, closed and friends are not)
+        file = _cast(_Any, self).__dict__['file'] # type: _IO[_Any]
+        a = getattr(file, name)
+        if not isinstance(a, int):
+            setattr(self, name, a)
+        return a
+
+    # The underlying __enter__ method returns the wrong object
+    # (self.file) so override it to return the wrapper
+    def __enter__(self) -> '_TemporaryFileWrapper':
+        self.file.__enter__()
+        return self
+
+    # iter() doesn't use __getattr__ to find the __iter__ method
+    def __iter__(self) -> _Iterator[_Any]:
+        return iter(self.file)
+
+    # NT provides delete-on-close as a primitive, so we don't need
+    # the wrapper to do anything special.  We still use it so that
+    # file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile.
+    if _os.name != 'nt':
+        def close(self) -> None:
+            if not self.close_called:
+                self.close_called = True
+                self.file.close()
+                if self.delete:
+                    self.unlink(self.name)
+
+        def __del__(self) -> None:
+            self.close()
+
+        # Need to trap __exit__ as well to ensure the file gets
+        # deleted when used in a with statement
+        def __exit__(self, exc: _Type[BaseException], value: BaseException,
+                     tb: _Optional[_TracebackType]) -> bool:
+            result = self.file.__exit__(exc, value, tb)
+            self.close()
+            return result
+    else:
+        def __exit__(self, exc: _Type[BaseException], value: BaseException,
+                     tb: _Optional[_TracebackType]) -> bool:
+            self.file.__exit__(exc, value, tb)
+            return False
+
+
+def NamedTemporaryFile(mode: str = 'w+b', buffering: int = -1,
+                       encoding: str = None, newline: str = None,
+                       suffix: str = "", prefix: str = template,
+                       dir: str = None, delete: bool = True) -> _IO[_Any]:
+    """Create and return a temporary file.
+    Arguments:
+    'prefix', 'suffix', 'dir' -- as for mkstemp.
+    'mode' -- the mode argument to io.open (default "w+b").
+    'buffering' -- the buffer size argument to io.open (default -1).
+    'encoding' -- the encoding argument to io.open (default None)
+    'newline' -- the newline argument to io.open (default None)
+    'delete' -- whether the file is deleted on close (default True).
+    The file is created as mkstemp() would do it.
+
+    Returns an object with a file-like interface; the name of the file
+    is accessible as file.name.  The file will be automatically deleted
+    when it is closed unless the 'delete' argument is set to False.
+    """
+
+    if dir is None:
+        dir = gettempdir()
+
+    flags = _bin_openflags
+
+    # Setting O_TEMPORARY in the flags causes the OS to delete
+    # the file when it is closed.  This is only supported by Windows.
+    if _os.name == 'nt' and delete:
+        flags |= _os.O_TEMPORARY
+
+    (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
+    file = _io.open(fd, mode, buffering=buffering,
+                    newline=newline, encoding=encoding)
+
+    return _cast(_IO[_Any], _TemporaryFileWrapper(file, name, delete))
+
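+
+# Illustrative sketch, not part of the upstream sample: typical use of
+# NamedTemporaryFile as a context manager.  The helper name below is made up
+# for the example and is never called.
+def _example_named_temporary_file() -> None:
+    # The default mode is 'w+b', so bytes are written; the wrapper's __exit__
+    # closes the file and, because delete defaults to True, removes it (or
+    # lets O_TEMPORARY do so on Windows).
+    with NamedTemporaryFile(suffix=".txt") as f:
+        f.write(b"scratch data")
+        f.flush()
+        print(f.name)  # the path stays valid while the file is open
+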
+if _os.name != 'posix' or _sys.platform == 'cygwin':
+    # On non-POSIX and Cygwin systems, assume that we cannot unlink a file
+    # while it is open.
+    TemporaryFile = NamedTemporaryFile
+
+else:
+    def _TemporaryFile(mode: str = 'w+b', buffering: int = -1,
+                       encoding: str = None, newline: str = None,
+                       suffix: str = "", prefix: str = template,
+                       dir: str = None, delete: bool = True) -> _IO[_Any]:
+        """Create and return a temporary file.
+        Arguments:
+        'prefix', 'suffix', 'dir' -- as for mkstemp.
+        'mode' -- the mode argument to io.open (default "w+b").
+        'buffering' -- the buffer size argument to io.open (default -1).
+        'encoding' -- the encoding argument to io.open (default None)
+        'newline' -- the newline argument to io.open (default None)
+        The file is created as mkstemp() would do it.
+
+        Returns an object with a file-like interface.  The file has no
+        name, and will cease to exist when it is closed.
+        """
+
+        if dir is None:
+            dir = gettempdir()
+
+        flags = _bin_openflags
+
+        (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
+        try:
+            _os.unlink(name)
+            return _io.open(fd, mode, buffering=buffering,
+                            newline=newline, encoding=encoding)
+        except:
+            _os.close(fd)
+            raise
+    TemporaryFile = _TemporaryFile
+
+class SpooledTemporaryFile:
+    """Temporary file wrapper, specialized to switch from
+    StringIO to a real file when it exceeds a certain size or
+    when a fileno is needed.
+    """
+    _rolled = False
+    _file = None  # type: _Any   # BytesIO, StringIO or TemporaryFile
+
+    def __init__(self, max_size: int = 0, mode: str = 'w+b',
+                 buffering: int = -1, encoding: str = None,
+                 newline: str = None, suffix: str = "",
+                 prefix: str = template, dir: str = None) -> None:
+        if 'b' in mode:
+            self._file = _io.BytesIO()
+        else:
+            # Setting newline="\n" avoids newline translation;
+            # this is important because otherwise on Windows we'd
+            # get double newline translation upon rollover().
+            self._file = _io.StringIO(newline="\n")
+        self._max_size = max_size
+        self._rolled = False
+        self._TemporaryFileArgs = {
+                                   'mode': mode, 'buffering': buffering,
+                                   'suffix': suffix, 'prefix': prefix,
+                                   'encoding': encoding, 'newline': newline,
+                                   'dir': dir} # type: _Dict[str, _Any]
+
+    def _check(self, file: _IO[_Any]) -> None:
+        if self._rolled: return
+        max_size = self._max_size
+        if max_size and file.tell() > max_size:
+            self.rollover()
+
+    def rollover(self) -> None:
+        if self._rolled: return
+        file = self._file
+        newfile = self._file = TemporaryFile(**self._TemporaryFileArgs)
+        self._TemporaryFileArgs = None
+
+        newfile.write(file.getvalue())
+        newfile.seek(file.tell(), 0)
+
+        self._rolled = True
+
+    # The method caching trick from NamedTemporaryFile
+    # won't work here, because _file may change from a
+    # _StringIO instance to a real file. So we list
+    # all the methods directly.
+
+    # Context management protocol
+    def __enter__(self) -> 'SpooledTemporaryFile':
+        if self._file.closed:
+            raise ValueError("Cannot enter context with closed file")
+        return self
+
+    def __exit__(self, exc: type, value: BaseException,
+                 tb: _TracebackType) -> bool:
+        self._file.close()
+        return False
+
+    # file protocol
+    def __iter__(self) -> _Iterable[_Any]:
+        return self._file.__iter__()
+
+    def close(self) -> None:
+        self._file.close()
+
+    @property
+    def closed(self) -> bool:
+        return self._file.closed
+
+    @property
+    def encoding(self) -> str:
+        return self._file.encoding
+
+    def fileno(self) -> int:
+        self.rollover()
+        return self._file.fileno()
+
+    def flush(self) -> None:
+        self._file.flush()
+
+    def isatty(self) -> bool:
+        return self._file.isatty()
+
+    @property
+    def mode(self) -> str:
+        return self._file.mode
+
+    @property
+    def name(self) -> str:
+        return self._file.name
+
+    @property
+    def newlines(self) -> _Any:
+        return self._file.newlines
+
+    #def next(self):
+    #    return self._file.next
+
+    def read(self, n: int = -1) -> _Any:
+        return self._file.read(n)
+
+    def readline(self, limit: int = -1) -> _Any:
+        return self._file.readline(limit)
+
+    def readlines(self, *args) -> _List[_Any]:
+        return self._file.readlines(*args)
+
+    def seek(self, offset: int, whence: int = 0) -> None:
+        self._file.seek(offset, whence)
+
+    @property
+    def softspace(self) -> bool:
+        return self._file.softspace
+
+    def tell(self) -> int:
+        return self._file.tell()
+
+    def truncate(self) -> None:
+        self._file.truncate()
+
+    def write(self, s: _Any) -> int:
+        file = self._file # type: _IO[_Any]
+        rv = file.write(s)
+        self._check(file)
+        return rv
+
+    def writelines(self, iterable: _Iterable[_Any]) -> None:
+        file = self._file # type: _IO[_Any]
+        file.writelines(iterable)
+        self._check(file)
+
+    #def xreadlines(self, *args) -> _Any:
+    #    return self._file.xreadlines(*args)
+
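+
+# Illustrative sketch, not part of the upstream sample: shows the in-memory
+# buffer rolling over to a real temporary file once max_size is exceeded.
+# The helper name is made up for the example and is never called.
+def _example_spooled_temporary_file() -> None:
+    f = SpooledTemporaryFile(max_size=10)
+    f.write(b"tiny")                 # 4 bytes: still held in a BytesIO buffer
+    f.write(b" and then some more")  # past max_size, so write() triggers rollover()
+    # fileno() would also have forced the rollover, since a real file
+    # descriptor requires a real file.
+    f.close()
+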
+
+class TemporaryDirectory(object):
+    """Create and return a temporary directory.  This has the same
+    behavior as mkdtemp but can be used as a context manager.  For
+    example:
+
+        with TemporaryDirectory() as tmpdir:
+            ...
+
+    Upon exiting the context, the directory and everything contained
+    in it are removed.
+    """
+
+    def __init__(self, suffix: str = "", prefix: str = template,
+                 dir: str = None) -> None:
+        self._closed = False
+        self.name = None # type: str # Handle mkdtemp throwing an exception
+        self.name = mkdtemp(suffix, prefix, dir)
+
+        # XXX (ncoghlan): The following code attempts to make
+        # this class tolerant of the module nulling out process
+        # that happens during CPython interpreter shutdown
+        # Alas, it doesn't actually manage it. See issue #10188
+        self._listdir = _os.listdir
+        self._path_join = _os.path.join
+        self._isdir = _os.path.isdir
+        self._islink = _os.path.islink
+        self._remove = _os.remove
+        self._rmdir = _os.rmdir
+        self._os_error = _os.error
+        self._warn = _warnings.warn
+
+    def __repr__(self) -> str:
+        return "<{} {!r}>".format(self.__class__.__name__, self.name)
+
+    def __enter__(self) -> str:
+        return self.name
+
+    def cleanup(self, _warn: bool = False) -> None:
+        if self.name and not self._closed:
+            try:
+                self._rmtree(self.name)
+            except (TypeError, AttributeError) as ex:
+                # Issue #10188: Emit a warning on stderr
+                # if the directory could not be cleaned
+                # up due to missing globals
+                if "None" not in str(ex):
+                    raise
+                print("ERROR: {!r} while cleaning up {!r}".format(ex, self,),
+                      file=_sys.stderr)
+                return
+            self._closed = True
+            if _warn:
+                self._warn("Implicitly cleaning up {!r}".format(self),
+                           ResourceWarning)
+
+    def __exit__(self, exc: type, value: BaseException,
+                 tb: _TracebackType) -> bool:
+        self.cleanup()
+        return False
+
+    def __del__(self) -> None:
+        # Issue a ResourceWarning if implicit cleanup needed
+        self.cleanup(_warn=True)
+
+    def _rmtree(self, path: str) -> None:
+        # Essentially a stripped down version of shutil.rmtree.  We can't
+        # use globals because they may be None'ed out at shutdown.
+        for name in self._listdir(path):
+            fullname = self._path_join(path, name)
+            try:
+                isdir = self._isdir(fullname) and not self._islink(fullname)
+            except self._os_error:
+                isdir = False
+            if isdir:
+                self._rmtree(fullname)
+            else:
+                try:
+                    self._remove(fullname)
+                except self._os_error:
+                    pass
+        try:
+            self._rmdir(path)
+        except self._os_error:
+            pass
diff --git a/test-data/stdlib-samples/3.2/test/__init__.py b/test-data/stdlib-samples/3.2/test/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test-data/stdlib-samples/3.2/test/randv2_32.pck b/test-data/stdlib-samples/3.2/test/randv2_32.pck
new file mode 100644
index 0000000..587ab24
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/randv2_32.pck
@@ -0,0 +1,633 @@
+crandom
+Random
+p0
+(tRp1
+(I2
+(I-2147483648
+I-845974985
+I-1294090086
+I1193659239
+I-1849481736
+I-946579732
+I-34406770
+I1749049471
+I1997774682
+I1432026457
+I1288127073
+I-943175655
+I-1718073964
+I339993548
+I-1045260575
+I582505037
+I-1555108250
+I-1114765620
+I1578648750
+I-350384412
+I-20845848
+I-288255314
+I738790953
+I1901249641
+I1999324672
+I-277361068
+I-1515885839
+I2061761596
+I-809068089
+I1287981136
+I258129492
+I-6303745
+I-765148337
+I1090344911
+I1653434703
+I-1242923628
+I1639171313
+I-1870042660
+I-1655014050
+I345609048
+I2093410138
+I1963263374
+I-2122098342
+I1336859961
+I-810942729
+I945857753
+I2103049942
+I623922684
+I1418349549
+I690877342
+I754973107
+I-1605111847
+I1607137813
+I-1704917131
+I1317536428
+I1714882872
+I-1665385120
+I1823694397
+I-1790836866
+I-1696724812
+I-603979847
+I-498599394
+I-341265291
+I927388804
+I1778562135
+I1716895781
+I1023198122
+I1726145967
+I941955525
+I1240148950
+I-1929634545
+I-1288147083
+I-519318335
+I754559777
+I-707571958
+I374604022
+I420424061
+I-1095443486
+I1621934944
+I-1220502522
+I-140049608
+I-918917122
+I304341024
+I-1637446057
+I-353934485
+I1973436235
+I433380241
+I-686759465
+I-2111563154
+I-573422032
+I804304541
+I1513063483
+I1417381689
+I-804778729
+I211756408
+I544537322
+I890881641
+I150378374
+I1765739392
+I1011604116
+I584889095
+I1400520554
+I413747808
+I-1741992587
+I-1882421574
+I-1373001903
+I-1885348538
+I903819480
+I1083220038
+I-1318105424
+I1740421404
+I1693089625
+I775965557
+I1319608037
+I-2127475785
+I-367562895
+I-1416273451
+I1693000327
+I-1217438421
+I834405522
+I-128287275
+I864057548
+I-973917356
+I7304111
+I1712253182
+I1353897741
+I672982288
+I1778575559
+I-403058377
+I-38540378
+I-1393713496
+I13193171
+I1127196200
+I205176472
+I-2104790506
+I299985416
+I1403541685
+I-1018270667
+I-1980677490
+I-1182625797
+I1637015181
+I-1795357414
+I1514413405
+I-924516237
+I-1841873650
+I-1014591269
+I1576616065
+I-1319103135
+I-120847840
+I2062259778
+I-9285070
+I1160890300
+I-575137313
+I-1509108275
+I46701926
+I-287560914
+I-256824960
+I577558250
+I900598310
+I944607867
+I2121154920
+I-1170505192
+I-1347170575
+I77247778
+I-1899015765
+I1234103327
+I1027053658
+I1934632322
+I-792031234
+I1147322536
+I1290655117
+I1002059715
+I1325898538
+I896029793
+I-790940694
+I-980470721
+I-1922648255
+I-951672814
+I291543943
+I1158740218
+I-1959023736
+I-1977185236
+I1527900076
+I514104195
+I-814154113
+I-593157883
+I-1023704660
+I1285688377
+I-2117525386
+I768954360
+I-38676846
+I-799848659
+I-1305517259
+I-1938213641
+I-462146758
+I-1663302892
+I1899591069
+I-22935388
+I-275856976
+I-443736893
+I-739441156
+I93862068
+I-838105669
+I1735629845
+I-817484206
+I280814555
+I1753547179
+I1811123479
+I1974543632
+I-48447465
+I-642694345
+I-531149613
+I518698953
+I-221642627
+I-686519187
+I776644303
+I257774400
+I-1499134857
+I-1055273455
+I-237023943
+I1981752330
+I-917671662
+I-372905983
+I1588058420
+I1171936660
+I-1730977121
+I1360028989
+I1769469287
+I1910709542
+I-852692959
+I1396944667
+I-1723999155
+I-310975435
+I-1965453954
+I-1636858570
+I2005650794
+I680293715
+I1355629386
+I844514684
+I-1909152807
+I-808646074
+I1936510018
+I1134413810
+I-143411047
+I-1478436304
+I1394969244
+I-1170110660
+I1963112086
+I-1518351049
+I-1506287443
+I-455023090
+I-855366028
+I-1746785568
+I933990882
+I-703625141
+I-285036872
+I188277905
+I1471578620
+I-981382835
+I-586974220
+I945619758
+I1608778444
+I-1708548066
+I-1897629320
+I-42617810
+I-836840790
+I539154487
+I-235706962
+I332074418
+I-575700589
+I1534608003
+I632116560
+I-1819760653
+I642052958
+I-722391771
+I-1104719475
+I-1196847084
+I582413973
+I1563394876
+I642007944
+I108989456
+I361625014
+I677308625
+I-1806529496
+I-959050708
+I-1858251070
+I-216069832
+I701624579
+I501238033
+I12287030
+I1895107107
+I2089098638
+I-874806230
+I1236279203
+I563718890
+I-544352489
+I-1879707498
+I1767583393
+I-1776604656
+I-693294301
+I-88882831
+I169303357
+I1299196152
+I-1122791089
+I-379157172
+I1934671851
+I1575736961
+I-19573174
+I-1401511009
+I9305167
+I-1115174467
+I1670735537
+I1226436501
+I-2004524535
+I1767463878
+I-1722855079
+I-559413926
+I1529810851
+I1201272087
+I-1297130971
+I-1188149982
+I1396557188
+I-370358342
+I-1006619702
+I1600942463
+I906087130
+I-76991909
+I2069580179
+I-1674195181
+I-2098404729
+I-940972459
+I-573399187
+I-1930386277
+I-721311199
+I-647834744
+I1452181671
+I688681916
+I1812793731
+I1704380620
+I-1389615179
+I866287837
+I-1435265007
+I388400782
+I-147986600
+I-1613598851
+I-1040347408
+I782063323
+I-239282031
+I-575966722
+I-1865208174
+I-481365146
+I579572803
+I-1239481494
+I335361280
+I-429722947
+I1881772789
+I1908103808
+I1653690013
+I-1668588344
+I1933787953
+I-2033480609
+I22162797
+I-1516527040
+I-461232482
+I-16201372
+I-2043092030
+I114990337
+I-1524090084
+I1456374020
+I458606440
+I-1928083218
+I227773125
+I-1129028159
+I1678689
+I1575896907
+I-1792935220
+I-151387575
+I64084088
+I-95737215
+I1337335688
+I-1963466345
+I1243315130
+I-1798518411
+I-546013212
+I-607065396
+I1219824160
+I1715218469
+I-1368163783
+I1701552913
+I-381114888
+I1068821717
+I266062971
+I-2066513172
+I1767407229
+I-780936414
+I-705413443
+I-1256268847
+I1646874149
+I1107690353
+I839133072
+I67001749
+I860763503
+I884880613
+I91977084
+I755371933
+I420745153
+I-578480690
+I-1520193551
+I1011369331
+I-99754575
+I-733141064
+I-500598588
+I1081124271
+I-1341266575
+I921002612
+I-848852487
+I-1904467341
+I-1294256973
+I-94074714
+I-1778758498
+I-1401188547
+I2101830578
+I2058864877
+I-272875991
+I-1375854779
+I-1332937870
+I619425525
+I-1034529639
+I-36454393
+I-2030499985
+I-1637127500
+I-1408110287
+I-2108625749
+I-961007436
+I1475654951
+I-791946251
+I1667792115
+I1818978830
+I1897980514
+I1959546477
+I-74478911
+I-508643347
+I461594399
+I538802715
+I-2094970071
+I-2076660253
+I1091358944
+I1944029246
+I-343957436
+I-1915845022
+I1237620188
+I1144125174
+I1522190520
+I-670252952
+I-19469226
+I675626510
+I758750096
+I909724354
+I-1846259652
+I544669343
+I445182495
+I-821519930
+I-1124279685
+I-1668995122
+I1653284793
+I-678555151
+I-687513207
+I1558259445
+I-1978866839
+I1558835601
+I1732138472
+I-1904793363
+I620020296
+I1562597874
+I1942617227
+I-549632552
+I721603795
+I417978456
+I-1355281522
+I-538065208
+I-1079523196
+I187375699
+I449064972
+I1018083947
+I1632388882
+I-493269866
+I92769041
+I1477146750
+I1782708404
+I444873376
+I1085851104
+I-6823272
+I-1302251853
+I1602050688
+I-1042187824
+I287161745
+I-1972094479
+I103271491
+I2131619773
+I-2064115870
+I766815498
+I990861458
+I-1664407378
+I1083746756
+I-1018331904
+I-677315687
+I-951670647
+I-952356874
+I451460609
+I-818615564
+I851439508
+I656362634
+I-1351240485
+I823378078
+I1985597385
+I597757740
+I-1512303057
+I1590872798
+I1108424213
+I818850898
+I-1368594306
+I-201107761
+I1793370378
+I1247597611
+I-1594326264
+I-601653890
+I427642759
+I248322113
+I-292545338
+I1708985870
+I1917042771
+I429354503
+I-478470329
+I793960014
+I369939133
+I1728189157
+I-518963626
+I-278523974
+I-1877289696
+I-2088617658
+I-1367940049
+I-62295925
+I197975119
+I-252900777
+I803430539
+I485759441
+I-528283480
+I-1287443963
+I-478617444
+I-861906946
+I-649095555
+I-893184337
+I2050571322
+I803433133
+I1629574571
+I1649720417
+I-2050225209
+I1208598977
+I720314344
+I-615166251
+I-835077127
+I-1405372429
+I995698064
+I148123240
+I-943016676
+I-594609622
+I-1381596711
+I1017195301
+I-1268893013
+I-1815985179
+I-1393570351
+I-870027364
+I-476064472
+I185582645
+I569863326
+I1098584267
+I-1599147006
+I-485054391
+I-852098365
+I1477320135
+I222316762
+I-1515583064
+I-935051367
+I393383063
+I819617226
+I722921837
+I-1241806499
+I-1358566385
+I1666813591
+I1333875114
+I-1663688317
+I-47254623
+I-885800726
+I307388991
+I-1219459496
+I1374870300
+I2132047877
+I-1385624198
+I-245139206
+I1015139214
+I-926198559
+I1969798868
+I-1950480619
+I-559193432
+I-1256446518
+I-1983476981
+I790179655
+I1004289659
+I1541827617
+I1555805575
+I501127333
+I-1123446797
+I-453230915
+I2035104883
+I1296122398
+I-1843698604
+I-715464588
+I337143971
+I-1972119192
+I606777909
+I726977302
+I-1149501872
+I-1963733522
+I-1797504644
+I624
+tp2
+Ntp3
+b.
\ No newline at end of file
diff --git a/test-data/stdlib-samples/3.2/test/randv2_64.pck b/test-data/stdlib-samples/3.2/test/randv2_64.pck
new file mode 100644
index 0000000..090dd6f
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/randv2_64.pck
@@ -0,0 +1,633 @@
+crandom
+Random
+p0
+(tRp1
+(I2
+(I2147483648
+I1812115682
+I2741755497
+I1028055730
+I809166036
+I2773628650
+I62321950
+I535290043
+I349877800
+I976167039
+I2490696940
+I3631326955
+I2107991114
+I2941205793
+I3199611605
+I1871971556
+I1456108540
+I2984591044
+I140836801
+I4203227310
+I3652722980
+I4031971234
+I555769760
+I697301296
+I2347638880
+I3302335858
+I320255162
+I2553586608
+I1570224361
+I2838780912
+I2315834918
+I2351348158
+I3545433015
+I2292018579
+I1177569331
+I758497559
+I2913311175
+I1014948880
+I1793619243
+I3982451053
+I3850988342
+I2393984324
+I1583100093
+I3144742543
+I3655047493
+I3507532385
+I3094515442
+I350042434
+I2455294844
+I1038739312
+I313809152
+I189433072
+I1653165452
+I4186650593
+I19281455
+I2589680619
+I4145931590
+I4283266118
+I636283172
+I943618337
+I3170184633
+I2308766231
+I634615159
+I538152647
+I2079576891
+I1029442616
+I3410689412
+I1370292761
+I1071718978
+I2139496322
+I1876699543
+I3485866187
+I3157490130
+I1633105386
+I1453253160
+I3841322080
+I3789608924
+I4110770792
+I95083673
+I931354627
+I2065389591
+I3448339827
+I3348204577
+I3263528560
+I2411324590
+I4003055026
+I1869670093
+I2737231843
+I4150701155
+I2689667621
+I2993263224
+I3239890140
+I1191430483
+I1214399779
+I3623428533
+I1817058866
+I3052274451
+I326030082
+I1505129312
+I2306812262
+I1349150363
+I1099127895
+I2543465574
+I2396380193
+I503926466
+I1607109730
+I3451716817
+I58037114
+I4290081119
+I947517597
+I3083440186
+I520522630
+I2948962496
+I4184319574
+I2957636335
+I668374201
+I2325446473
+I472785314
+I3791932366
+I573017189
+I2185725379
+I1262251492
+I3525089379
+I2951262653
+I1305347305
+I940958122
+I3343754566
+I359371744
+I3874044973
+I396897232
+I147188248
+I716683703
+I4013880315
+I1133359586
+I1794612249
+I3480815192
+I3988787804
+I1729355809
+I573408542
+I1419310934
+I1770030447
+I3552845567
+I1693976502
+I1271189893
+I2298236738
+I2049219027
+I3464198070
+I1233574082
+I1007451781
+I1838253750
+I687096593
+I1131375603
+I1223013895
+I1490478435
+I339265439
+I4232792659
+I491538536
+I2816256769
+I1044097522
+I2566227049
+I748762793
+I1511830494
+I3593259822
+I4121279213
+I3735541309
+I3609794797
+I1939942331
+I377570434
+I1437957554
+I1831285696
+I55062811
+I2046783110
+I1303902283
+I1838349877
+I420993556
+I1256392560
+I2795216506
+I2783687924
+I3322303169
+I512794749
+I308405826
+I517164429
+I3320436022
+I1328403632
+I2269184746
+I3729522810
+I3304314450
+I2238756124
+I1690581361
+I3813277532
+I4119706879
+I2659447875
+I388818978
+I2064580814
+I1586227676
+I2627522685
+I2017792269
+I547928109
+I859107450
+I1062238929
+I858886237
+I3795783146
+I4173914756
+I3835915965
+I3329504821
+I3494579904
+I838863205
+I3399734724
+I4247387481
+I3618414834
+I2984433798
+I2165205561
+I4260685684
+I3045904244
+I3450093836
+I3597307595
+I3215851166
+I3162801328
+I2558283799
+I950068105
+I1829664117
+I3108542987
+I2378860527
+I790023460
+I280087750
+I1171478018
+I2333653728
+I3976932140
+I896746152
+I1802494195
+I1232873794
+I2749440836
+I2032037296
+I2012091682
+I1296131034
+I3892133385
+I908161334
+I2296791795
+I548169794
+I696265
+I893156828
+I426904709
+I3565374535
+I2655906825
+I2792178515
+I2406814632
+I4038847579
+I3123934642
+I2197503004
+I3535032597
+I2266216689
+I2117613462
+I1787448518
+I1875089416
+I2037165384
+I1140676321
+I3606296464
+I3229138231
+I2458267132
+I1874651171
+I3331900867
+I1000557654
+I1432861701
+I473636323
+I2691783927
+I1871437447
+I1328016401
+I4118690062
+I449467602
+I681789035
+I864889442
+I1200888928
+I75769445
+I4008690037
+I2464577667
+I4167795823
+I3070097648
+I2579174882
+I1216886568
+I3810116343
+I2249507485
+I3266903480
+I3671233480
+I100191658
+I3087121334
+I365063087
+I3821275176
+I2165052848
+I1282465245
+I3601570637
+I3132413236
+I2780570459
+I3222142917
+I3129794692
+I2611590811
+I947031677
+I2991908938
+I750997949
+I3632575131
+I1632014461
+I2846484755
+I2347261779
+I2903959448
+I1397316686
+I1904578392
+I774649578
+I3164598558
+I2429587609
+I738244516
+I1563304975
+I1399317414
+I1021316297
+I3187933234
+I2126780757
+I4011907847
+I4095169219
+I3358010054
+I2729978247
+I3736811646
+I3009656410
+I2893043637
+I4027447385
+I1239610110
+I1488806900
+I2674866844
+I442876374
+I2853687260
+I2785921005
+I3151378528
+I1180567
+I2803146964
+I982221759
+I2192919417
+I3087026181
+I2480838002
+I738452921
+I687986185
+I3049371676
+I3636492954
+I3468311299
+I2379621102
+I788988633
+I1643210601
+I2983998168
+I2492730801
+I2586048705
+I604073029
+I4121082815
+I1496476928
+I2972357110
+I2663116968
+I2642628592
+I2116052039
+I487186279
+I2577680328
+I3974766614
+I730776636
+I3842528855
+I1929093695
+I44626622
+I3989908833
+I1695426222
+I3675479382
+I3051784964
+I1514876613
+I1254036595
+I2420450649
+I3034377361
+I2332990590
+I1535175126
+I185834384
+I1107372900
+I1707278185
+I1286285295
+I3332574225
+I2785672437
+I883170645
+I2005666473
+I3403131327
+I4122021352
+I1464032858
+I3702576112
+I260554598
+I1837731650
+I2594435345
+I75771049
+I2012484289
+I3058649775
+I29979703
+I3861335335
+I2506495152
+I3786448704
+I442947790
+I2582724774
+I4291336243
+I2568189843
+I1923072690
+I1121589611
+I837696302
+I3284631720
+I3865021324
+I3576453165
+I2559531629
+I1459231762
+I3506550036
+I3754420159
+I2622000757
+I124228596
+I1084328605
+I1692830753
+I547273558
+I674282621
+I655259103
+I3188629610
+I490502174
+I2081001293
+I3191330704
+I4109943593
+I1859948504
+I3163806460
+I508833168
+I1256371033
+I2709253790
+I2068956572
+I3092842814
+I3913926529
+I2039638759
+I981982529
+I536094190
+I368855295
+I51993975
+I1597480732
+I4058175522
+I2155896702
+I3196251991
+I1081913893
+I3952353788
+I3545548108
+I2370669647
+I2206572308
+I2576392991
+I1732303374
+I1153136290
+I537641955
+I1738691747
+I3232854186
+I2539632206
+I2829760278
+I3058187853
+I1202425792
+I3762361970
+I2863949342
+I2640635867
+I376638744
+I1857679757
+I330798087
+I1457400505
+I1135610046
+I606400715
+I1859536026
+I509811335
+I529772308
+I2579273244
+I1890382004
+I3959908876
+I2612335971
+I2834052227
+I1434475986
+I3684202717
+I4015011345
+I582567852
+I3689969571
+I3934753460
+I3034960691
+I208573292
+I4004113742
+I3992904842
+I2587153719
+I3529179079
+I1565424987
+I779130678
+I1048582935
+I3213591622
+I3607793434
+I3951254937
+I2047811901
+I7508850
+I248544605
+I4210090324
+I2331490884
+I70057213
+I776474945
+I1345528889
+I3290403612
+I1664955269
+I1533143116
+I545003424
+I4141564478
+I1257326139
+I868843601
+I2337603029
+I1918131449
+I1843439523
+I1125519035
+I673340118
+I421408852
+I1520454906
+I1804722630
+I3621254196
+I2329968000
+I39464672
+I430583134
+I294026512
+I53978525
+I2892276105
+I1418863764
+I3419054451
+I1391595797
+I3544981798
+I4191780858
+I825672357
+I2972000844
+I1571305069
+I4231982845
+I3611916419
+I3045163168
+I2982349733
+I278572141
+I4215338078
+I839860504
+I1819151779
+I1412347479
+I1386770353
+I3914589491
+I3783104977
+I4124296733
+I830546258
+I89825624
+I4110601328
+I2545483429
+I300600527
+I516641158
+I3693021034
+I2852912854
+I3240039868
+I4167407959
+I1479557946
+I3621188804
+I1391590944
+I3578441128
+I1227055556
+I406898396
+I3064054983
+I25835338
+I402664165
+I4097682779
+I2106728012
+I203613622
+I3045467686
+I1381726438
+I3798670110
+I1342314961
+I3552497361
+I535913619
+I2625787583
+I1606574307
+I1101269630
+I1950513752
+I1121355862
+I3586816903
+I438529984
+I2473182121
+I1229997203
+I405445940
+I1695535315
+I427014336
+I3916768430
+I392298359
+I1884642868
+I1244730821
+I741058080
+I567479957
+I3527621168
+I3191971011
+I3267069104
+I4108668146
+I1520795587
+I166581006
+I473794477
+I1562126550
+I929843010
+I889533294
+I1266556608
+I874518650
+I3520162092
+I3013765049
+I4220231414
+I547246449
+I3998093769
+I3737193746
+I3872944207
+I793651876
+I2606384318
+I875991012
+I1394836334
+I4102011644
+I854380426
+I2618666767
+I2568302000
+I1995512132
+I229491093
+I2673500286
+I3364550739
+I3836923416
+I243656987
+I3944388983
+I4064949677
+I1416956378
+I1703244487
+I3990798829
+I2023425781
+I3926702214
+I1229015501
+I3174247824
+I624
+tp2
+Ntp3
+b.
\ No newline at end of file
diff --git a/test-data/stdlib-samples/3.2/test/randv3.pck b/test-data/stdlib-samples/3.2/test/randv3.pck
new file mode 100644
index 0000000..09fc38b
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/randv3.pck
@@ -0,0 +1,633 @@
+crandom
+Random
+p0
+(tRp1
+(I3
+(L2147483648L
+L994081831L
+L2806287265L
+L2228999830L
+L3396498069L
+L2956805457L
+L3273927761L
+L920726507L
+L1862624492L
+L2921292485L
+L1779526843L
+L2469105503L
+L251696293L
+L1254390717L
+L779197080L
+L3165356830L
+L2007365218L
+L1870028812L
+L2896519363L
+L1855578438L
+L979518416L
+L3481710246L
+L3191861507L
+L3993006593L
+L2967971479L
+L3353342753L
+L3576782572L
+L339685558L
+L2367675732L
+L116208555L
+L1220054437L
+L486597056L
+L1912115141L
+L1037044792L
+L4096904723L
+L3409146175L
+L3701651227L
+L315824610L
+L4138604583L
+L1385764892L
+L191878900L
+L2320582219L
+L3420677494L
+L2776503169L
+L1148247403L
+L829555069L
+L902064012L
+L2934642741L
+L2477108577L
+L2583928217L
+L1658612579L
+L2865447913L
+L129147346L
+L3691171887L
+L1569328110L
+L1372860143L
+L1054139183L
+L1617707080L
+L69020592L
+L3810271603L
+L1853953416L
+L3499803073L
+L1027545027L
+L3229043605L
+L250848720L
+L3324932626L
+L3537002962L
+L2494323345L
+L3238103962L
+L4147541579L
+L3636348186L
+L3025455083L
+L2678771977L
+L584700256L
+L3461826909L
+L854511420L
+L943463552L
+L3609239025L
+L3977577989L
+L253070090L
+L777394544L
+L2144086567L
+L1092947992L
+L854327284L
+L2222750082L
+L360183510L
+L1312466483L
+L3227531091L
+L2235022500L
+L3013060530L
+L2541091298L
+L3480126342L
+L1839762775L
+L2632608190L
+L1108889403L
+L3045050923L
+L731513126L
+L3505436788L
+L3062762017L
+L1667392680L
+L1354126500L
+L1143573930L
+L2816645702L
+L2100356873L
+L2817679106L
+L1210746010L
+L2409915248L
+L2910119964L
+L2309001420L
+L220351824L
+L3667352871L
+L3993148590L
+L2886160232L
+L4239393701L
+L1189270581L
+L3067985541L
+L147374573L
+L2355164869L
+L3696013550L
+L4227037846L
+L1905112743L
+L3312843689L
+L2930678266L
+L1828795355L
+L76933594L
+L3987100796L
+L1288361435L
+L3464529151L
+L965498079L
+L1444623093L
+L1372893415L
+L1536235597L
+L1341994850L
+L963594758L
+L2115295754L
+L982098685L
+L1053433904L
+L2078469844L
+L3059765792L
+L1753606181L
+L2130171254L
+L567588194L
+L529629426L
+L3621523534L
+L3027576564L
+L1176438083L
+L4096287858L
+L1168574683L
+L1425058962L
+L1429631655L
+L2902106759L
+L761900641L
+L1329183956L
+L1947050932L
+L447490289L
+L3282516276L
+L200037389L
+L921868197L
+L3331403999L
+L4088760249L
+L2188326318L
+L288401961L
+L1360802675L
+L314302808L
+L3314639210L
+L3749821203L
+L2286081570L
+L2768939062L
+L3200541016L
+L2133495482L
+L385029880L
+L4217232202L
+L3171617231L
+L1660846653L
+L2459987621L
+L2691776124L
+L4225030408L
+L3595396773L
+L1103680661L
+L539064057L
+L1492841101L
+L166195394L
+L757973658L
+L533893054L
+L2784879594L
+L1021821883L
+L2350548162L
+L176852116L
+L3503166025L
+L148079914L
+L1633466236L
+L2773090165L
+L1162846701L
+L3575737795L
+L1624178239L
+L2454894710L
+L3014691938L
+L526355679L
+L1870824081L
+L3362425857L
+L3907566665L
+L3462563184L
+L2229112004L
+L4203735748L
+L1557442481L
+L924133999L
+L1906634214L
+L880459727L
+L4065895870L
+L141426254L
+L1258450159L
+L3243115027L
+L1574958840L
+L313939294L
+L3055664260L
+L3459714255L
+L531778790L
+L509505506L
+L1620227491L
+L2675554942L
+L2516509560L
+L3797299887L
+L237135890L
+L3203142213L
+L1087745310L
+L1897151854L
+L3936590041L
+L132765167L
+L2385908063L
+L1360600289L
+L3574567769L
+L2752788114L
+L2644228966L
+L2377705183L
+L601277909L
+L4046480498L
+L324401408L
+L3279931760L
+L2227059377L
+L1538827493L
+L4220532064L
+L478044564L
+L2917117761L
+L635492832L
+L2319763261L
+L795944206L
+L1820473234L
+L1673151409L
+L1404095402L
+L1661067505L
+L3217106938L
+L2406310683L
+L1931309248L
+L2458622868L
+L3323670524L
+L3266852755L
+L240083943L
+L3168387397L
+L607722198L
+L1256837690L
+L3608124913L
+L4244969357L
+L1289959293L
+L519750328L
+L3229482463L
+L1105196988L
+L1832684479L
+L3761037224L
+L2363631822L
+L3297957711L
+L572766355L
+L1195822137L
+L2239207981L
+L2034241203L
+L163540514L
+L288160255L
+L716403680L
+L4019439143L
+L1536281935L
+L2345100458L
+L2786059178L
+L2822232109L
+L987025395L
+L3061166559L
+L490422513L
+L2551030115L
+L2638707620L
+L1344728502L
+L714108911L
+L2831719700L
+L2188615369L
+L373509061L
+L1351077504L
+L3136217056L
+L783521095L
+L2554949468L
+L2662499550L
+L1203826951L
+L1379632388L
+L1918858985L
+L607465976L
+L1980450237L
+L3540079211L
+L3397813410L
+L2913309266L
+L2289572621L
+L4133935327L
+L4166227663L
+L3371801704L
+L3065474909L
+L3580562343L
+L3832172378L
+L2556130719L
+L310473705L
+L3734014346L
+L2490413810L
+L347233056L
+L526668037L
+L1158393656L
+L544329703L
+L2150085419L
+L3914038146L
+L1060237586L
+L4159394837L
+L113205121L
+L309966775L
+L4098784465L
+L3635222960L
+L2417516569L
+L2089579233L
+L1725807541L
+L2728122526L
+L2365836523L
+L2504078522L
+L1443946869L
+L2384171411L
+L997046534L
+L3249131657L
+L1699875986L
+L3618097146L
+L1716038224L
+L2629818607L
+L2929217876L
+L1367250314L
+L1726434951L
+L1388496325L
+L2107602181L
+L2822366842L
+L3052979190L
+L3796798633L
+L1543813381L
+L959000121L
+L1363845999L
+L2952528150L
+L874184932L
+L1888387194L
+L2328695295L
+L3442959855L
+L841805947L
+L1087739275L
+L3230005434L
+L3045399265L
+L1161817318L
+L2898673139L
+L860011094L
+L940539782L
+L1297818080L
+L4243941623L
+L1577613033L
+L4204131887L
+L3819057225L
+L1969439558L
+L3297963932L
+L241874069L
+L3517033453L
+L2295345664L
+L1098911422L
+L886955008L
+L1477397621L
+L4279347332L
+L3616558791L
+L2384411957L
+L742537731L
+L764221540L
+L2871698900L
+L3530636393L
+L691256644L
+L758730966L
+L1717773090L
+L2751856377L
+L3188484000L
+L3767469670L
+L1623863053L
+L3533236793L
+L4099284176L
+L723921107L
+L310594036L
+L223978745L
+L2266565776L
+L201843303L
+L2969968546L
+L3351170888L
+L3465113624L
+L2712246712L
+L1521383057L
+L2384461798L
+L216357551L
+L2167301975L
+L3144653194L
+L2781220155L
+L3620747666L
+L95971265L
+L4255400243L
+L59999757L
+L4174273472L
+L3974511524L
+L1007123950L
+L3112477628L
+L806461512L
+L3148074008L
+L528352882L
+L2545979588L
+L2562281969L
+L3010249477L
+L1886331611L
+L3210656433L
+L1034099976L
+L2906893579L
+L1197048779L
+L1870004401L
+L3898300490L
+L2686856402L
+L3975723478L
+L613043532L
+L2565674353L
+L3760045310L
+L3468984376L
+L4126258L
+L303855424L
+L3988963552L
+L276256796L
+L544071807L
+L1023872062L
+L1747461519L
+L1975571260L
+L4033766958L
+L2946555557L
+L1492957796L
+L958271685L
+L46480515L
+L907760635L
+L1306626357L
+L819652378L
+L1172300279L
+L1116851319L
+L495601075L
+L1157715330L
+L534220108L
+L377320028L
+L1672286106L
+L2066219284L
+L1842386355L
+L2546059464L
+L1839457336L
+L3476194446L
+L3050550028L
+L594705582L
+L1905813535L
+L1813033412L
+L2700858157L
+L169067972L
+L4252889045L
+L1921944555L
+L497671474L
+L210143935L
+L2688398489L
+L325158375L
+L3450846447L
+L891760597L
+L712802536L
+L1132557436L
+L1417044075L
+L1639889660L
+L1746379970L
+L1478741647L
+L2817563486L
+L2573612532L
+L4266444457L
+L2911601615L
+L804745411L
+L2207254652L
+L1189140646L
+L3829725111L
+L3637367348L
+L1944731747L
+L2193440343L
+L1430195413L
+L1173515229L
+L1582618217L
+L2070767037L
+L247908936L
+L1460675439L
+L556001596L
+L327629335L
+L1036133876L
+L4228129605L
+L999174048L
+L3635804039L
+L1416550481L
+L1270540269L
+L4280743815L
+L39607659L
+L1552540623L
+L2762294062L
+L504137289L
+L4117044239L
+L1417130225L
+L1342970056L
+L1755716449L
+L1169447322L
+L2731401356L
+L2319976745L
+L2869221479L
+L23972655L
+L2251495389L
+L1429860878L
+L3728135992L
+L4241432973L
+L3698275076L
+L216416432L
+L4040046960L
+L246077176L
+L894675685L
+L3932282259L
+L3097205100L
+L2128818650L
+L1319010656L
+L1601974009L
+L2552960957L
+L3554016055L
+L4209395641L
+L2013340102L
+L3370447801L
+L2307272002L
+L1795091354L
+L202109401L
+L988345070L
+L2514870758L
+L1132726850L
+L582746224L
+L3112305421L
+L1843020683L
+L3600189223L
+L1101349165L
+L4211905855L
+L2866677581L
+L2881621130L
+L4165324109L
+L4238773191L
+L3635649550L
+L2670481044L
+L2996248219L
+L1676992480L
+L3473067050L
+L4205793699L
+L4019490897L
+L1579990481L
+L1899617990L
+L1136347713L
+L1802842268L
+L3591752960L
+L1197308739L
+L433629786L
+L4032142790L
+L3148041979L
+L3312138845L
+L3896860449L
+L3298182567L
+L907605170L
+L1658664067L
+L2682980313L
+L2523523173L
+L1208722103L
+L3808530363L
+L1079003946L
+L4282402864L
+L2041010073L
+L2667555071L
+L688018180L
+L1405121012L
+L4167994076L
+L3504695336L
+L1923944749L
+L1143598790L
+L3936268898L
+L3606243846L
+L1017420080L
+L4026211169L
+L596529763L
+L1844259624L
+L2840216282L
+L2673807759L
+L3407202575L
+L2737971083L
+L4075423068L
+L3684057432L
+L3146627241L
+L599650513L
+L69773114L
+L1257035919L
+L807485291L
+L2376230687L
+L3036593147L
+L2642411658L
+L106080044L
+L2199622729L
+L291834511L
+L2697611361L
+L11689733L
+L625123952L
+L3226023062L
+L3229663265L
+L753059444L
+L2843610189L
+L624L
+tp2
+Ntp3
+b.
\ No newline at end of file
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py b/test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py
new file mode 100644
index 0000000..1f61e13
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py
@@ -0,0 +1,24 @@
+"""When called as a script, print a comma-separated list of the open
+file descriptors on stdout."""
+
+import errno
+import os
+
+try:
+    _MAXFD = os.sysconf("SC_OPEN_MAX")
+except:
+    _MAXFD = 256
+
+if __name__ == "__main__":
+    fds = []
+    for fd in range(0, _MAXFD):
+        try:
+            st = os.fstat(fd)
+        except OSError as e:
+            if e.errno == errno.EBADF:
+                continue
+            raise
+        # Ignore Solaris door files
+        if st.st_mode & 0xF000 != 0xd000:
+            fds.append(fd)
+    print(','.join(map(str, fds)))
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py b/test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py
new file mode 100644
index 0000000..1dc3191
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py
@@ -0,0 +1,7 @@
+"""When called as a script, consumes the input"""
+
+import sys
+
+if __name__ == "__main__":
+    for line in sys.stdin:
+        pass
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py b/test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py
new file mode 100644
index 0000000..fe6f9db
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py
@@ -0,0 +1,7 @@
+"""When ran as a script, simulates cat with no arguments."""
+
+import sys
+
+if __name__ == "__main__":
+    for line in sys.stdin:
+        sys.stdout.write(line)
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py b/test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py
new file mode 100644
index 0000000..6990637
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py
@@ -0,0 +1,10 @@
+"""When called with a single argument, simulated fgrep with a single
+argument and no options."""
+
+import sys
+
+if __name__ == "__main__":
+    pattern = sys.argv[1]
+    for line in sys.stdin:
+        if pattern in line:
+            sys.stdout.write(line)
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py b/test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py
new file mode 100644
index 0000000..6072aec
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py
@@ -0,0 +1,6 @@
+import signal, subprocess, sys
+# On Linux this causes os.waitpid to fail with OSError as the OS has already
+# reaped our child process.  The wait() passing the OSError on to the caller
+# and causing us to exit with an error is what we are testing against.
+signal.signal(signal.SIGCHLD, signal.SIG_IGN)
+subprocess.Popen([sys.executable, '-c', 'print("albatross")']).wait()
diff --git a/test-data/stdlib-samples/3.2/test/support.py b/test-data/stdlib-samples/3.2/test/support.py
new file mode 100644
index 0000000..a36ba28
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/support.py
@@ -0,0 +1,1602 @@
+"""Supporting definitions for the Python regression tests."""
+
+if __name__ != 'test.support':
+    raise ImportError('support must be imported from the test package')
+
+import contextlib
+import errno
+import functools
+import gc
+import socket
+import sys
+import os
+import platform
+import shutil
+import warnings
+import unittest
+import importlib
+import collections
+import re
+import subprocess
+import imp
+import time
+import sysconfig
+import fnmatch
+import logging.handlers
+
+import _thread, threading
+from typing import Any, Dict, cast
+#try:
+#    import multiprocessing.process
+#except ImportError:
+#    multiprocessing = None
+
+
+__all__ = [
+    "Error", "TestFailed", "ResourceDenied", "import_module",
+    "verbose", "use_resources", "max_memuse", "record_original_stdout",
+    "get_original_stdout", "unload", "unlink", "rmtree", "forget",
+    "is_resource_enabled", "requires", "requires_mac_ver",
+    "find_unused_port", "bind_port",
+    "fcmp", "is_jython", "TESTFN", "HOST", "FUZZ", "SAVEDCWD", "temp_cwd",
+    "findfile", "sortdict", "check_syntax_error", "open_urlresource",
+    "check_warnings", "CleanImport", "EnvironmentVarGuard",
+    "TransientResource", "captured_output", "captured_stdout",
+    "captured_stdin", "captured_stderr",
+    "time_out", "socket_peer_reset", "ioerror_peer_reset",
+    "run_with_locale", 'temp_umask', "transient_internet",
+    "set_memlimit", "bigmemtest", "bigaddrspacetest", "BasicTestRunner",
+    "run_unittest", "run_doctest", "threading_setup", "threading_cleanup",
+    "reap_children", "cpython_only", "check_impl_detail", "get_attribute",
+    "swap_item", "swap_attr", "requires_IEEE_754",
+    "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink",
+    "import_fresh_module", "failfast",
+    ]
+
+class Error(Exception):
+    """Base class for regression test exceptions."""
+
+class TestFailed(Error):
+    """Test failed."""
+
+class ResourceDenied(unittest.SkipTest):
+    """Test skipped because it requested a disallowed resource.
+
+    This is raised when a test calls requires() for a resource that
+    has not been enabled.  It is used to distinguish between expected
+    and unexpected skips.
+    """
+
+@contextlib.contextmanager
+def _ignore_deprecated_imports(ignore=True):
+    """Context manager to suppress package and module deprecation
+    warnings when importing them.
+
+    If ignore is False, this context manager has no effect."""
+    if ignore:
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", ".+ (module|package)",
+                                    DeprecationWarning)
+            yield None
+    else:
+        yield None
+
+
+def import_module(name, deprecated=False):
+    """Import and return the module to be tested, raising SkipTest if
+    it is not available.
+
+    If deprecated is True, any module or package deprecation messages
+    will be suppressed."""
+    with _ignore_deprecated_imports(deprecated):
+        try:
+            return importlib.import_module(name)
+        except ImportError as msg:
+            raise unittest.SkipTest(str(msg))
+
+
+def _save_and_remove_module(name, orig_modules):
+    """Helper function to save and remove a module from sys.modules
+
+       Raise ImportError if the module can't be imported."""
+    # try to import the module and raise an error if it can't be imported
+    if name not in sys.modules:
+        __import__(name)
+        del sys.modules[name]
+    for modname in list(sys.modules):
+        if modname == name or modname.startswith(name + '.'):
+            orig_modules[modname] = sys.modules[modname]
+            del sys.modules[modname]
+
+def _save_and_block_module(name, orig_modules):
+    """Helper function to save and block a module in sys.modules
+
+       Return True if the module was in sys.modules, False otherwise."""
+    saved = True
+    try:
+        orig_modules[name] = sys.modules[name]
+    except KeyError:
+        saved = False
+    sys.modules[name] = None
+    return saved
+
+
+def import_fresh_module(name, fresh=(), blocked=(), deprecated=False):
+    """Imports and returns a module, deliberately bypassing the sys.modules cache
+    and importing a fresh copy of the module. Once the import is complete,
+    the sys.modules cache is restored to its original state.
+
+    Modules named in fresh are also imported anew if needed by the import.
+    If one of these modules can't be imported, None is returned.
+
+    Importing of modules named in blocked is prevented while the fresh import
+    takes place.
+
+    If deprecated is True, any module or package deprecation messages
+    will be suppressed."""
+    # NOTE: test_heapq, test_json and test_warnings include extra sanity checks
+    # to make sure that this utility function is working as expected
+    with _ignore_deprecated_imports(deprecated):
+        # Keep track of modules saved for later restoration as well
+        # as those which just need a blocking entry removed
+        orig_modules = {}
+        names_to_remove = []
+        _save_and_remove_module(name, orig_modules)
+        try:
+            for fresh_name in fresh:
+                _save_and_remove_module(fresh_name, orig_modules)
+            for blocked_name in blocked:
+                if not _save_and_block_module(blocked_name, orig_modules):
+                    names_to_remove.append(blocked_name)
+            fresh_module = importlib.import_module(name)
+        except ImportError:
+            fresh_module = None
+        finally:
+            for orig_name, module in orig_modules.items():
+                sys.modules[orig_name] = module
+            for name_to_remove in names_to_remove:
+                del sys.modules[name_to_remove]
+        return fresh_module
+
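+
+# Illustrative sketch, not part of the upstream module: getting a fresh
+# pure-Python copy of a module while hiding its C accelerator, in the way
+# test_heapq does.  The module names and the helper are examples only.
+def _example_import_fresh_module():
+    # py_heapq cannot see the _heapq extension module; sys.modules is
+    # restored afterwards, so other tests still get the normal heapq.
+    py_heapq = import_fresh_module('heapq', blocked=['_heapq'])
+    return py_heapq
+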
+
+def get_attribute(obj, name):
+    """Get an attribute, raising SkipTest if AttributeError is raised."""
+    try:
+        attribute = getattr(obj, name)
+    except AttributeError:
+        raise unittest.SkipTest("module %s has no attribute %s" % (
+            obj.__name__, name))
+    else:
+        return attribute
+
+verbose = 1              # Flag set to 0 by regrtest.py
+use_resources = None # type: Any     # Flag set to [] by regrtest.py
+max_memuse = 0           # Disable bigmem tests (they will still be run with
+                         # small sizes, to make sure they work.)
+real_max_memuse = 0
+failfast = False
+match_tests = None # type: Any
+
+# _original_stdout is meant to hold stdout at the time regrtest began.
+# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
+# The point is to have some flavor of stdout the user can actually see.
+_original_stdout = None # type: 'Any'
+def record_original_stdout(stdout):
+    global _original_stdout
+    _original_stdout = stdout
+
+def get_original_stdout():
+    return _original_stdout or sys.stdout
+
+def unload(name):
+    try:
+        del sys.modules[name]
+    except KeyError:
+        pass
+
+def unlink(filename):
+    try:
+        os.unlink(filename)
+    except OSError as error:
+        # The filename need not exist.
+        if error.errno not in (errno.ENOENT, errno.ENOTDIR):
+            raise
+
+def rmtree(path):
+    try:
+        shutil.rmtree(path)
+    except OSError as error:
+        # Unix returns ENOENT, Windows returns ESRCH.
+        if error.errno not in (errno.ENOENT, errno.ESRCH):
+            raise
+
+def make_legacy_pyc(source):
+    """Move a PEP 3147 pyc/pyo file to its legacy pyc/pyo location.
+
+    The choice of .pyc or .pyo extension is done based on the __debug__ flag
+    value.
+
+    :param source: The file system path to the source file.  The source file
+        does not need to exist, however the PEP 3147 pyc file must exist.
+    :return: The file system path to the legacy pyc file.
+    """
+    pyc_file = imp.cache_from_source(source)
+    up_one = os.path.dirname(os.path.abspath(source))
+    if __debug__:
+        ch = 'c'
+    else:
+        ch = 'o'
+    legacy_pyc = os.path.join(up_one, source + ch)
+    os.rename(pyc_file, legacy_pyc)
+    return legacy_pyc
+
+def forget(modname):
+    """'Forget' a module was ever imported.
+
+    This removes the module from sys.modules and deletes any PEP 3147 or
+    legacy .pyc and .pyo files.
+    """
+    unload(modname)
+    for dirname in sys.path:
+        source = os.path.join(dirname, modname + '.py')
+        # It doesn't matter if they exist or not, unlink all possible
+        # combinations of PEP 3147 and legacy pyc and pyo files.
+        unlink(source + 'c')
+        unlink(source + 'o')
+        unlink(imp.cache_from_source(source, debug_override=True))
+        unlink(imp.cache_from_source(source, debug_override=False))
+
+# On some platforms, the gui test should not be run even if it is allowed
+# in `use_resources'.
+#if sys.platform.startswith('win'):
+    #import ctypes
+    #import ctypes.wintypes
+    #def _is_gui_available():
+    #    UOI_FLAGS = 1
+    #    WSF_VISIBLE = 0x0001
+    #    class USEROBJECTFLAGS(ctypes.Structure):
+    #        _fields_ = [("fInherit", ctypes.wintypes.BOOL),
+    #                    ("fReserved", ctypes.wintypes.BOOL),
+    #                    ("dwFlags", ctypes.wintypes.DWORD)]
+    #    dll = ctypes.windll.user32
+    #    h = dll.GetProcessWindowStation()
+    #    if not h:
+    #        raise ctypes.WinError()
+    #    uof = USEROBJECTFLAGS()
+    #    needed = ctypes.wintypes.DWORD()
+    #    res = dll.GetUserObjectInformationW(h,
+    #        UOI_FLAGS,
+    #        ctypes.byref(uof),
+    #        ctypes.sizeof(uof),
+    #        ctypes.byref(needed))
+    #    if not res:
+    #        raise ctypes.WinError()
+    #    return bool(uof.dwFlags & WSF_VISIBLE)
+#else:
+def _is_gui_available():
+    return True
+
+def is_resource_enabled(resource):
+    """Test whether a resource is enabled.  Known resources are set by
+    regrtest.py."""
+    return use_resources is not None and resource in use_resources
+
+def requires(resource, msg=None):
+    """Raise ResourceDenied if the specified resource is not available.
+
+    If the caller's module is __main__ then automatically return True.  The
+    possibility of False being returned occurs when regrtest.py is
+    executing.
+    """
+    if resource == 'gui' and not _is_gui_available():
+        raise unittest.SkipTest("Cannot use the 'gui' resource")
+    # see if the caller's module is __main__ - if so, treat as if
+    # the resource was set
+    if sys._getframe(1).f_globals.get("__name__") == "__main__":
+        return
+    if not is_resource_enabled(resource):
+        if msg is None:
+            msg = "Use of the `%s' resource not enabled" % resource
+        raise ResourceDenied(msg)
+
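+
+# Illustrative sketch, not part of the upstream module: guarding a test on an
+# optional resource.  The resource name 'network' is only an example; the
+# helper below is never called.
+def _example_requires_network():
+    # Raises ResourceDenied (a SkipTest subclass) unless regrtest enabled the
+    # resource -- except when the caller's module is __main__, in which case
+    # the check is skipped.
+    requires('network')
+
+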
+def requires_mac_ver(*min_version):
+    """Decorator raising SkipTest if the OS is Mac OS X and the OS X
+    version if less than min_version.
+
+    For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version
+    is lesser than 10.5.
+    """
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kw):
+            if sys.platform == 'darwin':
+                version_txt = platform.mac_ver()[0]
+                try:
+                    version = tuple(map(int, version_txt.split('.')))
+                except ValueError:
+                    pass
+                else:
+                    if version < min_version:
+                        min_version_txt = '.'.join(map(str, min_version))
+                        raise unittest.SkipTest(
+                            "Mac OS X %s or higher required, not %s"
+                            % (min_version_txt, version_txt))
+            return func(*args, **kw)
+        wrapper.min_version = min_version
+        return wrapper
+    return decorator
+
+HOST = 'localhost'
+
+def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
+    """Returns an unused port that should be suitable for binding.  This is
+    achieved by creating a temporary socket with the same family and type as
+    the 'sock' parameter (default is AF_INET, SOCK_STREAM), and binding it to
+    the specified host address (defaults to 0.0.0.0) with the port set to 0,
+    eliciting an unused ephemeral port from the OS.  The temporary socket is
+    then closed and deleted, and the ephemeral port is returned.
+
+    Either this method or bind_port() should be used for any tests where a
+    server socket needs to be bound to a particular port for the duration of
+    the test.  Which one to use depends on whether the calling code is creating
+    a python socket, or if an unused port needs to be provided in a constructor
+    or passed to an external program (i.e. the -accept argument to openssl's
+    s_server mode).  Always prefer bind_port() over find_unused_port() where
+    possible.  Hard coded ports should *NEVER* be used.  As soon as a server
+    socket is bound to a hard coded port, the ability to run multiple instances
+    of the test simultaneously on the same host is compromised, which makes the
+    test a ticking time bomb in a buildbot environment. On Unix buildbots, this
+    may simply manifest as a failed test, which can be recovered from without
+    intervention in most cases, but on Windows, the entire python process can
+    completely and utterly wedge, requiring someone to log in to the buildbot
+    and manually kill the affected process.
+
+    (This is easy to reproduce on Windows, unfortunately, and can be traced to
+    the SO_REUSEADDR socket option having different semantics on Windows versus
+    Unix/Linux.  On Unix, you can't have two AF_INET SOCK_STREAM sockets bind,
+    listen and then accept connections on identical host/ports.  An EADDRINUSE
+    socket.error will be raised at some point (depending on the platform and
+    the order bind and listen were called on each socket).
+
+    However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE
+    will ever be raised when attempting to bind two identical host/ports. When
+    accept() is called on each socket, the second caller's process will steal
+    the port from the first caller, leaving them both in an awkwardly wedged
+    state where they'll no longer respond to any signals or graceful kills, and
+    must be forcibly killed via OpenProcess()/TerminateProcess().
+
+    The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option
+    instead of SO_REUSEADDR, which effectively affords the same semantics as
+    SO_REUSEADDR on Unix.  Given the propensity of Unix developers in the Open
+    Source world compared to Windows ones, this is a common mistake.  A quick
+    look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when
+    openssl.exe is called with the 's_server' option, for example. See
+    http://bugs.python.org/issue2550 for more info.  The following site also
+    has a very thorough description about the implications of both REUSEADDR
+    and EXCLUSIVEADDRUSE on Windows:
+    http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx)
+
+    XXX: although this approach is a vast improvement on previous attempts to
+    elicit unused ports, it rests heavily on the assumption that the ephemeral
+    port returned to us by the OS won't immediately be dished back out to some
+    other process when we close and delete our temporary socket but before our
+    calling code has a chance to bind the returned port.  We can deal with this
+    issue if/when we come across it.
+    """
+
+    tempsock = socket.socket(family, socktype)
+    port = bind_port(tempsock)
+    tempsock.close()
+    #del tempsock
+    return port
+
+def bind_port(sock, host=HOST):
+    """Bind the socket to a free port and return the port number.  Relies on
+    ephemeral ports in order to ensure we are using an unbound port.  This is
+    important as many tests may be running simultaneously, especially in a
+    buildbot environment.  This method raises an exception if the sock.family
+    is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR
+    or SO_REUSEPORT set on it.  Tests should *never* set these socket options
+    for TCP/IP sockets.  The only case for setting these options is testing
+    multicasting via multiple UDP sockets.
+
+    Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e.
+    on Windows), it will be set on the socket.  This will prevent anyone else
+    from bind()'ing to our host/port for the duration of the test.
+    """
+
+    if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM:
+        if hasattr(socket, 'SO_REUSEADDR'):
+            if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1:
+                raise TestFailed("tests should never set the SO_REUSEADDR "   \
+                                 "socket option on TCP/IP sockets!")
+        if hasattr(socket, 'SO_REUSEPORT'):
+            if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1:
+                raise TestFailed("tests should never set the SO_REUSEPORT "   \
+                                 "socket option on TCP/IP sockets!")
+        if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'):
+            sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
+
+    sock.bind((host, 0))
+    port = sock.getsockname()[1]
+    return port
+
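+
+# Illustrative sketch, not part of the upstream module: the recommended
+# pattern of letting bind_port() choose the port instead of hard-coding one.
+# The helper name is made up and never called.
+def _example_bind_port():
+    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    port = bind_port(sock)   # the OS assigns a free ephemeral port on HOST
+    sock.listen(1)
+    # ... the test would connect to (HOST, port) here ...
+    sock.close()
+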
+FUZZ = 1e-6
+
+def fcmp(x, y): # fuzzy comparison function
+    if isinstance(x, float) or isinstance(y, float):
+        try:
+            fuzz = (abs(x) + abs(y)) * FUZZ
+            if abs(x-y) <= fuzz:
+                return 0
+        except:
+            pass
+    elif type(x) == type(y) and isinstance(x, (tuple, list)):
+        for i in range(min(len(x), len(y))):
+            outcome = fcmp(x[i], y[i])
+            if outcome != 0:
+                return outcome
+        return (len(x) > len(y)) - (len(x) < len(y))
+    return (x > y) - (x < y)
+
+# decorator for skipping tests on non-IEEE 754 platforms
+requires_IEEE_754 = unittest.skipUnless(
+    cast(Any, float).__getformat__("double").startswith("IEEE"),
+    "test requires IEEE 754 doubles")
+
+is_jython = sys.platform.startswith('java')
+
+TESTFN = ''
+# Filename used for testing
+if os.name == 'java':
+    # Jython disallows @ in module names
+    TESTFN = '$test'
+else:
+    TESTFN = '@test'
+
+# Disambiguate TESTFN for parallel testing, while letting it remain a valid
+# module name.
+TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid())
+
+
+# TESTFN_UNICODE is a non-ascii filename
+TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f"
+if sys.platform == 'darwin':
+    # In Mac OS X's VFS API file names are, by definition, canonically
+    # decomposed Unicode, encoded using UTF-8. See QA1173:
+    # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html
+    import unicodedata
+    TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE)
+TESTFN_ENCODING = sys.getfilesystemencoding()
+
+# TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be
+# encoded by the filesystem encoding (in strict mode). It can be None if we
+# cannot generate such a filename.
+TESTFN_UNENCODABLE = None # type: Any
+if os.name in ('nt', 'ce'):
+    # skip win32s (0) or Windows 9x/ME (1)
+    if sys.getwindowsversion().platform >= 2:
+        # Different kinds of characters from various languages to minimize the
+        # probability that the whole name is encodable to MBCS (issue #9819)
+        TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80"
+        try:
+            TESTFN_UNENCODABLE.encode(TESTFN_ENCODING)
+        except UnicodeEncodeError:
+            pass
+        else:
+            print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). '
+                  'Unicode filename tests may not be effective'
+                  % (TESTFN_UNENCODABLE, TESTFN_ENCODING))
+            TESTFN_UNENCODABLE = None
+# Mac OS X denies unencodable filenames (invalid utf-8)
+elif sys.platform != 'darwin':
+    try:
+        # ascii and utf-8 cannot encode the byte 0xff
+        b'\xff'.decode(TESTFN_ENCODING)
+    except UnicodeDecodeError:
+        # 0xff will be encoded using the surrogate character u+DCFF
+        TESTFN_UNENCODABLE = TESTFN \
+            + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape')
+    else:
+        # File system encoding (eg. ISO-8859-* encodings) can encode
+        # the byte 0xff. Skip some unicode filename tests.
+        pass
+
+# Save the initial cwd
+SAVEDCWD = os.getcwd()
+
+@contextlib.contextmanager
+def temp_cwd(name='tempcwd', quiet=False, path=None):
+    """
+    Context manager that temporarily changes the CWD.
+
+    An existing path may be provided as *path*, in which case this
+    function makes no changes to the file system.
+
+    Otherwise, the new CWD is created in the current directory and it's
+    named *name*. If *quiet* is False (default) and it's not possible to
+    create or change the CWD, an error is raised.  If it's True, only a
+    warning is raised and the original CWD is used.
+    """
+    saved_dir = os.getcwd()
+    is_temporary = False
+    if path is None:
+        path = name
+        try:
+            os.mkdir(name)
+            is_temporary = True
+        except OSError:
+            if not quiet:
+                raise
+            warnings.warn('tests may fail, unable to create temp CWD ' + name,
+                          RuntimeWarning, stacklevel=3)
+    try:
+        os.chdir(path)
+    except OSError:
+        if not quiet:
+            raise
+        warnings.warn('tests may fail, unable to change the CWD to ' + name,
+                      RuntimeWarning, stacklevel=3)
+    try:
+        yield os.getcwd()
+    finally:
+        os.chdir(saved_dir)
+        if is_temporary:
+            rmtree(name)
+
+
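+# Illustrative sketch, not part of the upstream module: run some test code
+# inside a throw-away working directory; the directory name is arbitrary.
+def _example_temp_cwd():
+    with temp_cwd('example_tmpdir') as new_cwd:
+        with open('scratch.txt', 'w') as f:
+            f.write('created inside %s' % new_cwd)
+    # back in the original directory; 'example_tmpdir' has been removed
+
+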
+@contextlib.contextmanager
+def temp_umask(umask):
+    """Context manager that temporarily sets the process umask."""
+    oldmask = os.umask(umask)
+    try:
+        yield None
+    finally:
+        os.umask(oldmask)
+
+
+def findfile(file, here=__file__, subdir=None):
+    """Try to find a file on sys.path and the working directory.  If it is not
+    found, the argument passed to the function is returned (this does not
+    necessarily signal failure; it could still be a legitimate path)."""
+    if os.path.isabs(file):
+        return file
+    if subdir is not None:
+        file = os.path.join(subdir, file)
+    path = sys.path
+    path = [os.path.dirname(here)] + path
+    for dn in path:
+        fn = os.path.join(dn, file)
+        if os.path.exists(fn): return fn
+    return file
+
+def sortdict(dict):
+    "Like repr(dict), but in sorted order."
+    items = sorted(dict.items())
+    reprpairs = ["%r: %r" % pair for pair in items]
+    withcommas = ", ".join(reprpairs)
+    return "{%s}" % withcommas
+
+def make_bad_fd():
+    """
+    Create an invalid file descriptor by opening and closing a file and return
+    its fd.
+    """
+    file = open(TESTFN, "wb")
+    try:
+        return file.fileno()
+    finally:
+        file.close()
+        unlink(TESTFN)
+
+def check_syntax_error(testcase, statement):
+    raise NotImplementedError('no compile built-in')
+    #testcase.assertRaises(SyntaxError, compile, statement,
+    #                      '<test string>', 'exec')
+
+def open_urlresource(url, *args, **kw):
+    from urllib import request, parse
+
+    check = kw.pop('check', None)
+
+    filename = parse.urlparse(url)[2].split('/')[-1] # '/': it's URL!
+
+    fn = os.path.join(os.path.dirname(__file__), "data", filename)
+
+    def check_valid_file(fn):
+        f = open(fn, *args, **kw)
+        if check is None:
+            return f
+        elif check(f):
+            f.seek(0)
+            return f
+        f.close()
+
+    if os.path.exists(fn):
+        f = check_valid_file(fn)
+        if f is not None:
+            return f
+        unlink(fn)
+
+    # Verify the requirement before downloading the file
+    requires('urlfetch')
+
+    print('\tfetching %s ...' % url, file=get_original_stdout())
+    f = request.urlopen(url, timeout=15)
+    try:
+        with open(fn, "wb") as out:
+            s = f.read()
+            while s:
+                out.write(s)
+                s = f.read()
+    finally:
+        f.close()
+
+    f = check_valid_file(fn)
+    if f is not None:
+        return f
+    raise TestFailed('invalid resource "%s"' % fn)
+
+
+class WarningsRecorder(object):
+    """Convenience wrapper for the warnings list returned on
+       entry to the warnings.catch_warnings() context manager.
+    """
+    def __init__(self, warnings_list):
+        self._warnings = warnings_list
+        self._last = 0
+
+    def __getattr__(self, attr):
+        if len(self._warnings) > self._last:
+            return getattr(self._warnings[-1], attr)
+        elif attr in warnings.WarningMessage._WARNING_DETAILS:
+            return None
+        raise AttributeError("%r has no attribute %r" % (self, attr))
+
+    #@property
+    #def warnings(self):
+    #    return self._warnings[self._last:]
+
+    def reset(self):
+        self._last = len(self._warnings)
+
+
+def _filterwarnings(filters, quiet=False):
+    """Catch the warnings, then check if all the expected
+    warnings have been raised and re-raise unexpected warnings.
+    If 'quiet' is True, only re-raise the unexpected warnings.
+    """
+    # Clear the warning registry of the calling module
+    # in order to re-raise the warnings.
+    frame = sys._getframe(2)
+    registry = frame.f_globals.get('__warningregistry__')
+    if registry:
+        registry.clear()
+    with warnings.catch_warnings(record=True) as w:
+        # Set filter "always" to record all warnings.  Because
+        # test_warnings swaps the module, we need to look it up in
+        # the sys.modules dictionary.
+        sys.modules['warnings'].simplefilter("always")
+        yield WarningsRecorder(w)
+    # Filter the recorded warnings
+    reraise = list(w)
+    missing = []
+    for msg, cat in filters:
+        seen = False
+        for w in reraise[:]:
+            warning = w.message
+            # Filter out the matching messages
+            if (re.match(msg, str(warning), re.I) and
+                issubclass(warning.__class__, cat)):
+                seen = True
+                reraise.remove(w)
+        if not seen and not quiet:
+            # This filter caught nothing
+            missing.append((msg, cat.__name__))
+    if reraise:
+        raise AssertionError("unhandled warning %s" % reraise[0])
+    if missing:
+        raise AssertionError("filter (%r, %s) did not catch any warning" %
+                             missing[0])
+
+
+@contextlib.contextmanager
+def check_warnings(*filters, **kwargs):
+    """Context manager to silence warnings.
+
+    Accept 2-tuples as positional arguments:
+        ("message regexp", WarningCategory)
+
+    Optional argument:
+     - if 'quiet' is True, it does not fail if a filter catches nothing
+        (default True without argument,
+         default False if some filters are defined)
+
+    Without argument, it defaults to:
+        check_warnings(("", Warning), quiet=True)
+    """
+    quiet = kwargs.get('quiet')
+    if not filters:
+        filters = (("", Warning),)
+        # Preserve backward compatibility
+        if quiet is None:
+            quiet = True
+    return _filterwarnings(filters, quiet)
+
+
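+# Illustrative sketch, not part of the upstream module: catch one expected
+# warning category; the warning text below is arbitrary.
+def _example_check_warnings():
+    with check_warnings(("this is deprecated", DeprecationWarning)) as recorder:
+        warnings.warn("this is deprecated", DeprecationWarning)
+    return str(recorder.message)   # -> 'this is deprecated'
+
+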
+class CleanImport(object):
+    """Context manager to force import to return a new module reference.
+
+    This is useful for testing module-level behaviours, such as
+    the emission of a DeprecationWarning on import.
+
+    Use like this:
+
+        with CleanImport("foo"):
+            importlib.import_module("foo") # new reference
+    """
+
+    def __init__(self, *module_names):
+        self.original_modules = sys.modules.copy()
+        for module_name in module_names:
+            if module_name in sys.modules:
+                module = sys.modules[module_name]
+                # It is possible that module_name is just an alias for
+                # another module (e.g. stub for modules renamed in 3.x).
+                # In that case, we also need to delete the real module to clear
+                # the import cache.
+                if module.__name__ != module_name:
+                    del sys.modules[module.__name__]
+                del sys.modules[module_name]
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *ignore_exc):
+        sys.modules.update(self.original_modules)
+
+
+class EnvironmentVarGuard(dict):
+
+    """Class to help protect the environment variable properly.  Can be used as
+    a context manager."""
+
+    def __init__(self):
+        self._environ = os.environ
+        self._changed = {}
+
+    def __getitem__(self, envvar):
+        return self._environ[envvar]
+
+    def __setitem__(self, envvar, value):
+        # Remember the initial value on the first access
+        if envvar not in self._changed:
+            self._changed[envvar] = self._environ.get(envvar)
+        self._environ[envvar] = value
+
+    def __delitem__(self, envvar):
+        # Remember the initial value on the first access
+        if envvar not in self._changed:
+            self._changed[envvar] = self._environ.get(envvar)
+        if envvar in self._environ:
+            del self._environ[envvar]
+
+    def keys(self):
+        return self._environ.keys()
+
+    def __iter__(self):
+        return iter(self._environ)
+
+    def __len__(self):
+        return len(self._environ)
+
+    def set(self, envvar, value):
+        self[envvar] = value
+
+    def unset(self, envvar):
+        del self[envvar]
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *ignore_exc):
+        for k, v in self._changed.items():
+            if v is None:
+                if k in self._environ:
+                    del self._environ[k]
+            else:
+                self._environ[k] = v
+        os.environ = self._environ
+
+
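+# Illustrative sketch, not part of the upstream class: set and remove
+# variables inside the guard; os.environ is restored on exit.  The variable
+# names below are hypothetical.
+def _example_env_guard():
+    with EnvironmentVarGuard() as env:
+        env.set('EXAMPLE_FLAG', '1')
+        env.unset('EXAMPLE_UNWANTED')   # removing a missing name is harmless
+    # the original environment contents are back in place here
+
+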
+class DirsOnSysPath(object):
+    """Context manager to temporarily add directories to sys.path.
+
+    This makes a copy of sys.path, appends any directories given
+    as positional arguments, then reverts sys.path to the copied
+    settings when the context ends.
+
+    Note that *all* sys.path modifications in the body of the
+    context manager, including replacement of the object,
+    will be reverted at the end of the block.
+    """
+
+    def __init__(self, *paths):
+        self.original_value = sys.path[:]
+        self.original_object = sys.path
+        sys.path.extend(paths)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *ignore_exc):
+        sys.path = self.original_object
+        sys.path[:] = self.original_value
+
+
+class TransientResource(object):
+
+    """Raise ResourceDenied if an exception is raised while the context manager
+    is in effect that matches the specified exception and attributes."""
+
+    def __init__(self, exc, **kwargs):
+        self.exc = exc
+        self.attrs = kwargs
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type_=None, value=None, traceback=None):
+        """If type_ is a subclass of self.exc and value has attributes matching
+        self.attrs, raise ResourceDenied.  Otherwise let the exception
+        propagate (if any)."""
+        if type_ is not None and issubclass(self.exc, type_):
+            for attr, attr_value in self.attrs.items():
+                if not hasattr(value, attr):
+                    break
+                if getattr(value, attr) != attr_value:
+                    break
+            else:
+                raise ResourceDenied("an optional resource is not available")
+
+# Context managers that raise ResourceDenied when various issues
+# with the Internet connection manifest themselves as exceptions.
+# XXX deprecate these and use transient_internet() instead
+time_out = TransientResource(IOError, errno=errno.ETIMEDOUT)
+socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET)
+ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET)
+
+
+@contextlib.contextmanager
+def transient_internet(resource_name, *, timeout=30.0, errnos=()):
+    """Return a context manager that raises ResourceDenied when various issues
+    with the Internet connection manifest themselves as exceptions."""
+    default_errnos = [
+        ('ECONNREFUSED', 111),
+        ('ECONNRESET', 104),
+        ('EHOSTUNREACH', 113),
+        ('ENETUNREACH', 101),
+        ('ETIMEDOUT', 110),
+    ]
+    default_gai_errnos = [
+        ('EAI_AGAIN', -3),
+        ('EAI_FAIL', -4),
+        ('EAI_NONAME', -2),
+        ('EAI_NODATA', -5),
+        # Encountered when trying to resolve IPv6-only hostnames
+        ('WSANO_DATA', 11004),
+    ]
+
+    denied = ResourceDenied("Resource '%s' is not available" % resource_name)
+    captured_errnos = errnos
+    gai_errnos = []
+    if not captured_errnos:
+        captured_errnos = [getattr(errno, name, num)
+                           for name, num in default_errnos]
+        gai_errnos = [getattr(socket, name, num)
+                      for name, num in default_gai_errnos]
+
+    def filter_error(err):
+        n = getattr(err, 'errno', None)
+        if (isinstance(err, socket.timeout) or
+            (isinstance(err, socket.gaierror) and n in gai_errnos) or
+            n in captured_errnos):
+            if not verbose:
+                sys.stderr.write(denied.args[0] + "\n")
+            raise denied from err
+
+    old_timeout = socket.getdefaulttimeout()
+    try:
+        if timeout is not None:
+            socket.setdefaulttimeout(timeout)
+        yield None
+    except IOError as err:
+        # urllib can wrap original socket errors multiple times (!), we must
+        # unwrap to get at the original error.
+        while True:
+            a = err.args
+            if len(a) >= 1 and isinstance(a[0], IOError):
+                err = a[0]
+            # The error can also be wrapped as args[1]:
+            #    except socket.error as msg:
+            #        raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])
+            elif len(a) >= 2 and isinstance(a[1], IOError):
+                err = a[1]
+            else:
+                break
+        filter_error(err)
+        raise
+    # XXX should we catch generic exceptions and look for their
+    # __cause__ or __context__?
+    finally:
+        socket.setdefaulttimeout(old_timeout)
+
+
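+# Illustrative sketch, not part of the upstream module: wrap network-dependent
+# test code so that connectivity failures surface as ResourceDenied rather
+# than as ordinary errors.  The URL is a placeholder.
+def _example_transient_internet():
+    from urllib import request
+    with transient_internet('example.org'):
+        request.urlopen('http://example.org/', timeout=5).close()
+
+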
+@contextlib.contextmanager
+def captured_output(stream_name):
+    """Return a context manager used by captured_stdout/stdin/stderr
+    that temporarily replaces the sys stream *stream_name* with a StringIO."""
+    import io
+    orig_stdout = getattr(sys, stream_name)
+    setattr(sys, stream_name, io.StringIO())
+    try:
+        yield getattr(sys, stream_name)
+    finally:
+        setattr(sys, stream_name, orig_stdout)
+
+def captured_stdout():
+    """Capture the output of sys.stdout:
+
+       with captured_stdout() as s:
+           print("hello")
+       self.assertEqual(s.getvalue(), "hello\n")
+    """
+    return captured_output("stdout")
+
+def captured_stderr():
+    return captured_output("stderr")
+
+def captured_stdin():
+    return captured_output("stdin")
+
+
+def gc_collect():
+    """Force as many objects as possible to be collected.
+
+    In non-CPython implementations of Python, this is needed because timely
+    deallocation is not guaranteed by the garbage collector.  (Even in CPython
+    this can be the case in case of reference cycles.)  This means that __del__
+    methods may be called later than expected and weakrefs may remain alive for
+    longer than expected.  This function tries its best to force all garbage
+    objects to disappear.
+    """
+    gc.collect()
+    if is_jython:
+        time.sleep(0.1)
+    gc.collect()
+    gc.collect()
+
+
+def python_is_optimized():
+    """Find if Python was built with optimizations."""
+    cflags = sysconfig.get_config_var('PY_CFLAGS') or ''
+    final_opt = ""
+    for opt in cflags.split():
+        if opt.startswith('-O'):
+            final_opt = opt
+    return final_opt and final_opt != '-O0'
+
+
+#=======================================================================
+# Decorator for running a function in a different locale, correctly resetting
+# it afterwards.
+
+def run_with_locale(catstr, *locales):
+    def decorator(func):
+        def inner(*args, **kwds):
+            try:
+                import locale
+                category = getattr(locale, catstr)
+                orig_locale = locale.setlocale(category)
+            except AttributeError:
+                # if the test author gives us an invalid category string
+                raise
+            except:
+                # cannot retrieve original locale, so do nothing
+                locale = orig_locale = None
+            else:
+                for loc in locales:
+                    try:
+                        locale.setlocale(category, loc)
+                        break
+                    except:
+                        pass
+
+            # now run the function, resetting the locale on exceptions
+            try:
+                return func(*args, **kwds)
+            finally:
+                if locale and orig_locale:
+                    locale.setlocale(category, orig_locale)
+        inner.__name__ = func.__name__
+        inner.__doc__ = func.__doc__
+        return inner
+    return decorator
+
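+# Illustrative sketch, not part of the upstream module: the decorator tries
+# each candidate locale in order and silently falls back if none can be set.
+# The locale names below are assumptions; availability differs per platform.
+@run_with_locale('LC_NUMERIC', 'en_US.UTF-8', 'C')
+def _example_locale_sensitive():
+    return format(1234.5, 'n')   # grouping depends on the active locale
+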
+#=======================================================================
+# Big-memory-test support. Separate from 'resources' because memory use
+# should be configurable.
+
+# Some handy shorthands. Note that these are used for byte-limits as well
+# as size-limits, in the various bigmem tests
+_1M = 1024*1024
+_1G = 1024 * _1M
+_2G = 2 * _1G
+_4G = 4 * _1G
+
+MAX_Py_ssize_t = sys.maxsize
+
+def set_memlimit(limit):
+    global max_memuse
+    global real_max_memuse
+    sizes = {
+        'k': 1024,
+        'm': _1M,
+        'g': _1G,
+        't': 1024*_1G,
+    }
+    m = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit,
+                 re.IGNORECASE | re.VERBOSE)
+    if m is None:
+        raise ValueError('Invalid memory limit %r' % (limit,))
+    memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
+    real_max_memuse = memlimit
+    if memlimit > MAX_Py_ssize_t:
+        memlimit = MAX_Py_ssize_t
+    if memlimit < _2G - 1:
+        raise ValueError('Memory limit %r too low to be useful' % (limit,))
+    max_memuse = memlimit
+
+def _memory_watchdog(start_evt, finish_evt, period=10.0):
+    """A function which periodically watches the process' memory consumption
+    and prints it out.
+    """
+    # XXX: because of the GIL, and because the very long operations tested
+    # in most bigmem tests are uninterruptible, the loop below gets woken up
+    # much less often than expected.
+    # The polling code should be rewritten in raw C, without holding the GIL,
+    # and push results onto an anonymous pipe.
+    try:
+        page_size = os.sysconf('SC_PAGESIZE')
+    except (ValueError, AttributeError):
+        try:
+            page_size = os.sysconf('SC_PAGE_SIZE')
+        except (ValueError, AttributeError):
+            page_size = 4096
+    procfile = '/proc/{pid}/statm'.format(pid=os.getpid())
+    try:
+        f = open(procfile, 'rb')
+    except IOError as e:
+        warnings.warn('/proc not available for stats: {}'.format(e),
+                      RuntimeWarning)
+        sys.stderr.flush()
+        return
+    with f:
+        start_evt.set()
+        old_data = -1
+        while not finish_evt.wait(period):
+            f.seek(0)
+            statm = f.read().decode('ascii')
+            data = int(statm.split()[5])
+            if data != old_data:
+                old_data = data
+                print(" ... process data size: {data:.1f}G"
+                       .format(data=data * page_size / (1024 ** 3)))
+
+def bigmemtest(size, memuse, dry_run=True):
+    """Decorator for bigmem tests.
+
+    'size' is the requested size for the test (in arbitrary,
+    test-interpreted units.) 'memuse' is the number of 'bytes per size' for
+    the test, or a good estimate of it.
+
+    If 'dry_run' is False, it means the test doesn't support dummy runs
+    when -M is not specified.
+    """
+    def decorator(f):
+        def wrapper(self):
+            size = wrapper.size
+            memuse = wrapper.memuse
+            if not real_max_memuse:
+                maxsize = 5147
+            else:
+                maxsize = size
+
+            if ((real_max_memuse or not dry_run)
+                and real_max_memuse < maxsize * memuse):
+                raise unittest.SkipTest(
+                    "not enough memory: %.1fG minimum needed"
+                    % (size * memuse / (1024 ** 3)))
+
+            if real_max_memuse and verbose and threading:
+                print()
+                print(" ... expected peak memory use: {peak:.1f}G"
+                      .format(peak=size * memuse / (1024 ** 3)))
+                sys.stdout.flush()
+                start_evt = threading.Event()
+                finish_evt = threading.Event()
+                t = threading.Thread(target=_memory_watchdog,
+                                     args=(start_evt, finish_evt, 0.5))
+                t.daemon = True
+                t.start()
+                start_evt.set()
+            else:
+                t = None
+
+            try:
+                return f(self, maxsize)
+            finally:
+                if t:
+                    finish_evt.set()
+                    t.join()
+
+        wrapper.size = size
+        wrapper.memuse = memuse
+        return wrapper
+    return decorator
+
+def bigaddrspacetest(f):
+    """Decorator for tests that fill the address space."""
+    def wrapper(self):
+        if max_memuse < MAX_Py_ssize_t:
+            if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31:
+                raise unittest.SkipTest(
+                    "not enough memory: try a 32-bit build instead")
+            else:
+                raise unittest.SkipTest(
+                    "not enough memory: %.1fG minimum needed"
+                    % (MAX_Py_ssize_t / (1024 ** 3)))
+        else:
+            return f(self)
+    return wrapper
+
+#=======================================================================
+# unittest integration.
+
+class BasicTestRunner:
+    def run(self, test):
+        result = unittest.TestResult()
+        test(result)
+        return result
+
+def _id(obj):
+    return obj
+
+def requires_resource(resource):
+    if resource == 'gui' and not _is_gui_available():
+        return unittest.skip("resource 'gui' is not available")
+    if is_resource_enabled(resource):
+        return _id
+    else:
+        return unittest.skip("resource {0!r} is not enabled".format(resource))
+
+def cpython_only(test):
+    """
+    Decorator for tests only applicable on CPython.
+    """
+    return impl_detail(cpython=True)(test)
+
+def impl_detail(msg=None, **guards):
+    if check_impl_detail(**guards):
+        return _id
+    if msg is None:
+        guardnames, default = _parse_guards(guards)
+        if default:
+            msg = "implementation detail not available on {0}"
+        else:
+            msg = "implementation detail specific to {0}"
+        guardnames = sorted(guardnames.keys())
+        msg = msg.format(' or '.join(guardnames))
+    return unittest.skip(msg)
+
+def _parse_guards(guards):
+    # Returns a tuple ({platform_name: run_me}, default_value)
+    if not guards:
+        return ({'cpython': True}, False)
+    is_true = list(guards.values())[0]
+    assert list(guards.values()) == [is_true] * len(guards)   # all True or all False
+    return (guards, not is_true)
+
+# Use the following check to guard CPython's implementation-specific tests --
+# or to run them only on the implementation(s) guarded by the arguments.
+def check_impl_detail(**guards):
+    """This function returns True or False depending on the host platform.
+       Examples:
+          if check_impl_detail():               # only on CPython (default)
+          if check_impl_detail(jython=True):    # only on Jython
+          if check_impl_detail(cpython=False):  # everywhere except on CPython
+    """
+    guards, default = _parse_guards(guards)
+    return guards.get(platform.python_implementation().lower(), default)
+
+
+def _filter_suite(suite, pred):
+    """Recursively filter test cases in a suite based on a predicate."""
+    newtests = []
+    for test in suite._tests:
+        if isinstance(test, unittest.TestSuite):
+            _filter_suite(test, pred)
+            newtests.append(test)
+        else:
+            if pred(test):
+                newtests.append(test)
+    suite._tests = newtests
+
+
+def _run_suite(suite):
+    """Run tests from a unittest.TestSuite-derived class."""
+    if verbose:
+        runner = unittest.TextTestRunner(sys.stdout, verbosity=2,
+                                         failfast=failfast)
+    else:
+        runner = BasicTestRunner()
+
+    result = runner.run(suite)
+    if not result.wasSuccessful():
+        if len(result.errors) == 1 and not result.failures:
+            err = result.errors[0][1]
+        elif len(result.failures) == 1 and not result.errors:
+            err = result.failures[0][1]
+        else:
+            err = "multiple errors occurred"
+            if not verbose: err += "; run in verbose mode for details"
+        raise TestFailed(err)
+
+
+def run_unittest(*classes):
+    """Run tests from unittest.TestCase-derived classes."""
+    valid_types = (unittest.TestSuite, unittest.TestCase)
+    suite = unittest.TestSuite()
+    for cls in classes:
+        if isinstance(cls, str):
+            if cls in sys.modules:
+                suite.addTest(unittest.findTestCases(sys.modules[cls]))
+            else:
+                raise ValueError("str arguments must be keys in sys.modules")
+        elif isinstance(cls, valid_types):
+            suite.addTest(cls)
+        else:
+            suite.addTest(unittest.makeSuite(cls))
+    def case_pred(test):
+        if match_tests is None:
+            return True
+        for name in test.id().split("."):
+            if fnmatch.fnmatchcase(name, match_tests):
+                return True
+        return False
+    _filter_suite(suite, case_pred)
+    _run_suite(suite)
+
+
+#=======================================================================
+# doctest driver.
+
+def run_doctest(module, verbosity=None):
+    """Run doctest on the given module.  Return (#failures, #tests).
+
+    If optional argument verbosity is not specified (or is None), pass
+    support's belief about verbosity on to doctest.  Else doctest's
+    usual behavior is used (it searches sys.argv for -v).
+    """
+
+    import doctest
+
+    if verbosity is None:
+        verbosity = verbose
+    else:
+        verbosity = None
+
+    f, t = doctest.testmod(module, verbose=verbosity)
+    if f:
+        raise TestFailed("%d of %d doctests failed" % (f, t))
+    if verbose:
+        print('doctest (%s) ... %d tests with zero failures' %
+              (module.__name__, t))
+    return f, t
+
+
+#=======================================================================
+# Support for saving and restoring the imported modules.
+
+def modules_setup():
+    return sys.modules.copy(),
+
+def modules_cleanup(oldmodules):
+    # Encoders/decoders are registered permanently within the internal
+    # codec cache. If we destroy the corresponding modules their
+    # globals will be set to None which will trip up the cached functions.
+    encodings = [(k, v) for k, v in sys.modules.items()
+                 if k.startswith('encodings.')]
+    sys.modules.clear()
+    sys.modules.update(encodings)
+    # XXX: This kind of problem can affect more than just encodings. In particular
+    # extension modules (such as _ssl) don't cope with reloading properly.
+    # Really, test modules should be cleaning out the test specific modules they
+    # know they added (ala test_runpy) rather than relying on this function (as
+    # test_importhooks and test_pkg do currently).
+    # Implicitly imported *real* modules should be left alone (see issue 10556).
+    sys.modules.update(oldmodules)
+
+#=======================================================================
+# Threading support to prevent reporting refleaks when running regrtest.py -R
+
+# NOTE: we use thread._count() rather than threading.enumerate() (or the
+# moral equivalent thereof) because a threading.Thread object is still alive
+# until its __bootstrap() method has returned, even after it has been
+# unregistered from the threading module.
+# thread._count(), on the other hand, only gets decremented *after* the
+# __bootstrap() method has returned, which gives us reliable reference counts
+# at the end of a test run.
+
+def threading_setup():
+    if _thread:
+        return _thread._count(), threading._dangling.copy()
+    else:
+        return 1, ()
+
+def threading_cleanup(*original_values):
+    if not _thread:
+        return
+    _MAX_COUNT = 10
+    for count in range(_MAX_COUNT):
+        values = _thread._count(), threading._dangling
+        if values == original_values:
+            break
+        time.sleep(0.1)
+        gc_collect()
+    # XXX print a warning in case of failure?
+
+def reap_threads(func):
+    """Use this function when threads are being used.  This will
+    ensure that the threads are cleaned up even when the test fails.
+    If threading is unavailable this function does nothing.
+    """
+    if not _thread:
+        return func
+
+    @functools.wraps(func)
+    def decorator(*args):
+        key = threading_setup()
+        try:
+            return func(*args)
+        finally:
+            threading_cleanup(*key)
+    return decorator
+
+def reap_children():
+    """Use this function at the end of test_main() whenever sub-processes
+    are started.  This will help ensure that no extra children (zombies)
+    stick around to hog resources and create problems when looking
+    for refleaks.
+    """
+
+    # Reap all our dead child processes so we don't leave zombies around.
+    # These hog resources and might be causing some of the buildbots to die.
+    if hasattr(os, 'waitpid'):
+        any_process = -1
+        while True:
+            try:
+                # This will raise an exception on Windows.  That's ok.
+                pid, status = os.waitpid(any_process, os.WNOHANG)
+                if pid == 0:
+                    break
+            except:
+                break
+
+@contextlib.contextmanager
+def swap_attr(obj, attr, new_val):
+    """Temporary swap out an attribute with a new object.
+
+    Usage:
+        with swap_attr(obj, "attr", 5):
+            ...
+
+        This will set obj.attr to 5 for the duration of the with: block,
+        restoring the old value at the end of the block. If `attr` doesn't
+        exist on `obj`, it will be created and then deleted at the end of the
+        block.
+    """
+    if hasattr(obj, attr):
+        real_val = getattr(obj, attr)
+        setattr(obj, attr, new_val)
+        try:
+            yield None
+        finally:
+            setattr(obj, attr, real_val)
+    else:
+        setattr(obj, attr, new_val)
+        try:
+            yield None
+        finally:
+            delattr(obj, attr)
+
+@contextlib.contextmanager
+def swap_item(obj, item, new_val):
+    """Temporary swap out an item with a new object.
+
+    Usage:
+        with swap_item(obj, "item", 5):
+            ...
+
+        This will set obj["item"] to 5 for the duration of the with: block,
+        restoring the old value at the end of the block. If `item` doesn't
+        exist on `obj`, it will be created and then deleted at the end of the
+        block.
+    """
+    if item in obj:
+        real_val = obj[item]
+        obj[item] = new_val
+        try:
+            yield None
+        finally:
+            obj[item] = real_val
+    else:
+        obj[item] = new_val
+        try:
+            yield None
+        finally:
+            del obj[item]
+
+def strip_python_stderr(stderr):
+    """Strip the stderr of a Python process from potential debug output
+    emitted by the interpreter.
+
+    This will typically be run on the result of the communicate() method
+    of a subprocess.Popen object.
+    """
+    stderr = re.sub(br"\[\d+ refs\]\r?\n?$", b"", stderr).strip()
+    return stderr
+
+def args_from_interpreter_flags():
+    """Return a list of command-line arguments reproducing the current
+    settings in sys.flags."""
+    flag_opt_map = {
+        'bytes_warning': 'b',
+        'dont_write_bytecode': 'B',
+        'hash_randomization': 'R',
+        'ignore_environment': 'E',
+        'no_user_site': 's',
+        'no_site': 'S',
+        'optimize': 'O',
+        'verbose': 'v',
+    }
+    args = []
+    for flag, opt in flag_opt_map.items():
+        v = getattr(sys.flags, flag)
+        if v > 0:
+            args.append('-' + opt * v)
+    return args
+
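+# Illustrative sketch, not part of the upstream module: re-launch the current
+# interpreter with equivalent flags (here it just runs a no-op program).
+def _example_rerun_interpreter():
+    import subprocess
+    cmd = [sys.executable] + args_from_interpreter_flags() + ['-c', 'pass']
+    return subprocess.call(cmd)
+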
+#============================================================
+# Support for assertions about logging.
+#============================================================
+
+class TestHandler(logging.handlers.BufferingHandler):
+    def __init__(self, matcher):
+        # BufferingHandler takes a "capacity" argument
+        # so as to know when to flush. As we're overriding
+        # shouldFlush anyway, we can set a capacity of zero.
+        # You can call flush() manually to clear out the
+        # buffer.
+        logging.handlers.BufferingHandler.__init__(self, 0)
+        self.matcher = matcher
+
+    def shouldFlush(self, record):
+        return False
+
+    def emit(self, record):
+        self.format(record)
+        self.buffer.append(record.__dict__)
+
+    def matches(self, **kwargs):
+        """
+        Look for a saved dict whose keys/values match the supplied arguments.
+        """
+        result = False
+        for d in self.buffer:
+            if self.matcher.matches(d, **kwargs):
+                result = True
+                break
+        return result
+
+class Matcher(object):
+
+    _partial_matches = ('msg', 'message')
+
+    def matches(self, d, **kwargs):
+        """
+        Try to match a single dict with the supplied arguments.
+
+        Keys whose values are strings and which are in self._partial_matches
+        will be checked for partial (i.e. substring) matches. You can extend
+        this scheme to (for example) do regular expression matching, etc.
+        """
+        result = True
+        for k in kwargs:
+            v = kwargs[k]
+            dv = d.get(k)
+            if not self.match_value(k, dv, v):
+                result = False
+                break
+        return result
+
+    def match_value(self, k, dv, v):
+        """
+        Try to match a single stored value (dv) with a supplied value (v).
+        """
+        if type(v) != type(dv):
+            result = False
+        elif type(dv) is not str or k not in self._partial_matches:
+            result = (v == dv)
+        else:
+            result = dv.find(v) >= 0
+        return result
+
+
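+# Illustrative sketch, not part of the upstream classes: attach TestHandler to
+# a throw-away logger and query the buffered records through a Matcher.  The
+# logger name and message are arbitrary; relies on the module's existing
+# logging.handlers import.
+def _example_test_handler():
+    handler = TestHandler(Matcher())
+    log = logging.getLogger('example.logger')
+    log.addHandler(handler)
+    log.warning('disk almost full')
+    return handler.matches(levelname='WARNING', message='disk almost')
+
+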
+_can_symlink = None # type: Any
+def can_symlink():
+    global _can_symlink
+    if _can_symlink is not None:
+        return _can_symlink
+    symlink_path = TESTFN + "can_symlink"
+    try:
+        os.symlink(TESTFN, symlink_path)
+        can = True
+    except (OSError, NotImplementedError, AttributeError):
+        can = False
+    else:
+        os.remove(symlink_path)
+    _can_symlink = can
+    return can
+
+def skip_unless_symlink(test):
+    """Skip decorator for tests that require functional symlink"""
+    ok = can_symlink()
+    msg = "Requires functional symlink implementation"
+    if ok:
+        return test
+    else:
+        return unittest.skip(msg)(test)
+
+def patch(test_instance, object_to_patch, attr_name, new_value):
+    """Override 'object_to_patch'.'attr_name' with 'new_value'.
+
+    Also, add a cleanup procedure to 'test_instance' to restore
+    'object_to_patch' value for 'attr_name'.
+    The 'attr_name' should be a valid attribute for 'object_to_patch'.
+
+    """
+    # check that 'attr_name' is a real attribute for 'object_to_patch'
+    # will raise AttributeError if it does not exist
+    getattr(object_to_patch, attr_name)
+
+    # keep a copy of the old value
+    attr_is_local = False
+    try:
+        old_value = object_to_patch.__dict__[attr_name]
+    except (AttributeError, KeyError):
+        old_value = getattr(object_to_patch, attr_name, None)
+    else:
+        attr_is_local = True
+
+    # restore the value when the test is done
+    def cleanup():
+        if attr_is_local:
+            setattr(object_to_patch, attr_name, old_value)
+        else:
+            delattr(object_to_patch, attr_name)
+
+    test_instance.addCleanup(cleanup)
+
+    # actually override the attribute
+    setattr(object_to_patch, attr_name, new_value)
diff --git a/test-data/stdlib-samples/3.2/test/test_base64.py b/test-data/stdlib-samples/3.2/test/test_base64.py
new file mode 100644
index 0000000..9e4dcf5
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_base64.py
@@ -0,0 +1,267 @@
+import unittest
+from test import support
+import base64
+import binascii
+import sys
+import subprocess
+
+from typing import Any
+
+
+
+class LegacyBase64TestCase(unittest.TestCase):
+    def test_encodebytes(self) -> None:
+        eq = self.assertEqual
+        eq(base64.encodebytes(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=\n")
+        eq(base64.encodebytes(b"a"), b"YQ==\n")
+        eq(base64.encodebytes(b"ab"), b"YWI=\n")
+        eq(base64.encodebytes(b"abc"), b"YWJj\n")
+        eq(base64.encodebytes(b""), b"")
+        eq(base64.encodebytes(b"abcdefghijklmnopqrstuvwxyz"
+                               b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+                               b"0123456789!@#0^&*();:<>,. []{}"),
+           b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
+           b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
+           b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n")
+        self.assertRaises(TypeError, base64.encodebytes, "")
+
+    def test_decodebytes(self) -> None:
+        eq = self.assertEqual
+        eq(base64.decodebytes(b"d3d3LnB5dGhvbi5vcmc=\n"), b"www.python.org")
+        eq(base64.decodebytes(b"YQ==\n"), b"a")
+        eq(base64.decodebytes(b"YWI=\n"), b"ab")
+        eq(base64.decodebytes(b"YWJj\n"), b"abc")
+        eq(base64.decodebytes(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
+                               b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
+                               b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"),
+           b"abcdefghijklmnopqrstuvwxyz"
+           b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+           b"0123456789!@#0^&*();:<>,. []{}")
+        eq(base64.decodebytes(b''), b'')
+        self.assertRaises(TypeError, base64.decodebytes, "")
+
+    def test_encode(self) -> None:
+        eq = self.assertEqual
+        from io import BytesIO
+        infp = BytesIO(b'abcdefghijklmnopqrstuvwxyz'
+                       b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+                       b'0123456789!@#0^&*();:<>,. []{}')
+        outfp = BytesIO()
+        base64.encode(infp, outfp)
+        eq(outfp.getvalue(),
+           b'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE'
+           b'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT'
+           b'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n')
+
+    def test_decode(self) -> None:
+        from io import BytesIO
+        infp = BytesIO(b'd3d3LnB5dGhvbi5vcmc=')
+        outfp = BytesIO()
+        base64.decode(infp, outfp)
+        self.assertEqual(outfp.getvalue(), b'www.python.org')
+
+
+class BaseXYTestCase(unittest.TestCase):
+    def test_b64encode(self) -> None:
+        eq = self.assertEqual
+        # Test default alphabet
+        eq(base64.b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=")
+        eq(base64.b64encode(b'\x00'), b'AA==')
+        eq(base64.b64encode(b"a"), b"YQ==")
+        eq(base64.b64encode(b"ab"), b"YWI=")
+        eq(base64.b64encode(b"abc"), b"YWJj")
+        eq(base64.b64encode(b""), b"")
+        eq(base64.b64encode(b"abcdefghijklmnopqrstuvwxyz"
+                            b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+                            b"0123456789!@#0^&*();:<>,. []{}"),
+           b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
+           b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
+           b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
+        # Test with arbitrary alternative characters
+        eq(base64.b64encode(b'\xd3V\xbeo\xf7\x1d', altchars=b'*$'), b'01a*b$cd')
+        # Check if passing a str object raises an error
+        self.assertRaises(TypeError, base64.b64encode, "")
+        self.assertRaises(TypeError, base64.b64encode, b"", altchars="")
+        # Test standard alphabet
+        eq(base64.standard_b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=")
+        eq(base64.standard_b64encode(b"a"), b"YQ==")
+        eq(base64.standard_b64encode(b"ab"), b"YWI=")
+        eq(base64.standard_b64encode(b"abc"), b"YWJj")
+        eq(base64.standard_b64encode(b""), b"")
+        eq(base64.standard_b64encode(b"abcdefghijklmnopqrstuvwxyz"
+                                     b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+                                     b"0123456789!@#0^&*();:<>,. []{}"),
+           b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
+           b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
+           b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
+        # Check if passing a str object raises an error
+        self.assertRaises(TypeError, base64.standard_b64encode, "")
+        self.assertRaises(TypeError, base64.standard_b64encode, b"", altchars="")
+        # Test with 'URL safe' alternative characters
+        eq(base64.urlsafe_b64encode(b'\xd3V\xbeo\xf7\x1d'), b'01a-b_cd')
+        # Check if passing a str object raises an error
+        self.assertRaises(TypeError, base64.urlsafe_b64encode, "")
+
+    def test_b64decode(self) -> None:
+        eq = self.assertEqual
+        eq(base64.b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org")
+        eq(base64.b64decode(b'AA=='), b'\x00')
+        eq(base64.b64decode(b"YQ=="), b"a")
+        eq(base64.b64decode(b"YWI="), b"ab")
+        eq(base64.b64decode(b"YWJj"), b"abc")
+        eq(base64.b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
+                            b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
+                            b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
+           b"abcdefghijklmnopqrstuvwxyz"
+           b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+           b"0123456789!@#0^&*();:<>,. []{}")
+        eq(base64.b64decode(b''), b'')
+        # Test with arbitrary alternative characters
+        eq(base64.b64decode(b'01a*b$cd', altchars=b'*$'), b'\xd3V\xbeo\xf7\x1d')
+        # Check if passing a str object raises an error
+        self.assertRaises(TypeError, base64.b64decode, "")
+        self.assertRaises(TypeError, base64.b64decode, b"", altchars="")
+        # Test standard alphabet
+        eq(base64.standard_b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org")
+        eq(base64.standard_b64decode(b"YQ=="), b"a")
+        eq(base64.standard_b64decode(b"YWI="), b"ab")
+        eq(base64.standard_b64decode(b"YWJj"), b"abc")
+        eq(base64.standard_b64decode(b""), b"")
+        eq(base64.standard_b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
+                                     b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
+                                     b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
+           b"abcdefghijklmnopqrstuvwxyz"
+           b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+           b"0123456789!@#0^&*();:<>,. []{}")
+        # Check if passing a str object raises an error
+        self.assertRaises(TypeError, base64.standard_b64decode, "")
+        self.assertRaises(TypeError, base64.standard_b64decode, b"", altchars="")
+        # Test with 'URL safe' alternative characters
+        eq(base64.urlsafe_b64decode(b'01a-b_cd'), b'\xd3V\xbeo\xf7\x1d')
+        self.assertRaises(TypeError, base64.urlsafe_b64decode, "")
+
+    def test_b64decode_padding_error(self) -> None:
+        self.assertRaises(binascii.Error, base64.b64decode, b'abc')
+
+    def test_b64decode_invalid_chars(self) -> None:
+        # issue 1466065: Test some invalid characters.
+        tests = ((b'%3d==', b'\xdd'),
+                 (b'$3d==', b'\xdd'),
+                 (b'[==', b''),
+                 (b'YW]3=', b'am'),
+                 (b'3{d==', b'\xdd'),
+                 (b'3d}==', b'\xdd'),
+                 (b'@@', b''),
+                 (b'!', b''),
+                 (b'YWJj\nYWI=', b'abcab'))
+        for bstr, res in tests:
+            self.assertEqual(base64.b64decode(bstr), res)
+            with self.assertRaises(binascii.Error):
+                base64.b64decode(bstr, validate=True)
+
+    def test_b32encode(self) -> None:
+        eq = self.assertEqual
+        eq(base64.b32encode(b''), b'')
+        eq(base64.b32encode(b'\x00'), b'AA======')
+        eq(base64.b32encode(b'a'), b'ME======')
+        eq(base64.b32encode(b'ab'), b'MFRA====')
+        eq(base64.b32encode(b'abc'), b'MFRGG===')
+        eq(base64.b32encode(b'abcd'), b'MFRGGZA=')
+        eq(base64.b32encode(b'abcde'), b'MFRGGZDF')
+        self.assertRaises(TypeError, base64.b32encode, "")
+
+    def test_b32decode(self) -> None:
+        eq = self.assertEqual
+        eq(base64.b32decode(b''), b'')
+        eq(base64.b32decode(b'AA======'), b'\x00')
+        eq(base64.b32decode(b'ME======'), b'a')
+        eq(base64.b32decode(b'MFRA===='), b'ab')
+        eq(base64.b32decode(b'MFRGG==='), b'abc')
+        eq(base64.b32decode(b'MFRGGZA='), b'abcd')
+        eq(base64.b32decode(b'MFRGGZDF'), b'abcde')
+        self.assertRaises(TypeError, base64.b32decode, "")
+
+    def test_b32decode_casefold(self) -> None:
+        eq = self.assertEqual
+        eq(base64.b32decode(b'', True), b'')
+        eq(base64.b32decode(b'ME======', True), b'a')
+        eq(base64.b32decode(b'MFRA====', True), b'ab')
+        eq(base64.b32decode(b'MFRGG===', True), b'abc')
+        eq(base64.b32decode(b'MFRGGZA=', True), b'abcd')
+        eq(base64.b32decode(b'MFRGGZDF', True), b'abcde')
+        # Lower cases
+        eq(base64.b32decode(b'me======', True), b'a')
+        eq(base64.b32decode(b'mfra====', True), b'ab')
+        eq(base64.b32decode(b'mfrgg===', True), b'abc')
+        eq(base64.b32decode(b'mfrggza=', True), b'abcd')
+        eq(base64.b32decode(b'mfrggzdf', True), b'abcde')
+        # Expected exceptions
+        self.assertRaises(TypeError, base64.b32decode, b'me======')
+        # Mapping zero and one
+        eq(base64.b32decode(b'MLO23456'), b'b\xdd\xad\xf3\xbe')
+        eq(base64.b32decode(b'M1023456', map01=b'L'), b'b\xdd\xad\xf3\xbe')
+        eq(base64.b32decode(b'M1023456', map01=b'I'), b'b\x1d\xad\xf3\xbe')
+        self.assertRaises(TypeError, base64.b32decode, b"", map01="")
+
+    def test_b32decode_error(self) -> None:
+        self.assertRaises(binascii.Error, base64.b32decode, b'abc')
+        self.assertRaises(binascii.Error, base64.b32decode, b'ABCDEF==')
+
+    def test_b16encode(self) -> None:
+        eq = self.assertEqual
+        eq(base64.b16encode(b'\x01\x02\xab\xcd\xef'), b'0102ABCDEF')
+        eq(base64.b16encode(b'\x00'), b'00')
+        self.assertRaises(TypeError, base64.b16encode, "")
+
+    def test_b16decode(self) -> None:
+        eq = self.assertEqual
+        eq(base64.b16decode(b'0102ABCDEF'), b'\x01\x02\xab\xcd\xef')
+        eq(base64.b16decode(b'00'), b'\x00')
+        # Lower case is not allowed without a flag
+        self.assertRaises(binascii.Error, base64.b16decode, b'0102abcdef')
+        # Case fold
+        eq(base64.b16decode(b'0102abcdef', True), b'\x01\x02\xab\xcd\xef')
+        self.assertRaises(TypeError, base64.b16decode, "")
+
+    def test_ErrorHeritage(self) -> None:
+        self.assertTrue(issubclass(binascii.Error, ValueError))
+
+
+
+class TestMain(unittest.TestCase):
+    def get_output(self, *args_tuple: str, **options: Any) -> Any:
+        args = [sys.executable, '-m', 'base64'] + list(args_tuple)
+        return subprocess.check_output(args, **options)
+
+    def test_encode_decode(self) -> None:
+        output = self.get_output('-t')
+        self.assertSequenceEqual(output.splitlines(), [
+            b"b'Aladdin:open sesame'",
+            br"b'QWxhZGRpbjpvcGVuIHNlc2FtZQ==\n'",
+            b"b'Aladdin:open sesame'",
+        ])
+
+    def test_encode_file(self) -> None:
+        with open(support.TESTFN, 'wb') as fp:
+            fp.write(b'a\xffb\n')
+
+        output = self.get_output('-e', support.TESTFN)
+        self.assertEqual(output.rstrip(), b'Yf9iCg==')
+
+        with open(support.TESTFN, 'rb') as fp:
+            output = self.get_output('-e', stdin=fp)
+        self.assertEqual(output.rstrip(), b'Yf9iCg==')
+
+    def test_decode(self) -> None:
+        with open(support.TESTFN, 'wb') as fp:
+            fp.write(b'Yf9iCg==')
+        output = self.get_output('-d', support.TESTFN)
+        self.assertEqual(output.rstrip(), b'a\xffb')
+
+
+
+def test_main() -> None:
+    support.run_unittest(__name__)
+
+if __name__ == '__main__':
+    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_fnmatch.py b/test-data/stdlib-samples/3.2/test/test_fnmatch.py
new file mode 100644
index 0000000..0f5a23b
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_fnmatch.py
@@ -0,0 +1,93 @@
+"""Test cases for the fnmatch module."""
+
+from test import support
+import unittest
+
+from fnmatch import fnmatch, fnmatchcase, translate, filter
+
+from typing import Any, AnyStr, Callable
+
+class FnmatchTestCase(unittest.TestCase):
+
+    def check_match(self, filename: AnyStr, pattern: AnyStr,
+                    should_match: int = 1,
+                    fn: Any = fnmatch) -> None:  # see #270
+        if should_match:
+            self.assertTrue(fn(filename, pattern),
+                         "expected %r to match pattern %r"
+                         % (filename, pattern))
+        else:
+            self.assertTrue(not fn(filename, pattern),
+                         "expected %r not to match pattern %r"
+                         % (filename, pattern))
+
+    def test_fnmatch(self) -> None:
+        check = self.check_match
+        check('abc', 'abc')
+        check('abc', '?*?')
+        check('abc', '???*')
+        check('abc', '*???')
+        check('abc', '???')
+        check('abc', '*')
+        check('abc', 'ab[cd]')
+        check('abc', 'ab[!de]')
+        check('abc', 'ab[de]', 0)
+        check('a', '??', 0)
+        check('a', 'b', 0)
+
+        # these test that '\' is handled correctly in character sets;
+        # see SF bug #409651
+        check('\\', r'[\]')
+        check('a', r'[!\]')
+        check('\\', r'[!\]', 0)
+
+        # test that filenames with newlines in them are handled correctly.
+        # http://bugs.python.org/issue6665
+        check('foo\nbar', 'foo*')
+        check('foo\nbar\n', 'foo*')
+        check('\nfoo', 'foo*', False)
+        check('\n', '*')
+
+    def test_mix_bytes_str(self) -> None:
+        self.assertRaises(TypeError, fnmatch, 'test', b'*')
+        self.assertRaises(TypeError, fnmatch, b'test', '*')
+        self.assertRaises(TypeError, fnmatchcase, 'test', b'*')
+        self.assertRaises(TypeError, fnmatchcase, b'test', '*')
+
+    def test_fnmatchcase(self) -> None:
+        check = self.check_match
+        check('AbC', 'abc', 0, fnmatchcase)
+        check('abc', 'AbC', 0, fnmatchcase)
+
+    def test_bytes(self) -> None:
+        self.check_match(b'test', b'te*')
+        self.check_match(b'test\xff', b'te*\xff')
+        self.check_match(b'foo\nbar', b'foo*')
+
+class TranslateTestCase(unittest.TestCase):
+
+    def test_translate(self) -> None:
+        self.assertEqual(translate('*'), '.*\Z(?ms)')
+        self.assertEqual(translate('?'), '.\Z(?ms)')
+        self.assertEqual(translate('a?b*'), 'a.b.*\Z(?ms)')
+        self.assertEqual(translate('[abc]'), '[abc]\Z(?ms)')
+        self.assertEqual(translate('[]]'), '[]]\Z(?ms)')
+        self.assertEqual(translate('[!x]'), '[^x]\Z(?ms)')
+        self.assertEqual(translate('[^x]'), '[\\^x]\Z(?ms)')
+        self.assertEqual(translate('[x'), '\\[x\Z(?ms)')
+
+
+class FilterTestCase(unittest.TestCase):
+
+    def test_filter(self) -> None:
+        self.assertEqual(filter(['a', 'b'], 'a'), ['a'])
+
+
+def test_main() -> None:
+    support.run_unittest(FnmatchTestCase,
+                         TranslateTestCase,
+                         FilterTestCase)
+
+
+if __name__ == "__main__":
+    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_genericpath.py b/test-data/stdlib-samples/3.2/test/test_genericpath.py
new file mode 100644
index 0000000..df0e107
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_genericpath.py
@@ -0,0 +1,313 @@
+"""
+Tests common to genericpath, macpath, ntpath and posixpath
+"""
+
+import unittest
+from test import support
+import os
+
+import genericpath
+import imp
+imp.reload(genericpath) # Make sure we are using the local copy
+
+import sys
+from typing import Any, List
+
+
+def safe_rmdir(dirname: str) -> None:
+    try:
+        os.rmdir(dirname)
+    except OSError:
+        pass
+
+
+class GenericTest(unittest.TestCase):
+    # The path module to be tested
+    pathmodule = genericpath  # type: Any
+    common_attributes = ['commonprefix', 'getsize', 'getatime', 'getctime',
+                         'getmtime', 'exists', 'isdir', 'isfile']
+    attributes = []  # type: List[str]
+
+    def test_no_argument(self) -> None:
+        for attr in self.common_attributes + self.attributes:
+            with self.assertRaises(TypeError):
+                getattr(self.pathmodule, attr)()
+                self.fail("{}.{}() did not raise a TypeError"
+                          .format(self.pathmodule.__name__, attr))
+
+    def test_commonprefix(self) -> None:
+        commonprefix = self.pathmodule.commonprefix
+        self.assertEqual(
+            commonprefix([]),
+            ""
+        )
+        self.assertEqual(
+            commonprefix(["/home/swenson/spam", "/home/swen/spam"]),
+            "/home/swen"
+        )
+        self.assertEqual(
+            commonprefix(["/home/swen/spam", "/home/swen/eggs"]),
+            "/home/swen/"
+        )
+        self.assertEqual(
+            commonprefix(["/home/swen/spam", "/home/swen/spam"]),
+            "/home/swen/spam"
+        )
+        self.assertEqual(
+            commonprefix(["home:swenson:spam", "home:swen:spam"]),
+            "home:swen"
+        )
+        self.assertEqual(
+            commonprefix([":home:swen:spam", ":home:swen:eggs"]),
+            ":home:swen:"
+        )
+        self.assertEqual(
+            commonprefix([":home:swen:spam", ":home:swen:spam"]),
+            ":home:swen:spam"
+        )
+
+        self.assertEqual(
+            commonprefix([b"/home/swenson/spam", b"/home/swen/spam"]),
+            b"/home/swen"
+        )
+        self.assertEqual(
+            commonprefix([b"/home/swen/spam", b"/home/swen/eggs"]),
+            b"/home/swen/"
+        )
+        self.assertEqual(
+            commonprefix([b"/home/swen/spam", b"/home/swen/spam"]),
+            b"/home/swen/spam"
+        )
+        self.assertEqual(
+            commonprefix([b"home:swenson:spam", b"home:swen:spam"]),
+            b"home:swen"
+        )
+        self.assertEqual(
+            commonprefix([b":home:swen:spam", b":home:swen:eggs"]),
+            b":home:swen:"
+        )
+        self.assertEqual(
+            commonprefix([b":home:swen:spam", b":home:swen:spam"]),
+            b":home:swen:spam"
+        )
+
+        testlist = ['', 'abc', 'Xbcd', 'Xb', 'XY', 'abcd',
+                    'aXc', 'abd', 'ab', 'aX', 'abcX']
+        for s1 in testlist:
+            for s2 in testlist:
+                p = commonprefix([s1, s2])
+                self.assertTrue(s1.startswith(p))
+                self.assertTrue(s2.startswith(p))
+                if s1 != s2:
+                    n = len(p)
+                    self.assertNotEqual(s1[n:n+1], s2[n:n+1])
+
+    def test_getsize(self) -> None:
+        f = open(support.TESTFN, "wb")
+        try:
+            f.write(b"foo")
+            f.close()
+            self.assertEqual(self.pathmodule.getsize(support.TESTFN), 3)
+        finally:
+            if not f.closed:
+                f.close()
+            support.unlink(support.TESTFN)
+
+    def test_time(self) -> None:
+        f = open(support.TESTFN, "wb")
+        try:
+            f.write(b"foo")
+            f.close()
+            f = open(support.TESTFN, "ab")
+            f.write(b"bar")
+            f.close()
+            f = open(support.TESTFN, "rb")
+            d = f.read()
+            f.close()
+            self.assertEqual(d, b"foobar")
+
+            self.assertLessEqual(
+                self.pathmodule.getctime(support.TESTFN),
+                self.pathmodule.getmtime(support.TESTFN)
+            )
+        finally:
+            if not f.closed:
+                f.close()
+            support.unlink(support.TESTFN)
+
+    def test_exists(self) -> None:
+        self.assertIs(self.pathmodule.exists(support.TESTFN), False)
+        f = open(support.TESTFN, "wb")
+        try:
+            f.write(b"foo")
+            f.close()
+            self.assertIs(self.pathmodule.exists(support.TESTFN), True)
+            if not self.pathmodule == genericpath:
+                self.assertIs(self.pathmodule.lexists(support.TESTFN),
+                              True)
+        finally:
+            if not f.closed:
+                f.close()
+            support.unlink(support.TESTFN)
+
+    def test_isdir(self) -> None:
+        self.assertIs(self.pathmodule.isdir(support.TESTFN), False)
+        f = open(support.TESTFN, "wb")
+        try:
+            f.write(b"foo")
+            f.close()
+            self.assertIs(self.pathmodule.isdir(support.TESTFN), False)
+            os.remove(support.TESTFN)
+            os.mkdir(support.TESTFN)
+            self.assertIs(self.pathmodule.isdir(support.TESTFN), True)
+            os.rmdir(support.TESTFN)
+        finally:
+            if not f.closed:
+                f.close()
+            support.unlink(support.TESTFN)
+            safe_rmdir(support.TESTFN)
+
+    def test_isfile(self) -> None:
+        self.assertIs(self.pathmodule.isfile(support.TESTFN), False)
+        f = open(support.TESTFN, "wb")
+        try:
+            f.write(b"foo")
+            f.close()
+            self.assertIs(self.pathmodule.isfile(support.TESTFN), True)
+            os.remove(support.TESTFN)
+            os.mkdir(support.TESTFN)
+            self.assertIs(self.pathmodule.isfile(support.TESTFN), False)
+            os.rmdir(support.TESTFN)
+        finally:
+            if not f.closed:
+                f.close()
+            support.unlink(support.TESTFN)
+            safe_rmdir(support.TESTFN)
+
+
+# The following TestCase is not supposed to be run from test_genericpath.
+# It is inherited by the other path test modules (macpath, ntpath, posixpath).
+
+class CommonTest(GenericTest):
+    # The path module to be tested
+    pathmodule = None # type: Any
+    common_attributes = GenericTest.common_attributes + [
+        # Properties
+        'curdir', 'pardir', 'extsep', 'sep',
+        'pathsep', 'defpath', 'altsep', 'devnull',
+        # Methods
+        'normcase', 'splitdrive', 'expandvars', 'normpath', 'abspath',
+        'join', 'split', 'splitext', 'isabs', 'basename', 'dirname',
+        'lexists', 'islink', 'ismount', 'expanduser', 'normpath', 'realpath',
+    ]
+
+    def test_normcase(self) -> None:
+        normcase = self.pathmodule.normcase
+        # check that normcase() is idempotent
+        for p in ["FoO/./BaR", b"FoO/./BaR"]:
+            p = normcase(p)
+            self.assertEqual(p, normcase(p))
+
+        self.assertEqual(normcase(''), '')
+        self.assertEqual(normcase(b''), b'')
+
+        # check that normcase raises a TypeError for invalid types
+        for path in (None, True, 0, 2.5, [], bytearray(b''), {'o','o'}):
+            self.assertRaises(TypeError, normcase, path)
+
+    def test_splitdrive(self) -> None:
+        # splitdrive for non-NT paths
+        splitdrive = self.pathmodule.splitdrive
+        self.assertEqual(splitdrive("/foo/bar"), ("", "/foo/bar"))
+        self.assertEqual(splitdrive("foo:bar"), ("", "foo:bar"))
+        self.assertEqual(splitdrive(":foo:bar"), ("", ":foo:bar"))
+
+        self.assertEqual(splitdrive(b"/foo/bar"), (b"", b"/foo/bar"))
+        self.assertEqual(splitdrive(b"foo:bar"), (b"", b"foo:bar"))
+        self.assertEqual(splitdrive(b":foo:bar"), (b"", b":foo:bar"))
+
+    def test_expandvars(self) -> None:
+        if self.pathmodule.__name__ == 'macpath':
+            self.skipTest('macpath.expandvars is a stub')
+        expandvars = self.pathmodule.expandvars
+        with support.EnvironmentVarGuard() as env:
+            env.clear()
+            env["foo"] = "bar"
+            env["{foo"] = "baz1"
+            env["{foo}"] = "baz2"
+            self.assertEqual(expandvars("foo"), "foo")
+            self.assertEqual(expandvars("$foo bar"), "bar bar")
+            self.assertEqual(expandvars("${foo}bar"), "barbar")
+            self.assertEqual(expandvars("$[foo]bar"), "$[foo]bar")
+            self.assertEqual(expandvars("$bar bar"), "$bar bar")
+            self.assertEqual(expandvars("$?bar"), "$?bar")
+            self.assertEqual(expandvars("${foo}bar"), "barbar")
+            self.assertEqual(expandvars("$foo}bar"), "bar}bar")
+            self.assertEqual(expandvars("${foo"), "${foo")
+            self.assertEqual(expandvars("${{foo}}"), "baz1}")
+            self.assertEqual(expandvars("$foo$foo"), "barbar")
+            self.assertEqual(expandvars("$bar$bar"), "$bar$bar")
+
+            self.assertEqual(expandvars(b"foo"), b"foo")
+            self.assertEqual(expandvars(b"$foo bar"), b"bar bar")
+            self.assertEqual(expandvars(b"${foo}bar"), b"barbar")
+            self.assertEqual(expandvars(b"$[foo]bar"), b"$[foo]bar")
+            self.assertEqual(expandvars(b"$bar bar"), b"$bar bar")
+            self.assertEqual(expandvars(b"$?bar"), b"$?bar")
+            self.assertEqual(expandvars(b"${foo}bar"), b"barbar")
+            self.assertEqual(expandvars(b"$foo}bar"), b"bar}bar")
+            self.assertEqual(expandvars(b"${foo"), b"${foo")
+            self.assertEqual(expandvars(b"${{foo}}"), b"baz1}")
+            self.assertEqual(expandvars(b"$foo$foo"), b"barbar")
+            self.assertEqual(expandvars(b"$bar$bar"), b"$bar$bar")
+
+    def test_abspath(self) -> None:
+        self.assertIn("foo", self.pathmodule.abspath("foo"))
+        self.assertIn(b"foo", self.pathmodule.abspath(b"foo"))
+
+        # Abspath returns bytes when the arg is bytes
+        for path in (b'', b'foo', b'f\xf2\xf2', b'/foo', b'C:\\'):
+            self.assertIsInstance(self.pathmodule.abspath(path), bytes)
+
+    def test_realpath(self) -> None:
+        self.assertIn("foo", self.pathmodule.realpath("foo"))
+        self.assertIn(b"foo", self.pathmodule.realpath(b"foo"))
+
+    def test_normpath_issue5827(self) -> None:
+        # Make sure normpath preserves unicode
+        for path in ('', '.', '/', '\\', '///foo/.//bar//'):
+            self.assertIsInstance(self.pathmodule.normpath(path), str)
+
+    def test_abspath_issue3426(self) -> None:
+        # Check that abspath returns unicode when the arg is unicode
+        # with both ASCII and non-ASCII cwds.
+        abspath = self.pathmodule.abspath
+        for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'):
+            self.assertIsInstance(abspath(path), str)
+
+        unicwd = '\xe7w\xf0'
+        try:
+            fsencoding = support.TESTFN_ENCODING or "ascii"
+            unicwd.encode(fsencoding)
+        except (AttributeError, UnicodeEncodeError):
+            # FS encoding is probably ASCII
+            pass
+        else:
+            with support.temp_cwd(unicwd):
+                for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'):
+                    self.assertIsInstance(abspath(path), str)
+
+    @unittest.skipIf(sys.platform == 'darwin',
+        "Mac OS X denies the creation of a directory with an invalid utf8 name")
+    def test_nonascii_abspath(self) -> None:
+        # Test non-ASCII, non-UTF8 bytes in the path.
+        with support.temp_cwd(b'\xe7w\xf0'):
+            self.test_abspath()
+
+
+def test_main() -> None:
+    support.run_unittest(GenericTest)
+
+
+if __name__=="__main__":
+    test_main()
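
The commonprefix assertions above rely on genericpath.commonprefix (and
os.path.commonprefix) comparing its arguments character by character, so the
result need not end on a path-separator boundary. A minimal sketch, using the
same inputs as the test:

    import os.path

    # Character-wise longest common prefix; "/home/swen" is not a directory
    # component of either argument.
    assert os.path.commonprefix(["/home/swenson/spam",
                                 "/home/swen/spam"]) == "/home/swen"
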
diff --git a/test-data/stdlib-samples/3.2/test/test_getopt.py b/test-data/stdlib-samples/3.2/test/test_getopt.py
new file mode 100644
index 0000000..3320552
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_getopt.py
@@ -0,0 +1,190 @@
+# test_getopt.py
+# David Goodger <dgoodger at bigfoot.com> 2000-08-19
+
+from test.support import verbose, run_doctest, run_unittest, EnvironmentVarGuard
+import unittest
+
+import getopt
+
+from typing import cast, Any
+
+sentinel = object()
+
+class GetoptTests(unittest.TestCase):
+    def setUp(self) -> None:
+        self.env = EnvironmentVarGuard()
+        if "POSIXLY_CORRECT" in self.env:
+            del self.env["POSIXLY_CORRECT"]
+
+    def tearDown(self) -> None:
+        self.env.__exit__()
+        del self.env
+
+    def assertError(self, *args: Any, **kwargs: Any) -> None:
+        # JLe: work around mypy bug #229
+        cast(Any, self.assertRaises)(getopt.GetoptError, *args, **kwargs)
+
+    def test_short_has_arg(self) -> None:
+        self.assertTrue(getopt.short_has_arg('a', 'a:'))
+        self.assertFalse(getopt.short_has_arg('a', 'a'))
+        self.assertError(getopt.short_has_arg, 'a', 'b')
+
+    def test_long_has_args(self) -> None:
+        has_arg, option = getopt.long_has_args('abc', ['abc='])
+        self.assertTrue(has_arg)
+        self.assertEqual(option, 'abc')
+
+        has_arg, option = getopt.long_has_args('abc', ['abc'])
+        self.assertFalse(has_arg)
+        self.assertEqual(option, 'abc')
+
+        has_arg, option = getopt.long_has_args('abc', ['abcd'])
+        self.assertFalse(has_arg)
+        self.assertEqual(option, 'abcd')
+
+        self.assertError(getopt.long_has_args, 'abc', ['def'])
+        self.assertError(getopt.long_has_args, 'abc', [])
+        self.assertError(getopt.long_has_args, 'abc', ['abcd','abcde'])
+
+    def test_do_shorts(self) -> None:
+        opts, args = getopt.do_shorts([], 'a', 'a', [])
+        self.assertEqual(opts, [('-a', '')])
+        self.assertEqual(args, [])
+
+        opts, args = getopt.do_shorts([], 'a1', 'a:', [])
+        self.assertEqual(opts, [('-a', '1')])
+        self.assertEqual(args, [])
+
+        #opts, args = getopt.do_shorts([], 'a=1', 'a:', [])
+        #self.assertEqual(opts, [('-a', '1')])
+        #self.assertEqual(args, [])
+
+        opts, args = getopt.do_shorts([], 'a', 'a:', ['1'])
+        self.assertEqual(opts, [('-a', '1')])
+        self.assertEqual(args, [])
+
+        opts, args = getopt.do_shorts([], 'a', 'a:', ['1', '2'])
+        self.assertEqual(opts, [('-a', '1')])
+        self.assertEqual(args, ['2'])
+
+        self.assertError(getopt.do_shorts, [], 'a1', 'a', [])
+        self.assertError(getopt.do_shorts, [], 'a', 'a:', [])
+
+    def test_do_longs(self) -> None:
+        opts, args = getopt.do_longs([], 'abc', ['abc'], [])
+        self.assertEqual(opts, [('--abc', '')])
+        self.assertEqual(args, [])
+
+        opts, args = getopt.do_longs([], 'abc=1', ['abc='], [])
+        self.assertEqual(opts, [('--abc', '1')])
+        self.assertEqual(args, [])
+
+        opts, args = getopt.do_longs([], 'abc=1', ['abcd='], [])
+        self.assertEqual(opts, [('--abcd', '1')])
+        self.assertEqual(args, [])
+
+        opts, args = getopt.do_longs([], 'abc', ['ab', 'abc', 'abcd'], [])
+        self.assertEqual(opts, [('--abc', '')])
+        self.assertEqual(args, [])
+
+        # Much like the preceding, except with a non-alpha character ("-") in
+        # the option name that precedes "="; this case used to fail, see
+        # http://python.org/sf/126863
+        opts, args = getopt.do_longs([], 'foo=42', ['foo-bar', 'foo=',], [])
+        self.assertEqual(opts, [('--foo', '42')])
+        self.assertEqual(args, [])
+
+        self.assertError(getopt.do_longs, [], 'abc=1', ['abc'], [])
+        self.assertError(getopt.do_longs, [], 'abc', ['abc='], [])
+
+    def test_getopt(self) -> None:
+        # note: the empty string between '-a' and '--beta' is significant:
+        # it simulates an empty string option argument ('-a ""') on the
+        # command line.
+        cmdline = ['-a', '1', '-b', '--alpha=2', '--beta', '-a', '3', '-a',
+                   '', '--beta', 'arg1', 'arg2']
+
+        opts, args = getopt.getopt(cmdline, 'a:b', ['alpha=', 'beta'])
+        self.assertEqual(opts, [('-a', '1'), ('-b', ''),
+                                ('--alpha', '2'), ('--beta', ''),
+                                ('-a', '3'), ('-a', ''), ('--beta', '')])
+        # Note ambiguity of ('-b', '') and ('-a', '') above. This must be
+        # accounted for in the code that calls getopt().
+        self.assertEqual(args, ['arg1', 'arg2'])
+
+        self.assertError(getopt.getopt, cmdline, 'a:b', ['alpha', 'beta'])
+
+    def test_gnu_getopt(self) -> None:
+        # Test handling of GNU style scanning mode.
+        cmdline = ['-a', 'arg1', '-b', '1', '--alpha', '--beta=2']
+
+        # GNU style
+        opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta='])
+        self.assertEqual(args, ['arg1'])
+        self.assertEqual(opts, [('-a', ''), ('-b', '1'),
+                                ('--alpha', ''), ('--beta', '2')])
+
+        # recognize "-" as an argument
+        opts, args = getopt.gnu_getopt(['-a', '-', '-b', '-'], 'ab:', [])
+        self.assertEqual(args, ['-'])
+        self.assertEqual(opts, [('-a', ''), ('-b', '-')])
+
+        # Posix style via +
+        opts, args = getopt.gnu_getopt(cmdline, '+ab:', ['alpha', 'beta='])
+        self.assertEqual(opts, [('-a', '')])
+        self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2'])
+
+        # Posix style via POSIXLY_CORRECT
+        self.env["POSIXLY_CORRECT"] = "1"
+        opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta='])
+        self.assertEqual(opts, [('-a', '')])
+        self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2'])
+
+    def test_libref_examples(self) -> None:
+        s = """
+        Examples from the Library Reference:  Doc/lib/libgetopt.tex
+
+        An example using only Unix style options:
+
+
+        >>> import getopt
+        >>> args = '-a -b -cfoo -d bar a1 a2'.split()
+        >>> args
+        ['-a', '-b', '-cfoo', '-d', 'bar', 'a1', 'a2']
+        >>> optlist, args = getopt.getopt(args, 'abc:d:')
+        >>> optlist
+        [('-a', ''), ('-b', ''), ('-c', 'foo'), ('-d', 'bar')]
+        >>> args
+        ['a1', 'a2']
+
+        Using long option names is equally easy:
+
+
+        >>> s = '--condition=foo --testing --output-file abc.def -x a1 a2'
+        >>> args = s.split()
+        >>> args
+        ['--condition=foo', '--testing', '--output-file', 'abc.def', '-x', 'a1', 'a2']
+        >>> optlist, args = getopt.getopt(args, 'x', [
+        ...     'condition=', 'output-file=', 'testing'])
+        >>> optlist
+        [('--condition', 'foo'), ('--testing', ''), ('--output-file', 'abc.def'), ('-x', '')]
+        >>> args
+        ['a1', 'a2']
+        """
+
+        import types
+        m = types.ModuleType("libreftest", s)
+        run_doctest(m, verbose)
+
+    def test_issue4629(self) -> None:
+        longopts, shortopts = getopt.getopt(['--help='], '', ['help='])
+        self.assertEqual(longopts, [('--help', '')])
+        longopts, shortopts = getopt.getopt(['--help=x'], '', ['help='])
+        self.assertEqual(longopts, [('--help', 'x')])
+        self.assertRaises(getopt.GetoptError, getopt.getopt, ['--help='], '', ['help'])
+
+def test_main() -> None:
+    run_unittest(GetoptTests)
+
+if __name__ == "__main__":
+    test_main()
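
The contrast exercised by test_getopt() and test_gnu_getopt() above is that
getopt.getopt() stops scanning at the first non-option argument, while
getopt.gnu_getopt() keeps collecting options after it unless POSIXLY_CORRECT
is set or the short-option string starts with '+'. A small sketch of that
difference, assuming POSIXLY_CORRECT is not set in the environment:

    import getopt

    argv = ['-a', 'arg1', '-b', '1']

    # POSIX behaviour: scanning stops at the first positional argument.
    opts, rest = getopt.getopt(argv, 'ab:')
    assert opts == [('-a', '')] and rest == ['arg1', '-b', '1']

    # GNU behaviour: options after positional arguments are still recognised.
    opts, rest = getopt.gnu_getopt(argv, 'ab:')
    assert opts == [('-a', ''), ('-b', '1')] and rest == ['arg1']
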
diff --git a/test-data/stdlib-samples/3.2/test/test_glob.py b/test-data/stdlib-samples/3.2/test/test_glob.py
new file mode 100644
index 0000000..08c8932
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_glob.py
@@ -0,0 +1,122 @@
+import unittest
+from test.support import run_unittest, TESTFN, skip_unless_symlink, can_symlink
+import glob
+import os
+import shutil
+
+from typing import TypeVar, Iterable, List, cast
+
+T = TypeVar('T')
+
+class GlobTests(unittest.TestCase):
+
+    tempdir = ''
+
+    # JLe: work around mypy issue #231
+    def norm(self, first: str, *parts: str) -> str:
+        return os.path.normpath(os.path.join(self.tempdir, first, *parts))
+
+    def mktemp(self, *parts: str) -> None:
+        filename = self.norm(*parts)
+        base, file = os.path.split(filename)
+        if not os.path.exists(base):
+            os.makedirs(base)
+        f = open(filename, 'w')
+        f.close()
+
+    def setUp(self) -> None:
+        self.tempdir = TESTFN+"_dir"
+        self.mktemp('a', 'D')
+        self.mktemp('aab', 'F')
+        self.mktemp('aaa', 'zzzF')
+        self.mktemp('ZZZ')
+        self.mktemp('a', 'bcd', 'EF')
+        self.mktemp('a', 'bcd', 'efg', 'ha')
+        if can_symlink():
+            os.symlink(self.norm('broken'), self.norm('sym1'))
+            os.symlink(self.norm('broken'), self.norm('sym2'))
+
+    def tearDown(self) -> None:
+        shutil.rmtree(self.tempdir)
+
+    def glob(self, *parts: str) -> List[str]:
+        if len(parts) == 1:
+            pattern = parts[0]
+        else:
+            pattern = os.path.join(*parts)
+        p = os.path.join(self.tempdir, pattern)
+        res = glob.glob(p)
+        self.assertEqual(list(glob.iglob(p)), res)
+        return res
+
+    def assertSequencesEqual_noorder(self, l1: Iterable[T],
+                                     l2: Iterable[T]) -> None:
+        self.assertEqual(set(l1), set(l2))
+
+    def test_glob_literal(self) -> None:
+        eq = self.assertSequencesEqual_noorder
+        eq(self.glob('a'), [self.norm('a')])
+        eq(self.glob('a', 'D'), [self.norm('a', 'D')])
+        eq(self.glob('aab'), [self.norm('aab')])
+        eq(self.glob('zymurgy'), cast(List[str], []))  # JLe: work around #230
+
+        # test return types are unicode, but only if os.listdir
+        # returns unicode filenames
+        uniset = set([str])
+        tmp = os.listdir('.')
+        if set(type(x) for x in tmp) == uniset:
+            u1 = glob.glob('*')
+            u2 = glob.glob('./*')
+            self.assertEqual(set(type(r) for r in u1), uniset)
+            self.assertEqual(set(type(r) for r in u2), uniset)
+
+    def test_glob_one_directory(self) -> None:
+        eq = self.assertSequencesEqual_noorder
+        eq(self.glob('a*'), map(self.norm, ['a', 'aab', 'aaa']))
+        eq(self.glob('*a'), map(self.norm, ['a', 'aaa']))
+        eq(self.glob('aa?'), map(self.norm, ['aaa', 'aab']))
+        eq(self.glob('aa[ab]'), map(self.norm, ['aaa', 'aab']))
+        eq(self.glob('*q'), cast(List[str], []))  # JLe: work around #230
+
+    def test_glob_nested_directory(self) -> None:
+        eq = self.assertSequencesEqual_noorder
+        if os.path.normcase("abCD") == "abCD":
+            # case-sensitive filesystem
+            eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF')])
+        else:
+            # case insensitive filesystem
+            eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF'),
+                                             self.norm('a', 'bcd', 'efg')])
+        eq(self.glob('a', 'bcd', '*g'), [self.norm('a', 'bcd', 'efg')])
+
+    def test_glob_directory_names(self) -> None:
+        eq = self.assertSequencesEqual_noorder
+        eq(self.glob('*', 'D'), [self.norm('a', 'D')])
+        eq(self.glob('*', '*a'), cast(List[str], []))  # JLe: work around #230
+        eq(self.glob('a', '*', '*', '*a'),
+           [self.norm('a', 'bcd', 'efg', 'ha')])
+        eq(self.glob('?a?', '*F'), map(self.norm, [os.path.join('aaa', 'zzzF'),
+                                                   os.path.join('aab', 'F')]))
+
+    def test_glob_directory_with_trailing_slash(self) -> None:
+        # Verify that a wildcard pattern ending with os.sep doesn't blow up.
+        res = glob.glob(self.tempdir + '*' + os.sep)
+        self.assertEqual(len(res), 1)
+        # either of these results is reasonable
+        self.assertIn(res[0], [self.tempdir, self.tempdir + os.sep])
+
+    @skip_unless_symlink
+    def test_glob_broken_symlinks(self) -> None:
+        eq = self.assertSequencesEqual_noorder
+        eq(self.glob('sym*'), [self.norm('sym1'), self.norm('sym2')])
+        eq(self.glob('sym1'), [self.norm('sym1')])
+        eq(self.glob('sym2'), [self.norm('sym2')])
+
+
+def test_main() -> None:
+    run_unittest(GlobTests)
+
+
+if __name__ == "__main__":
+    test_main()
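
Several assertions in GlobTests wrap an empty list literal in
cast(List[str], []) and mark it "work around #230": typing.cast() is a runtime
no-op that simply returns its second argument and only tells the type checker
which type to assume for it. A minimal sketch of the idiom:

    from typing import List, cast

    # cast() has no runtime effect; it exists purely for the type checker,
    # which would otherwise have no element type to infer for [].
    empty = cast(List[str], [])
    assert empty == []
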
diff --git a/test-data/stdlib-samples/3.2/test/test_posixpath.py b/test-data/stdlib-samples/3.2/test/test_posixpath.py
new file mode 100644
index 0000000..de98975
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_posixpath.py
@@ -0,0 +1,531 @@
+import unittest
+from test import support, test_genericpath
+
+import posixpath
+import genericpath
+
+import imp
+imp.reload(posixpath) # Make sure we are using the local copy
+imp.reload(genericpath)
+
+import os
+import sys
+from posixpath import realpath, abspath, dirname, basename
+
+import posix
+from typing import cast, Any, TypeVar, Callable
+
+T = TypeVar('T')
+
+# An absolute path to a temporary filename for testing. We can't rely on TESTFN
+# being an absolute path, so we need this.
+
+ABSTFN = abspath(support.TESTFN)
+
+def skip_if_ABSTFN_contains_backslash(
+        test: Callable[[T], None]) -> Callable[[T], None]:
+    """
+    On Windows, posixpath.abspath still returns paths with backslashes
+    instead of posix forward slashes. If this is the case, several tests
+    fail, so skip them.
+    """
+    found_backslash = '\\' in ABSTFN
+    msg = "ABSTFN is not a posix path - tests fail"
+    return [test, unittest.skip(msg)(test)][found_backslash]
+
+def safe_rmdir(dirname: str) -> None:
+    try:
+        os.rmdir(dirname)
+    except OSError:
+        pass
+
+class PosixPathTest(unittest.TestCase):
+
+    def setUp(self) -> None:
+        self.tearDown()
+
+    def tearDown(self) -> None:
+        for suffix in ["", "1", "2"]:
+            support.unlink(support.TESTFN + suffix)
+            safe_rmdir(support.TESTFN + suffix)
+
+    def test_join(self) -> None:
+        self.assertEqual(posixpath.join("/foo", "bar", "/bar", "baz"),
+                         "/bar/baz")
+        self.assertEqual(posixpath.join("/foo", "bar", "baz"), "/foo/bar/baz")
+        self.assertEqual(posixpath.join("/foo/", "bar/", "baz/"),
+                         "/foo/bar/baz/")
+
+        self.assertEqual(posixpath.join(b"/foo", b"bar", b"/bar", b"baz"),
+                         b"/bar/baz")
+        self.assertEqual(posixpath.join(b"/foo", b"bar", b"baz"),
+                         b"/foo/bar/baz")
+        self.assertEqual(posixpath.join(b"/foo/", b"bar/", b"baz/"),
+                         b"/foo/bar/baz/")
+
+        self.assertRaises(TypeError, posixpath.join, b"bytes", "str")
+        self.assertRaises(TypeError, posixpath.join, "str", b"bytes")
+
+    def test_split(self) -> None:
+        self.assertEqual(posixpath.split("/foo/bar"), ("/foo", "bar"))
+        self.assertEqual(posixpath.split("/"), ("/", ""))
+        self.assertEqual(posixpath.split("foo"), ("", "foo"))
+        self.assertEqual(posixpath.split("////foo"), ("////", "foo"))
+        self.assertEqual(posixpath.split("//foo//bar"), ("//foo", "bar"))
+
+        self.assertEqual(posixpath.split(b"/foo/bar"), (b"/foo", b"bar"))
+        self.assertEqual(posixpath.split(b"/"), (b"/", b""))
+        self.assertEqual(posixpath.split(b"foo"), (b"", b"foo"))
+        self.assertEqual(posixpath.split(b"////foo"), (b"////", b"foo"))
+        self.assertEqual(posixpath.split(b"//foo//bar"), (b"//foo", b"bar"))
+
+    def splitextTest(self, path: str, filename: str, ext: str) -> None:
+        self.assertEqual(posixpath.splitext(path), (filename, ext))
+        self.assertEqual(posixpath.splitext("/" + path), ("/" + filename, ext))
+        self.assertEqual(posixpath.splitext("abc/" + path),
+                         ("abc/" + filename, ext))
+        self.assertEqual(posixpath.splitext("abc.def/" + path),
+                         ("abc.def/" + filename, ext))
+        self.assertEqual(posixpath.splitext("/abc.def/" + path),
+                         ("/abc.def/" + filename, ext))
+        self.assertEqual(posixpath.splitext(path + "/"),
+                         (filename + ext + "/", ""))
+
+        pathb = bytes(path, "ASCII")
+        filenameb = bytes(filename, "ASCII")
+        extb = bytes(ext, "ASCII")
+
+        self.assertEqual(posixpath.splitext(pathb), (filenameb, extb))
+        self.assertEqual(posixpath.splitext(b"/" + pathb),
+                         (b"/" + filenameb, extb))
+        self.assertEqual(posixpath.splitext(b"abc/" + pathb),
+                         (b"abc/" + filenameb, extb))
+        self.assertEqual(posixpath.splitext(b"abc.def/" + pathb),
+                         (b"abc.def/" + filenameb, extb))
+        self.assertEqual(posixpath.splitext(b"/abc.def/" + pathb),
+                         (b"/abc.def/" + filenameb, extb))
+        self.assertEqual(posixpath.splitext(pathb + b"/"),
+                         (filenameb + extb + b"/", b""))
+
+    def test_splitext(self) -> None:
+        self.splitextTest("foo.bar", "foo", ".bar")
+        self.splitextTest("foo.boo.bar", "foo.boo", ".bar")
+        self.splitextTest("foo.boo.biff.bar", "foo.boo.biff", ".bar")
+        self.splitextTest(".csh.rc", ".csh", ".rc")
+        self.splitextTest("nodots", "nodots", "")
+        self.splitextTest(".cshrc", ".cshrc", "")
+        self.splitextTest("...manydots", "...manydots", "")
+        self.splitextTest("...manydots.ext", "...manydots", ".ext")
+        self.splitextTest(".", ".", "")
+        self.splitextTest("..", "..", "")
+        self.splitextTest("........", "........", "")
+        self.splitextTest("", "", "")
+
+    def test_isabs(self) -> None:
+        self.assertIs(posixpath.isabs(""), False)
+        self.assertIs(posixpath.isabs("/"), True)
+        self.assertIs(posixpath.isabs("/foo"), True)
+        self.assertIs(posixpath.isabs("/foo/bar"), True)
+        self.assertIs(posixpath.isabs("foo/bar"), False)
+
+        self.assertIs(posixpath.isabs(b""), False)
+        self.assertIs(posixpath.isabs(b"/"), True)
+        self.assertIs(posixpath.isabs(b"/foo"), True)
+        self.assertIs(posixpath.isabs(b"/foo/bar"), True)
+        self.assertIs(posixpath.isabs(b"foo/bar"), False)
+
+    def test_basename(self) -> None:
+        self.assertEqual(posixpath.basename("/foo/bar"), "bar")
+        self.assertEqual(posixpath.basename("/"), "")
+        self.assertEqual(posixpath.basename("foo"), "foo")
+        self.assertEqual(posixpath.basename("////foo"), "foo")
+        self.assertEqual(posixpath.basename("//foo//bar"), "bar")
+
+        self.assertEqual(posixpath.basename(b"/foo/bar"), b"bar")
+        self.assertEqual(posixpath.basename(b"/"), b"")
+        self.assertEqual(posixpath.basename(b"foo"), b"foo")
+        self.assertEqual(posixpath.basename(b"////foo"), b"foo")
+        self.assertEqual(posixpath.basename(b"//foo//bar"), b"bar")
+
+    def test_dirname(self) -> None:
+        self.assertEqual(posixpath.dirname("/foo/bar"), "/foo")
+        self.assertEqual(posixpath.dirname("/"), "/")
+        self.assertEqual(posixpath.dirname("foo"), "")
+        self.assertEqual(posixpath.dirname("////foo"), "////")
+        self.assertEqual(posixpath.dirname("//foo//bar"), "//foo")
+
+        self.assertEqual(posixpath.dirname(b"/foo/bar"), b"/foo")
+        self.assertEqual(posixpath.dirname(b"/"), b"/")
+        self.assertEqual(posixpath.dirname(b"foo"), b"")
+        self.assertEqual(posixpath.dirname(b"////foo"), b"////")
+        self.assertEqual(posixpath.dirname(b"//foo//bar"), b"//foo")
+
+    def test_islink(self) -> None:
+        self.assertIs(posixpath.islink(support.TESTFN + "1"), False)
+        self.assertIs(posixpath.lexists(support.TESTFN + "2"), False)
+        f = open(support.TESTFN + "1", "wb")
+        try:
+            f.write(b"foo")
+            f.close()
+            self.assertIs(posixpath.islink(support.TESTFN + "1"), False)
+            if support.can_symlink():
+                os.symlink(support.TESTFN + "1", support.TESTFN + "2")
+                self.assertIs(posixpath.islink(support.TESTFN + "2"), True)
+                os.remove(support.TESTFN + "1")
+                self.assertIs(posixpath.islink(support.TESTFN + "2"), True)
+                self.assertIs(posixpath.exists(support.TESTFN + "2"), False)
+                self.assertIs(posixpath.lexists(support.TESTFN + "2"), True)
+        finally:
+            if not f.closed:
+                f.close()
+
+    @staticmethod
+    def _create_file(filename: str) -> None:
+        with open(filename, 'wb') as f:
+            f.write(b'foo')
+
+    def test_samefile(self) -> None:
+        test_fn = support.TESTFN + "1"
+        self._create_file(test_fn)
+        self.assertTrue(posixpath.samefile(test_fn, test_fn))
+        self.assertRaises(TypeError, posixpath.samefile)
+
+    @unittest.skipIf(
+        sys.platform.startswith('win'),
+        "posixpath.samefile does not work on links in Windows")
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    def test_samefile_on_links(self) -> None:
+        test_fn1 = support.TESTFN + "1"
+        test_fn2 = support.TESTFN + "2"
+        self._create_file(test_fn1)
+
+        os.symlink(test_fn1, test_fn2)
+        self.assertTrue(posixpath.samefile(test_fn1, test_fn2))
+        os.remove(test_fn2)
+
+        self._create_file(test_fn2)
+        self.assertFalse(posixpath.samefile(test_fn1, test_fn2))
+
+
+    def test_samestat(self) -> None:
+        test_fn = support.TESTFN + "1"
+        self._create_file(test_fn)
+        test_fns = [test_fn]*2
+        stats = map(os.stat, test_fns)
+        self.assertTrue(posixpath.samestat(*stats))
+
+    @unittest.skipIf(
+        sys.platform.startswith('win'),
+        "posixpath.samestat does not work on links in Windows")
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    def test_samestat_on_links(self) -> None:
+        test_fn1 = support.TESTFN + "1"
+        test_fn2 = support.TESTFN + "2"
+        self._create_file(test_fn1)
+        test_fns = [test_fn1, test_fn2]
+        cast(Any, os.symlink)(*test_fns)
+        stats = map(os.stat, test_fns)
+        self.assertTrue(posixpath.samestat(*stats))
+        os.remove(test_fn2)
+
+        self._create_file(test_fn2)
+        stats = map(os.stat, test_fns)
+        self.assertFalse(posixpath.samestat(*stats))
+
+        self.assertRaises(TypeError, posixpath.samestat)
+
+    def test_ismount(self) -> None:
+        self.assertIs(posixpath.ismount("/"), True)
+        self.assertIs(posixpath.ismount(b"/"), True)
+
+    def test_ismount_non_existent(self) -> None:
+        # Non-existent mountpoint.
+        self.assertIs(posixpath.ismount(ABSTFN), False)
+        try:
+            os.mkdir(ABSTFN)
+            self.assertIs(posixpath.ismount(ABSTFN), False)
+        finally:
+            safe_rmdir(ABSTFN)
+
+    @unittest.skipUnless(support.can_symlink(),
+                         "Test requires symlink support")
+    def test_ismount_symlinks(self) -> None:
+        # Symlinks are never mountpoints.
+        try:
+            os.symlink("/", ABSTFN)
+            self.assertIs(posixpath.ismount(ABSTFN), False)
+        finally:
+            os.unlink(ABSTFN)
+
+    @unittest.skipIf(posix is None, "Test requires posix module")
+    def test_ismount_different_device(self) -> None:
+        # Simulate the path being on a different device from its parent by
+        # mocking out st_dev.
+        save_lstat = os.lstat
+        def fake_lstat(path):
+            st_ino = 0
+            st_dev = 0
+            if path == ABSTFN:
+                st_dev = 1
+                st_ino = 1
+            return posix.stat_result((0, st_ino, st_dev, 0, 0, 0, 0, 0, 0, 0))
+        try:
+            setattr(os, 'lstat', fake_lstat) # mypy: can't modify os directly
+            self.assertIs(posixpath.ismount(ABSTFN), True)
+        finally:
+            setattr(os, 'lstat', save_lstat)
+
+    def test_expanduser(self) -> None:
+        self.assertEqual(posixpath.expanduser("foo"), "foo")
+        self.assertEqual(posixpath.expanduser(b"foo"), b"foo")
+        try:
+            import pwd
+        except ImportError:
+            pass
+        else:
+            self.assertIsInstance(posixpath.expanduser("~/"), str)
+            self.assertIsInstance(posixpath.expanduser(b"~/"), bytes)
+            # if home directory == root directory, this test makes no sense
+            if posixpath.expanduser("~") != '/':
+                self.assertEqual(
+                    posixpath.expanduser("~") + "/",
+                    posixpath.expanduser("~/")
+                )
+                self.assertEqual(
+                    posixpath.expanduser(b"~") + b"/",
+                    posixpath.expanduser(b"~/")
+                )
+            self.assertIsInstance(posixpath.expanduser("~root/"), str)
+            self.assertIsInstance(posixpath.expanduser("~foo/"), str)
+            self.assertIsInstance(posixpath.expanduser(b"~root/"), bytes)
+            self.assertIsInstance(posixpath.expanduser(b"~foo/"), bytes)
+
+            with support.EnvironmentVarGuard() as env:
+                env['HOME'] = '/'
+                self.assertEqual(posixpath.expanduser("~"), "/")
+                # expanduser should fall back to using the password database
+                del env['HOME']
+                home = pwd.getpwuid(os.getuid()).pw_dir
+                self.assertEqual(posixpath.expanduser("~"), home)
+
+    def test_normpath(self) -> None:
+        self.assertEqual(posixpath.normpath(""), ".")
+        self.assertEqual(posixpath.normpath("/"), "/")
+        self.assertEqual(posixpath.normpath("//"), "//")
+        self.assertEqual(posixpath.normpath("///"), "/")
+        self.assertEqual(posixpath.normpath("///foo/.//bar//"), "/foo/bar")
+        self.assertEqual(posixpath.normpath("///foo/.//bar//.//..//.//baz"),
+                         "/foo/baz")
+        self.assertEqual(posixpath.normpath("///..//./foo/.//bar"), "/foo/bar")
+
+        self.assertEqual(posixpath.normpath(b""), b".")
+        self.assertEqual(posixpath.normpath(b"/"), b"/")
+        self.assertEqual(posixpath.normpath(b"//"), b"//")
+        self.assertEqual(posixpath.normpath(b"///"), b"/")
+        self.assertEqual(posixpath.normpath(b"///foo/.//bar//"), b"/foo/bar")
+        self.assertEqual(posixpath.normpath(b"///foo/.//bar//.//..//.//baz"),
+                         b"/foo/baz")
+        self.assertEqual(posixpath.normpath(b"///..//./foo/.//bar"),
+                         b"/foo/bar")
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_basic(self) -> None:
+        # Basic operation.
+        try:
+            os.symlink(ABSTFN+"1", ABSTFN)
+            self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
+        finally:
+            support.unlink(ABSTFN)
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_relative(self) -> None:
+        try:
+            os.symlink(posixpath.relpath(ABSTFN+"1"), ABSTFN)
+            self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
+        finally:
+            support.unlink(ABSTFN)
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_symlink_loops(self) -> None:
+        # Bug #930024: return the path unchanged if we get into an infinite
+        # symlink loop.
+        try:
+            old_path = abspath('.')
+            os.symlink(ABSTFN, ABSTFN)
+            self.assertEqual(realpath(ABSTFN), ABSTFN)
+
+            os.symlink(ABSTFN+"1", ABSTFN+"2")
+            os.symlink(ABSTFN+"2", ABSTFN+"1")
+            self.assertEqual(realpath(ABSTFN+"1"), ABSTFN+"1")
+            self.assertEqual(realpath(ABSTFN+"2"), ABSTFN+"2")
+
+            # Test using relative path as well.
+            os.chdir(dirname(ABSTFN))
+            self.assertEqual(realpath(basename(ABSTFN)), ABSTFN)
+        finally:
+            os.chdir(old_path)
+            support.unlink(ABSTFN)
+            support.unlink(ABSTFN+"1")
+            support.unlink(ABSTFN+"2")
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_resolve_parents(self) -> None:
+        # We also need to resolve any symlinks in the parents of a relative
+        # path passed to realpath. E.g.: current working directory is
+        # /usr/doc with 'doc' being a symlink to /usr/share/doc. We call
+        # realpath("a"). This should return /usr/share/doc/a/.
+        try:
+            old_path = abspath('.')
+            os.mkdir(ABSTFN)
+            os.mkdir(ABSTFN + "/y")
+            os.symlink(ABSTFN + "/y", ABSTFN + "/k")
+
+            os.chdir(ABSTFN + "/k")
+            self.assertEqual(realpath("a"), ABSTFN + "/y/a")
+        finally:
+            os.chdir(old_path)
+            support.unlink(ABSTFN + "/k")
+            safe_rmdir(ABSTFN + "/y")
+            safe_rmdir(ABSTFN)
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_resolve_before_normalizing(self) -> None:
+        # Bug #990669: Symbolic links should be resolved before we
+        # normalize the path. E.g.: if we have directories 'a', 'k' and 'y'
+        # in the following hierarchy:
+        # a/k/y
+        #
+        # and a symbolic link 'link-y' pointing to 'y' in directory 'a',
+        # then realpath("link-y/..") should return 'k', not 'a'.
+        try:
+            old_path = abspath('.')
+            os.mkdir(ABSTFN)
+            os.mkdir(ABSTFN + "/k")
+            os.mkdir(ABSTFN + "/k/y")
+            os.symlink(ABSTFN + "/k/y", ABSTFN + "/link-y")
+
+            # Absolute path.
+            self.assertEqual(realpath(ABSTFN + "/link-y/.."), ABSTFN + "/k")
+            # Relative path.
+            os.chdir(dirname(ABSTFN))
+            self.assertEqual(realpath(basename(ABSTFN) + "/link-y/.."),
+                             ABSTFN + "/k")
+        finally:
+            os.chdir(old_path)
+            support.unlink(ABSTFN + "/link-y")
+            safe_rmdir(ABSTFN + "/k/y")
+            safe_rmdir(ABSTFN + "/k")
+            safe_rmdir(ABSTFN)
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_resolve_first(self) -> None:
+        # Bug #1213894: The first component of the path, if not absolute,
+        # must be resolved too.
+
+        try:
+            old_path = abspath('.')
+            os.mkdir(ABSTFN)
+            os.mkdir(ABSTFN + "/k")
+            os.symlink(ABSTFN, ABSTFN + "link")
+            os.chdir(dirname(ABSTFN))
+
+            base = basename(ABSTFN)
+            self.assertEqual(realpath(base + "link"), ABSTFN)
+            self.assertEqual(realpath(base + "link/k"), ABSTFN + "/k")
+        finally:
+            os.chdir(old_path)
+            support.unlink(ABSTFN + "link")
+            safe_rmdir(ABSTFN + "/k")
+            safe_rmdir(ABSTFN)
+
+    def test_relpath(self) -> None:
+        real_getcwd = os.getcwd
+        # mypy: can't modify os directly
+        setattr(os, 'getcwd', lambda: r"/home/user/bar")
+        try:
+            curdir = os.path.split(os.getcwd())[-1]
+            self.assertRaises(ValueError, posixpath.relpath, "")
+            self.assertEqual(posixpath.relpath("a"), "a")
+            self.assertEqual(posixpath.relpath(posixpath.abspath("a")), "a")
+            self.assertEqual(posixpath.relpath("a/b"), "a/b")
+            self.assertEqual(posixpath.relpath("../a/b"), "../a/b")
+            self.assertEqual(posixpath.relpath("a", "../b"), "../"+curdir+"/a")
+            self.assertEqual(posixpath.relpath("a/b", "../c"),
+                             "../"+curdir+"/a/b")
+            self.assertEqual(posixpath.relpath("a", "b/c"), "../../a")
+            self.assertEqual(posixpath.relpath("a", "a"), ".")
+            self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x/y/z"), '../../../foo/bar/bat')
+            self.assertEqual(posixpath.relpath("/foo/bar/bat", "/foo/bar"), 'bat')
+            self.assertEqual(posixpath.relpath("/foo/bar/bat", "/"), 'foo/bar/bat')
+            self.assertEqual(posixpath.relpath("/", "/foo/bar/bat"), '../../..')
+            self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x"), '../foo/bar/bat')
+            self.assertEqual(posixpath.relpath("/x", "/foo/bar/bat"), '../../../x')
+            self.assertEqual(posixpath.relpath("/", "/"), '.')
+            self.assertEqual(posixpath.relpath("/a", "/a"), '.')
+            self.assertEqual(posixpath.relpath("/a/b", "/a/b"), '.')
+        finally:
+            setattr(os, 'getcwd', real_getcwd)
+
+    def test_relpath_bytes(self) -> None:
+        real_getcwdb = os.getcwdb
+        # mypy: can't modify os directly
+        setattr(os, 'getcwdb', lambda: br"/home/user/bar")
+        try:
+            curdir = os.path.split(os.getcwdb())[-1]
+            self.assertRaises(ValueError, posixpath.relpath, b"")
+            self.assertEqual(posixpath.relpath(b"a"), b"a")
+            self.assertEqual(posixpath.relpath(posixpath.abspath(b"a")), b"a")
+            self.assertEqual(posixpath.relpath(b"a/b"), b"a/b")
+            self.assertEqual(posixpath.relpath(b"../a/b"), b"../a/b")
+            self.assertEqual(posixpath.relpath(b"a", b"../b"),
+                             b"../"+curdir+b"/a")
+            self.assertEqual(posixpath.relpath(b"a/b", b"../c"),
+                             b"../"+curdir+b"/a/b")
+            self.assertEqual(posixpath.relpath(b"a", b"b/c"), b"../../a")
+            self.assertEqual(posixpath.relpath(b"a", b"a"), b".")
+            self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/x/y/z"), b'../../../foo/bar/bat')
+            self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/foo/bar"), b'bat')
+            self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/"), b'foo/bar/bat')
+            self.assertEqual(posixpath.relpath(b"/", b"/foo/bar/bat"), b'../../..')
+            self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/x"), b'../foo/bar/bat')
+            self.assertEqual(posixpath.relpath(b"/x", b"/foo/bar/bat"), b'../../../x')
+            self.assertEqual(posixpath.relpath(b"/", b"/"), b'.')
+            self.assertEqual(posixpath.relpath(b"/a", b"/a"), b'.')
+            self.assertEqual(posixpath.relpath(b"/a/b", b"/a/b"), b'.')
+
+            self.assertRaises(TypeError, posixpath.relpath, b"bytes", "str")
+            self.assertRaises(TypeError, posixpath.relpath, "str", b"bytes")
+        finally:
+            setattr(os, 'getcwdb', real_getcwdb)
+
+    def test_sameopenfile(self) -> None:
+        fname = support.TESTFN + "1"
+        with open(fname, "wb") as a, open(fname, "wb") as b:
+            self.assertTrue(posixpath.sameopenfile(a.fileno(), b.fileno()))
+
+
+class PosixCommonTest(test_genericpath.CommonTest):
+    pathmodule = posixpath
+    attributes = ['relpath', 'samefile', 'sameopenfile', 'samestat']
+
+
+def test_main() -> None:
+    support.run_unittest(PosixPathTest, PosixCommonTest)
+
+
+if __name__=="__main__":
+    test_main()
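
test_relpath() and test_relpath_bytes() above monkey-patch os.getcwd and
os.getcwdb through setattr() because, as their inline comments note, the mypy
of this era rejected direct assignment to attributes of the os module;
setattr() with a string attribute name is opaque to the checker but behaves
identically at runtime. A hedged sketch of the pattern:

    import os

    real_getcwd = os.getcwd
    # setattr() sidesteps the checker; a plain "os.getcwd = ..." would be flagged.
    setattr(os, 'getcwd', lambda: '/home/user/bar')
    try:
        assert os.getcwd() == '/home/user/bar'
    finally:
        setattr(os, 'getcwd', real_getcwd)
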
diff --git a/test-data/stdlib-samples/3.2/test/test_pprint.py b/test-data/stdlib-samples/3.2/test/test_pprint.py
new file mode 100644
index 0000000..cf54ebd
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_pprint.py
@@ -0,0 +1,488 @@
+import pprint
+import test.support
+import unittest
+import test.test_set
+import random
+import collections
+import itertools
+
+from typing import List, Any, Dict, Tuple, cast, Callable
+
+# list, tuple and dict subclasses that do or don't overwrite __repr__
+class list2(list):
+    pass
+
+class list3(list):
+    def __repr__(self) -> str:
+        return list.__repr__(self)
+
+class tuple2(tuple):
+    pass
+
+class tuple3(tuple):
+    def __repr__(self) -> str:
+        return tuple.__repr__(self)
+
+class dict2(dict):
+    pass
+
+class dict3(dict):
+    def __repr__(self) -> str:
+        return dict.__repr__(self)
+
+class Unorderable:
+    def __repr__(self) -> str:
+        return str(id(self))
+
+class QueryTestCase(unittest.TestCase):
+
+    def setUp(self) -> None:
+        self.a = list(range(100))  # type: List[Any]
+        self.b = list(range(200))  # type: List[Any]
+        self.a[-12] = self.b
+
+    def test_basic(self) -> None:
+        # Verify .isrecursive() and .isreadable() w/o recursion
+        pp = pprint.PrettyPrinter()
+        for safe in (2, 2.0, complex(0.0, 2.0), "abc", [3], (2,2), {3: 3}, "yaddayadda",
+                     self.a, self.b):
+            # module-level convenience functions
+            self.assertFalse(pprint.isrecursive(safe),
+                             "expected not isrecursive for %r" % (safe,))
+            self.assertTrue(pprint.isreadable(safe),
+                            "expected isreadable for %r" % (safe,))
+            # PrettyPrinter methods
+            self.assertFalse(pp.isrecursive(safe),
+                             "expected not isrecursive for %r" % (safe,))
+            self.assertTrue(pp.isreadable(safe),
+                            "expected isreadable for %r" % (safe,))
+
+    def test_knotted(self) -> None:
+        # Verify .isrecursive() and .isreadable() w/ recursion
+        # Tie a knot.
+        self.b[67] = self.a
+        # Messy dict.
+        self.d = {}  # type: Dict[int, dict]
+        self.d[0] = self.d[1] = self.d[2] = self.d
+
+        pp = pprint.PrettyPrinter()
+
+        for icky in self.a, self.b, self.d, (self.d, self.d):
+            self.assertTrue(pprint.isrecursive(icky), "expected isrecursive")
+            self.assertFalse(pprint.isreadable(icky), "expected not isreadable")
+            self.assertTrue(pp.isrecursive(icky), "expected isrecursive")
+            self.assertFalse(pp.isreadable(icky), "expected not isreadable")
+
+        # Break the cycles.
+        self.d.clear()
+        del self.a[:]
+        del self.b[:]
+
+        for safe in self.a, self.b, self.d, (self.d, self.d):
+            # module-level convenience functions
+            self.assertFalse(pprint.isrecursive(safe),
+                             "expected not isrecursive for %r" % (safe,))
+            self.assertTrue(pprint.isreadable(safe),
+                            "expected isreadable for %r" % (safe,))
+            # PrettyPrinter methods
+            self.assertFalse(pp.isrecursive(safe),
+                             "expected not isrecursive for %r" % (safe,))
+            self.assertTrue(pp.isreadable(safe),
+                            "expected isreadable for %r" % (safe,))
+
+    def test_unreadable(self) -> None:
+        # Not recursive but not readable anyway
+        pp = pprint.PrettyPrinter()
+        for unreadable in type(3), pprint, pprint.isrecursive:
+            # module-level convenience functions
+            self.assertFalse(pprint.isrecursive(unreadable),
+                             "expected not isrecursive for %r" % (unreadable,))
+            self.assertFalse(pprint.isreadable(unreadable),
+                             "expected not isreadable for %r" % (unreadable,))
+            # PrettyPrinter methods
+            self.assertFalse(pp.isrecursive(unreadable),
+                             "expected not isrecursive for %r" % (unreadable,))
+            self.assertFalse(pp.isreadable(unreadable),
+                             "expected not isreadable for %r" % (unreadable,))
+
+    def test_same_as_repr(self) -> None:
+        # Simple objects, small containers and classes that overwrite __repr__
+        # For those the result should be the same as repr().
+        # Ahem.  The docs don't say anything about that -- this appears to
+        # be testing an implementation quirk.  Starting in Python 2.5, it's
+        # not true for dicts:  pprint always sorts dicts by key now; before,
+        # it sorted a dict display if and only if the display required
+        # multiple lines.  For that reason, dicts with more than one element
+        # aren't tested here.
+        for simple in (0, 0, complex(0.0), 0.0, "", b"",
+                       (), tuple2(), tuple3(),
+                       [], list2(), list3(),
+                       {}, dict2(), dict3(),
+                       self.assertTrue, pprint,
+                       -6, -6, complex(-6.,-6.), -1.5, "x", b"x", (3,), [3], {3: 6},
+                       (1,2), [3,4], {5: 6},
+                       tuple2((1,2)), tuple3((1,2)), tuple3(range(100)),  # type: ignore
+                       [3,4], list2(cast(Any, [3,4])), list3(cast(Any, [3,4])),
+                       list3(cast(Any, range(100))), dict2(cast(Any, {5: 6})),
+                       dict3(cast(Any, {5: 6})), # JLe: work around mypy issue #233
+                       range(10, -11, -1)
+                      ):
+            native = repr(simple)
+            for function in "pformat", "saferepr":
+                f = getattr(pprint, function)
+                got = f(simple)
+                self.assertEqual(native, got,
+                                 "expected %s got %s from pprint.%s" %
+                                 (native, got, function))
+
+    def test_basic_line_wrap(self) -> None:
+        # verify basic line-wrapping operation
+        o = {'RPM_cal': 0,
+             'RPM_cal2': 48059,
+             'Speed_cal': 0,
+             'controldesk_runtime_us': 0,
+             'main_code_runtime_us': 0,
+             'read_io_runtime_us': 0,
+             'write_io_runtime_us': 43690}
+        exp = """\
+{'RPM_cal': 0,
+ 'RPM_cal2': 48059,
+ 'Speed_cal': 0,
+ 'controldesk_runtime_us': 0,
+ 'main_code_runtime_us': 0,
+ 'read_io_runtime_us': 0,
+ 'write_io_runtime_us': 43690}"""
+        # JLe: work around mypy issue #232
+        for type in cast(List[Any], [dict, dict2]):
+            self.assertEqual(pprint.pformat(type(o)), exp)
+
+        o2 = range(100)
+        exp = '[%s]' % ',\n '.join(map(str, o2))
+        for type in cast(List[Any], [list, list2]):
+            self.assertEqual(pprint.pformat(type(o2)), exp)
+
+        o3 = tuple(range(100))
+        exp = '(%s)' % ',\n '.join(map(str, o3))
+        for type in cast(List[Any], [tuple, tuple2]):
+            self.assertEqual(pprint.pformat(type(o3)), exp)
+
+        # indent parameter
+        o4 = range(100)
+        exp = '[   %s]' % ',\n    '.join(map(str, o4))
+        for type in cast(List[Any], [list, list2]):
+            self.assertEqual(pprint.pformat(type(o4), indent=4), exp)
+
+    def test_nested_indentations(self) -> None:
+        o1 = list(range(10))
+        o2 = {'first':1, 'second':2, 'third':3}
+        o = [o1, o2]
+        expected = """\
+[   [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+    {   'first': 1,
+        'second': 2,
+        'third': 3}]"""
+        self.assertEqual(pprint.pformat(o, indent=4, width=42), expected)
+
+    def test_sorted_dict(self) -> None:
+        # Starting in Python 2.5, pprint sorts dict displays by key regardless
+        # of how small the dictionary may be.
+        # Before the change, on 32-bit Windows pformat() gave order
+        # 'a', 'c', 'b' here, so this test failed.
+        d = {'a': 1, 'b': 1, 'c': 1}
+        self.assertEqual(pprint.pformat(d), "{'a': 1, 'b': 1, 'c': 1}")
+        self.assertEqual(pprint.pformat([d, d]),
+            "[{'a': 1, 'b': 1, 'c': 1}, {'a': 1, 'b': 1, 'c': 1}]")
+
+        # The next one is kind of goofy.  The sorted order depends on the
+        # alphabetic order of type names:  "int" < "str" < "tuple".  Before
+        # Python 2.5, this was in the test_same_as_repr() test.  It's worth
+        # keeping around for now because it's one of few tests of pprint
+        # against a crazy mix of types.
+        self.assertEqual(pprint.pformat({"xy\tab\n": (3,), 5: [[]], (): {}}),
+            r"{5: [[]], 'xy\tab\n': (3,), (): {}}")
+
+    def test_ordered_dict(self) -> None:
+        words = 'the quick brown fox jumped over a lazy dog'.split()
+        d = collections.OrderedDict(zip(words, itertools.count()))
+        self.assertEqual(pprint.pformat(d),
+"""\
+{'the': 0,
+ 'quick': 1,
+ 'brown': 2,
+ 'fox': 3,
+ 'jumped': 4,
+ 'over': 5,
+ 'a': 6,
+ 'lazy': 7,
+ 'dog': 8}""")
+
+    def test_subclassing(self) -> None:
+        o = {'names with spaces': 'should be presented using repr()',
+             'others.should.not.be': 'like.this'}
+        exp = """\
+{'names with spaces': 'should be presented using repr()',
+ others.should.not.be: like.this}"""
+        self.assertEqual(DottedPrettyPrinter().pformat(o), exp)
+
+    @test.support.cpython_only
+    def test_set_reprs(self) -> None:
+        # This test creates a complex arrangement of frozensets and
+        # compares the pretty-printed repr against a string hard-coded in
+        # the test.  The hard-coded repr depends on the sort order of
+        # frozensets.
+        #
+        # However, as the docs point out: "Since sets only define
+        # partial ordering (subset relationships), the output of the
+        # list.sort() method is undefined for lists of sets."
+        #
+        # In a nutshell, the test assumes frozenset({0}) will always
+        # sort before frozenset({1}), but:
+        #
+        # >>> frozenset({0}) < frozenset({1})
+        # False
+        # >>> frozenset({1}) < frozenset({0})
+        # False
+        #
+        # Consequently, this test is fragile and
+        # implementation-dependent.  Small changes to Python's sort
+        # algorithm cause the test to fail when it should pass.
+
+        self.assertEqual(pprint.pformat(set()), 'set()')
+        self.assertEqual(pprint.pformat(set(range(3))), '{0, 1, 2}')
+        self.assertEqual(pprint.pformat(frozenset()), 'frozenset()')
+        self.assertEqual(pprint.pformat(frozenset(range(3))), 'frozenset({0, 1, 2})')
+        cube_repr_tgt = """\
+{frozenset(): frozenset({frozenset({2}), frozenset({0}), frozenset({1})}),
+ frozenset({0}): frozenset({frozenset(),
+                            frozenset({0, 2}),
+                            frozenset({0, 1})}),
+ frozenset({1}): frozenset({frozenset(),
+                            frozenset({1, 2}),
+                            frozenset({0, 1})}),
+ frozenset({2}): frozenset({frozenset(),
+                            frozenset({1, 2}),
+                            frozenset({0, 2})}),
+ frozenset({1, 2}): frozenset({frozenset({2}),
+                               frozenset({1}),
+                               frozenset({0, 1, 2})}),
+ frozenset({0, 2}): frozenset({frozenset({2}),
+                               frozenset({0}),
+                               frozenset({0, 1, 2})}),
+ frozenset({0, 1}): frozenset({frozenset({0}),
+                               frozenset({1}),
+                               frozenset({0, 1, 2})}),
+ frozenset({0, 1, 2}): frozenset({frozenset({1, 2}),
+                                  frozenset({0, 2}),
+                                  frozenset({0, 1})})}"""
+        cube = test.test_set.cube(3)
+        self.assertEqual(pprint.pformat(cube), cube_repr_tgt)
+        cubo_repr_tgt = """\
+{frozenset({frozenset({0, 2}), frozenset({0})}): frozenset({frozenset({frozenset({0,
+                                                                                  2}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({0}),
+                                                                       frozenset({0,
+                                                                                  1})}),
+                                                            frozenset({frozenset(),
+                                                                       frozenset({0})}),
+                                                            frozenset({frozenset({2}),
+                                                                       frozenset({0,
+                                                                                  2})})}),
+ frozenset({frozenset({0, 1}), frozenset({1})}): frozenset({frozenset({frozenset({0,
+                                                                                  1}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({0}),
+                                                                       frozenset({0,
+                                                                                  1})}),
+                                                            frozenset({frozenset({1}),
+                                                                       frozenset({1,
+                                                                                  2})}),
+                                                            frozenset({frozenset(),
+                                                                       frozenset({1})})}),
+ frozenset({frozenset({1, 2}), frozenset({1})}): frozenset({frozenset({frozenset({1,
+                                                                                  2}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({2}),
+                                                                       frozenset({1,
+                                                                                  2})}),
+                                                            frozenset({frozenset(),
+                                                                       frozenset({1})}),
+                                                            frozenset({frozenset({1}),
+                                                                       frozenset({0,
+                                                                                  1})})}),
+ frozenset({frozenset({1, 2}), frozenset({2})}): frozenset({frozenset({frozenset({1,
+                                                                                  2}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({1}),
+                                                                       frozenset({1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({2}),
+                                                                       frozenset({0,
+                                                                                  2})}),
+                                                            frozenset({frozenset(),
+                                                                       frozenset({2})})}),
+ frozenset({frozenset(), frozenset({0})}): frozenset({frozenset({frozenset({0}),
+                                                                 frozenset({0,
+                                                                            1})}),
+                                                      frozenset({frozenset({0}),
+                                                                 frozenset({0,
+                                                                            2})}),
+                                                      frozenset({frozenset(),
+                                                                 frozenset({1})}),
+                                                      frozenset({frozenset(),
+                                                                 frozenset({2})})}),
+ frozenset({frozenset(), frozenset({1})}): frozenset({frozenset({frozenset(),
+                                                                 frozenset({0})}),
+                                                      frozenset({frozenset({1}),
+                                                                 frozenset({1,
+                                                                            2})}),
+                                                      frozenset({frozenset(),
+                                                                 frozenset({2})}),
+                                                      frozenset({frozenset({1}),
+                                                                 frozenset({0,
+                                                                            1})})}),
+ frozenset({frozenset({2}), frozenset()}): frozenset({frozenset({frozenset({2}),
+                                                                 frozenset({1,
+                                                                            2})}),
+                                                      frozenset({frozenset(),
+                                                                 frozenset({0})}),
+                                                      frozenset({frozenset(),
+                                                                 frozenset({1})}),
+                                                      frozenset({frozenset({2}),
+                                                                 frozenset({0,
+                                                                            2})})}),
+ frozenset({frozenset({0, 1, 2}), frozenset({0, 1})}): frozenset({frozenset({frozenset({1,
+                                                                                        2}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({0,
+                                                                                        2}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({0}),
+                                                                             frozenset({0,
+                                                                                        1})}),
+                                                                  frozenset({frozenset({1}),
+                                                                             frozenset({0,
+                                                                                        1})})}),
+ frozenset({frozenset({0}), frozenset({0, 1})}): frozenset({frozenset({frozenset(),
+                                                                       frozenset({0})}),
+                                                            frozenset({frozenset({0,
+                                                                                  1}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({0}),
+                                                                       frozenset({0,
+                                                                                  2})}),
+                                                            frozenset({frozenset({1}),
+                                                                       frozenset({0,
+                                                                                  1})})}),
+ frozenset({frozenset({2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({0,
+                                                                                  2}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({2}),
+                                                                       frozenset({1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({0}),
+                                                                       frozenset({0,
+                                                                                  2})}),
+                                                            frozenset({frozenset(),
+                                                                       frozenset({2})})}),
+ frozenset({frozenset({0, 1, 2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({1,
+                                                                                        2}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({0,
+                                                                                        1}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({0}),
+                                                                             frozenset({0,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({2}),
+                                                                             frozenset({0,
+                                                                                        2})})}),
+ frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}): frozenset({frozenset({frozenset({0,
+                                                                                        2}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({0,
+                                                                                        1}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({2}),
+                                                                             frozenset({1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({1}),
+                                                                             frozenset({1,
+                                                                                        2})})})}"""
+
+        cubo = test.test_set.linegraph(cube)
+        self.assertEqual(pprint.pformat(cubo), cubo_repr_tgt)
+
+    def test_depth(self) -> None:
+        nested_tuple = (1, (2, (3, (4, (5, 6)))))
+        nested_dict = {1: {2: {3: {4: {5: {6: 6}}}}}}
+        nested_list = [1, [2, [3, [4, [5, [6, []]]]]]]
+        self.assertEqual(pprint.pformat(nested_tuple), repr(nested_tuple))
+        self.assertEqual(pprint.pformat(nested_dict), repr(nested_dict))
+        self.assertEqual(pprint.pformat(nested_list), repr(nested_list))
+
+        lv1_tuple = '(1, (...))'
+        lv1_dict = '{1: {...}}'
+        lv1_list = '[1, [...]]'
+        self.assertEqual(pprint.pformat(nested_tuple, depth=1), lv1_tuple)
+        self.assertEqual(pprint.pformat(nested_dict, depth=1), lv1_dict)
+        self.assertEqual(pprint.pformat(nested_list, depth=1), lv1_list)
+
+    def test_sort_unorderable_values(self) -> None:
+        # Issue 3976:  sorted pprints fail for unorderable values.
+        n = 20
+        keys = [Unorderable() for i in range(n)]
+        random.shuffle(keys)
+        skeys = sorted(keys, key=id)
+        clean = lambda s: s.replace(' ', '').replace('\n','')  # type: Callable[[str], str]
+
+        self.assertEqual(clean(pprint.pformat(set(keys))),
+            '{' + ','.join(map(repr, skeys)) + '}')
+        self.assertEqual(clean(pprint.pformat(frozenset(keys))),
+            'frozenset({' + ','.join(map(repr, skeys)) + '})')
+        self.assertEqual(clean(pprint.pformat(dict.fromkeys(keys))),
+            '{' + ','.join('%r:None' % k for k in skeys) + '}')
+
+class DottedPrettyPrinter(pprint.PrettyPrinter):
+
+    def format(self, object: object, context: Dict[int, Any], maxlevels: int,
+               level: int) -> Tuple[str, int, int]:
+        if isinstance(object, str):
+            if ' ' in object:
+                return repr(object), 1, 0
+            else:
+                return object, 0, 0
+        else:
+            return pprint.PrettyPrinter.format(
+                self, object, context, maxlevels, level)
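+
+# (Note: PrettyPrinter.format() is documented to return a three-tuple of the
+# formatted string, a "readable" flag, and a "recursion detected" flag.  The
+# subclass above returns dotted names as-is instead of their repr() and flags
+# them as not readable, which is why test_subclassing expects
+# "others.should.not.be: like.this" without quotes.)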
+
+
+def test_main() -> None:
+    test.support.run_unittest(QueryTestCase)
+
+
+if __name__ == "__main__":
+    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_random.py b/test-data/stdlib-samples/3.2/test/test_random.py
new file mode 100644
index 0000000..5989cee
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_random.py
@@ -0,0 +1,533 @@
+#!/usr/bin/env python3
+
+import unittest
+import random
+import time
+import pickle
+import warnings
+from math import log, exp, pi, fsum, sin
+from test import support
+
+from typing import Any, Dict, List, Callable, Generic, TypeVar, cast
+
+RT = TypeVar('RT', random.Random, random.SystemRandom)
+
+class TestBasicOps(unittest.TestCase, Generic[RT]):
+    # Superclass with tests common to all generators.
+    # Subclasses must arrange for self.gen to retrieve the Random instance
+    # to be tested.
+
+    gen = None  # type: RT  # Either Random or SystemRandom
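+    # (The Generic[RT] base lets each concrete subclass pin down the type of
+    # self.gen; for example, SystemRandom_TestBasicOps below derives from
+    # TestBasicOps[random.SystemRandom], so mypy checks gen's methods against
+    # the right class.)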
+
+    def randomlist(self, n: int) -> List[float]:
+        """Helper function to make a list of random numbers"""
+        return [self.gen.random() for i in range(n)]
+
+    def test_autoseed(self) -> None:
+        self.gen.seed()
+        state1 = self.gen.getstate()
+        time.sleep(0.1)
+        self.gen.seed()      # different seeds at different times
+        state2 = self.gen.getstate()
+        self.assertNotEqual(state1, state2)
+
+    def test_saverestore(self) -> None:
+        N = 1000
+        self.gen.seed()
+        state = self.gen.getstate()
+        randseq = self.randomlist(N)
+        self.gen.setstate(state)    # should regenerate the same sequence
+        self.assertEqual(randseq, self.randomlist(N))
+
+    def test_seedargs(self) -> None:
+        for arg in [None, 0, 0, 1, 1, -1, -1, 10**20, -(10**20),
+                    3.14, complex(1., 2.), 'a', tuple('abc')]:
+            self.gen.seed(arg)
+        for arg in [list(range(3)), {'one': 1}]:
+            self.assertRaises(TypeError, self.gen.seed, arg)
+        self.assertRaises(TypeError, self.gen.seed, 1, 2, 3, 4)
+        self.assertRaises(TypeError, type(self.gen), [])  # type: ignore  # mypy issue 1846
+
+    def test_choice(self) -> None:
+        choice = self.gen.choice
+        with self.assertRaises(IndexError):
+            choice([])
+        self.assertEqual(choice([50]), 50)
+        self.assertIn(choice([25, 75]), [25, 75])
+
+    def test_sample(self) -> None:
+        # For the entire allowable range of 0 <= k <= N, validate that
+        # the sample is of the correct length and contains only unique items
+        N = 100
+        population = range(N)
+        for k in range(N+1):
+            s = self.gen.sample(population, k)
+            self.assertEqual(len(s), k)
+            uniq = set(s)
+            self.assertEqual(len(uniq), k)
+            self.assertTrue(uniq <= set(population))
+        self.assertEqual(self.gen.sample([], 0), [])  # test edge case N==k==0
+
+    def test_sample_distribution(self) -> None:
+        # For the entire allowable range of 0 <= k <= N, validate that
+        # sample generates all possible permutations
+        n = 5
+        pop = range(n)
+        trials = 10000  # large num prevents false negatives without slowing normal case
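+        # (Illustrative: with n == 5 and k == 3 there are 5!/2! == 60 distinct
+        # ordered samples, which is the "expected" count computed below.)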
+        def factorial(n: int) -> int:
+            if n == 0:
+                return 1
+            return n * factorial(n - 1)
+        for k in range(n):
+            expected = factorial(n) // factorial(n-k)
+            perms = {}  # type: Dict[tuple, object]
+            for i in range(trials):
+                perms[tuple(self.gen.sample(pop, k))] = None
+                if len(perms) == expected:
+                    break
+            else:
+                self.fail()
+
+    def test_sample_inputs(self) -> None:
+        # SF bug #801342 -- population can be any iterable defining __len__()
+        self.gen.sample(set(range(20)), 2)
+        self.gen.sample(range(20), 2)
+        self.gen.sample(range(20), 2)
+        self.gen.sample(str('abcdefghijklmnopqrst'), 2)
+        self.gen.sample(tuple('abcdefghijklmnopqrst'), 2)
+
+    def test_sample_on_dicts(self) -> None:
+        self.assertRaises(TypeError, self.gen.sample, dict.fromkeys('abcdef'), 2)
+
+    def test_gauss(self) -> None:
+        # Ensure that the seed() method initializes all the hidden state.  In
+        # particular, through 2.2.1 it failed to reset a piece of state used
+        # by (and only by) the .gauss() method.
+
+        for seed in 1, 12, 123, 1234, 12345, 123456, 654321:
+            self.gen.seed(seed)
+            x1 = self.gen.random()
+            y1 = self.gen.gauss(0, 1)
+
+            self.gen.seed(seed)
+            x2 = self.gen.random()
+            y2 = self.gen.gauss(0, 1)
+
+            self.assertEqual(x1, x2)
+            self.assertEqual(y1, y2)
+
+    def test_pickling(self) -> None:
+        state = pickle.dumps(self.gen)
+        origseq = [self.gen.random() for i in range(10)]
+        newgen = pickle.loads(state)
+        restoredseq = [newgen.random() for i in range(10)]
+        self.assertEqual(origseq, restoredseq)
+
+    def test_bug_1727780(self) -> None:
+        # verify that version-2 pickles load correctly whether they were
+        # created on 32-bit or 64-bit platforms, and that version-3 pickles
+        # load correctly as well.
+        files = [("randv2_32.pck", 780),
+                 ("randv2_64.pck", 866),
+                 ("randv3.pck", 343)]
+        for file, value in files:
+            f = open(support.findfile(file),"rb")
+            r = pickle.load(f)
+            f.close()
+            self.assertEqual(int(r.random()*1000), value)
+
+    def test_bug_9025(self) -> None:
+        # Had problem with an uneven distribution in int(n*random())
+        # Verify the fix by checking that distributions fall within expectations.
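+        # (6755399441055744 == 3 * 2**51, so a uniform randrange() should land
+        # in each residue class mod 3 roughly one third of the time; the wide
+        # 0.30..0.37 band below just guards against gross non-uniformity.)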
+        n = 100000
+        randrange = self.gen.randrange
+        k = sum(randrange(6755399441055744) % 3 == 2 for i in range(n))
+        self.assertTrue(0.30 < k/n and k/n < .37, (k/n))
+
+class SystemRandom_TestBasicOps(TestBasicOps[random.SystemRandom]):
+    gen = random.SystemRandom()
+
+    def test_autoseed(self) -> None:
+        # Doesn't need to do anything except not fail
+        self.gen.seed()
+
+    def test_saverestore(self) -> None:
+        self.assertRaises(NotImplementedError, self.gen.getstate)
+        self.assertRaises(NotImplementedError, self.gen.setstate, None)
+
+    def test_seedargs(self) -> None:
+        # Doesn't need to do anything except not fail
+        self.gen.seed(100)
+
+    def test_gauss(self) -> None:
+        self.gen.gauss_next = None
+        self.gen.seed(100)
+        self.assertEqual(self.gen.gauss_next, None)
+
+    def test_pickling(self) -> None:
+        self.assertRaises(NotImplementedError, pickle.dumps, self.gen)
+
+    def test_53_bits_per_float(self) -> None:
+        # This should pass whenever a C double has 53 bit precision.
+        span = 2 ** 53 # type: int
+        cum = 0
+        for i in range(100):
+            cum |= int(self.gen.random() * span)
+        self.assertEqual(cum, span-1)
+
+    def test_bigrand(self) -> None:
+        # The randrange routine should build up the required number of bits
+        # in stages so that all bit positions are active.
+        span = 2 ** 500 # type: int
+        cum = 0
+        for i in range(100):
+            r = self.gen.randrange(span)
+            self.assertTrue(0 <= r < span)
+            cum |= r
+        self.assertEqual(cum, span-1)
+
+    def test_bigrand_ranges(self) -> None:
+        for i in [40, 80, 160, 200, 211, 250, 375, 512, 550]:
+            start = self.gen.randrange(2 ** i)
+            stop = self.gen.randrange(2 ** (i-2))
+            if stop <= start:
+                return
+            self.assertTrue(start <= self.gen.randrange(start, stop) < stop)
+
+    def test_rangelimits(self) -> None:
+        for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
+            self.assertEqual(set(range(start,stop)),
+                set([self.gen.randrange(start,stop) for i in range(100)]))
+
+    def test_genrandbits(self) -> None:
+        # Verify ranges
+        for k in range(1, 1000):
+            self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)
+
+        # Verify all bits active
+        getbits = self.gen.getrandbits
+        for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
+            cum = 0
+            for i in range(100):
+                cum |= getbits(span)
+            self.assertEqual(cum, 2**span-1)
+
+        # Verify argument checking
+        self.assertRaises(TypeError, self.gen.getrandbits)
+        self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
+        self.assertRaises(ValueError, self.gen.getrandbits, 0)
+        self.assertRaises(ValueError, self.gen.getrandbits, -1)
+        self.assertRaises(TypeError, self.gen.getrandbits, 10.1)
+
+    def test_randbelow_logic(self, _log: Callable[[float, float], float] = log,
+                             int: Callable[[float], int] = int) -> None:
+        # check bitcount transition points:  2**i and 2**(i+1)-1
+        # show that: k = int(1.00001 + _log(n, 2))
+        # is equal to or one greater than the number of bits in n
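+        # (Illustrative: n == 8 == 0b1000 has 4 bits, and
+        # int(1.00001 + _log(8, 2)) == int(4.00001) == 4 == numbits.)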
+        for i in range(1, 1000):
+            n = 1 << i # check an exact power of two
+            numbits = i+1
+            k = int(1.00001 + _log(n, 2))
+            self.assertEqual(k, numbits)
+            self.assertEqual(n, 2**(k-1))
+
+            n += n - 1      # check 1 below the next power of two
+            k = int(1.00001 + _log(n, 2))
+            self.assertIn(k, [numbits, numbits+1])
+            self.assertTrue(2**k > n > 2**(k-2))
+
+            n -= n >> 15     # check a little farther below the next power of two
+            k = int(1.00001 + _log(n, 2))
+            self.assertEqual(k, numbits)        # note the stronger assertion
+            self.assertTrue(2**k > n > 2**(k-1))   # note the stronger assertion
+
+
+class MersenneTwister_TestBasicOps(TestBasicOps[random.Random]):
+    gen = random.Random()
+
+    def test_guaranteed_stable(self) -> None:
+        # These sequences are guaranteed to stay the same across versions of python
+        self.gen.seed(3456147, version=1)
+        self.assertEqual([self.gen.random().hex() for i in range(4)],
+            ['0x1.ac362300d90d2p-1', '0x1.9d16f74365005p-1',
+             '0x1.1ebb4352e4c4dp-1', '0x1.1a7422abf9c11p-1'])
+        self.gen.seed("the quick brown fox", version=2)
+        self.assertEqual([self.gen.random().hex() for i in range(4)],
+            ['0x1.1239ddfb11b7cp-3', '0x1.b3cbb5c51b120p-4',
+             '0x1.8c4f55116b60fp-1', '0x1.63eb525174a27p-1'])
+
+    def test_setstate_first_arg(self) -> None:
+        self.assertRaises(ValueError, self.gen.setstate, (1, None, None))
+
+    def test_setstate_middle_arg(self) -> None:
+        # Wrong type, s/b tuple
+        self.assertRaises(TypeError, self.gen.setstate, (2, None, None))
+        # Wrong length, s/b 625
+        self.assertRaises(ValueError, self.gen.setstate, (2, (1,2,3), None))
+        # Wrong type, s/b tuple of 625 ints
+        self.assertRaises(TypeError, self.gen.setstate, (2, tuple(['a',]*625), None))
+        # Last element s/b an int also
+        self.assertRaises(TypeError, self.gen.setstate, (2, cast(Any, (0,))*624+('a',), None))
+
+    def test_referenceImplementation(self) -> None:
+        # Compare the python implementation with results from the original
+        # code.  Create 2000 53-bit precision random floats.  Compare only
+        # the last ten entries to show that the independent implementations
+        # are tracking.  Here is the main() function needed to create the
+        # list of expected random numbers:
+        #    void main(void){
+        #         int i;
+        #         unsigned long init[4]={61731, 24903, 614, 42143}, length=4;
+        #         init_by_array(init, length);
+        #         for (i=0; i<2000; i++) {
+        #           printf("%.15f ", genrand_res53());
+        #           if (i%5==4) printf("\n");
+        #         }
+        #     }
+        expected = [0.45839803073713259,
+                    0.86057815201978782,
+                    0.92848331726782152,
+                    0.35932681119782461,
+                    0.081823493762449573,
+                    0.14332226470169329,
+                    0.084297823823520024,
+                    0.53814864671831453,
+                    0.089215024911993401,
+                    0.78486196105372907]
+
+        self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96))
+        actual = self.randomlist(2000)[-10:]
+        for a, e in zip(actual, expected):
+            self.assertAlmostEqual(a,e,places=14)
+
+    def test_strong_reference_implementation(self) -> None:
+        # Like test_referenceImplementation, but checks for exact bit-level
+        # equality.  This should pass on any box where C double contains
+        # at least 53 bits of precision (the underlying algorithm suffers
+        # no rounding errors -- all results are exact).
+        from math import ldexp
+
+        expected = [0x0eab3258d2231f,
+                    0x1b89db315277a5,
+                    0x1db622a5518016,
+                    0x0b7f9af0d575bf,
+                    0x029e4c4db82240,
+                    0x04961892f5d673,
+                    0x02b291598e4589,
+                    0x11388382c15694,
+                    0x02dad977c9e1fe,
+                    0x191d96d4d334c6]
+        self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96))
+        actual = self.randomlist(2000)[-10:]
+        for a, e in zip(actual, expected):
+            self.assertEqual(int(ldexp(a, 53)), e)
+
+    def test_long_seed(self) -> None:
+        # This is most interesting to run in debug mode, just to make sure
+        # nothing blows up.  Under the covers, a dynamically resized array
+        # is allocated, consuming space proportional to the number of bits
+        # in the seed.  Unfortunately, that's a quadratic-time algorithm,
+        # so don't make this horribly big.
+        seed = (1 << (10000 * 8)) - 1  # about 10K bytes
+        self.gen.seed(seed)
+
+    def test_53_bits_per_float(self) -> None:
+        # This should pass whenever a C double has 53 bit precision.
+        span = 2 ** 53 # type: int
+        cum = 0
+        for i in range(100):
+            cum |= int(self.gen.random() * span)
+        self.assertEqual(cum, span-1)
+
+    def test_bigrand(self) -> None:
+        # The randrange routine should build up the required number of bits
+        # in stages so that all bit positions are active.
+        span = 2 ** 500 # type: int
+        cum = 0
+        for i in range(100):
+            r = self.gen.randrange(span)
+            self.assertTrue(0 <= r < span)
+            cum |= r
+        self.assertEqual(cum, span-1)
+
+    def test_bigrand_ranges(self) -> None:
+        for i in [40, 80, 160, 200, 211, 250, 375, 512, 550]:
+            start = self.gen.randrange(2 ** i)
+            stop = self.gen.randrange(2 ** (i-2))
+            if stop <= start:
+                return
+            self.assertTrue(start <= self.gen.randrange(start, stop) < stop)
+
+    def test_rangelimits(self) -> None:
+        for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
+            self.assertEqual(set(range(start,stop)),
+                set([self.gen.randrange(start,stop) for i in range(100)]))
+
+    def test_genrandbits(self) -> None:
+        # Verify cross-platform repeatability
+        self.gen.seed(1234567)
+        self.assertEqual(self.gen.getrandbits(100),
+                         97904845777343510404718956115)
+        # Verify ranges
+        for k in range(1, 1000):
+            self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)
+
+        # Verify all bits active
+        getbits = self.gen.getrandbits
+        for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
+            cum = 0
+            for i in range(100):
+                cum |= getbits(span)
+            self.assertEqual(cum, 2**span-1)
+
+        # Verify argument checking
+        self.assertRaises(TypeError, self.gen.getrandbits)
+        self.assertRaises(TypeError, self.gen.getrandbits, 'a')
+        self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
+        self.assertRaises(ValueError, self.gen.getrandbits, 0)
+        self.assertRaises(ValueError, self.gen.getrandbits, -1)
+
+    def test_randbelow_logic(self,
+                             _log: Callable[[int, float], float] = log,
+                             int: Callable[[float], int] = int) -> None:
+        # check bitcount transition points:  2**i and 2**(i+1)-1
+        # show that: k = int(1.00001 + _log(n, 2))
+        # is equal to or one greater than the number of bits in n
+        for i in range(1, 1000):
+            n = 1 << i # check an exact power of two
+            numbits = i+1
+            k = int(1.00001 + _log(n, 2))
+            self.assertEqual(k, numbits)
+            self.assertEqual(n, 2**(k-1))
+
+            n += n - 1      # check 1 below the next power of two
+            k = int(1.00001 + _log(n, 2))
+            self.assertIn(k, [numbits, numbits+1])
+            self.assertTrue(2**k > n > 2**(k-2))
+
+            n -= n >> 15     # check a little farther below the next power of two
+            k = int(1.00001 + _log(n, 2))
+            self.assertEqual(k, numbits)        # note the stronger assertion
+            self.assertTrue(2**k > n > 2**(k-1))   # note the stronger assertion
+
+    def test_randrange_bug_1590891(self) -> None:
+        start = 1000000000000
+        stop = -100000000000000000000
+        step = -200
+        x = self.gen.randrange(start, stop, step)
+        self.assertTrue(stop < x <= start)
+        self.assertEqual((x+stop)%step, 0)
+
+def gamma(z: float, sqrt2pi: float = (2.0*pi)**0.5) -> float:
+    # Reflection to right half of complex plane
+    if z < 0.5:
+        return pi / sin(pi*z) / gamma(1.0-z)
+    # Lanczos approximation with g=7
+    az = z + (7.0 - 0.5)
+    return az ** (z-0.5) / exp(az) * sqrt2pi * fsum([
+        0.9999999999995183,
+        676.5203681218835 / z,
+        -1259.139216722289 / (z+1.0),
+        771.3234287757674 / (z+2.0),
+        -176.6150291498386 / (z+3.0),
+        12.50734324009056 / (z+4.0),
+        -0.1385710331296526 / (z+5.0),
+        0.9934937113930748e-05 / (z+6.0),
+        0.1659470187408462e-06 / (z+7.0),
+    ])
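+
+# (A quick, illustrative sanity check of the Lanczos-based gamma() above:
+# gamma(n) should agree closely with (n - 1)! for small positive integers,
+# e.g. gamma(5) is very nearly 24.0 and gamma(0.5) is very nearly pi ** 0.5.
+# test_avg_std below uses gamma() to compute the Weibull moments it checks.)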
+
+class TestDistributions(unittest.TestCase):
+    def test_zeroinputs(self) -> None:
+        # Verify that distributions can handle a series of zero inputs
+        g = random.Random()
+        x = [g.random() for i in range(50)] + [0.0]*5
+        def patch() -> None:
+            setattr(g, 'random', x[:].pop)
+        patch(); g.uniform(1.0,10.0)
+        patch(); g.paretovariate(1.0)
+        patch(); g.expovariate(1.0)
+        patch(); g.weibullvariate(1.0, 1.0)
+        patch(); g.normalvariate(0.0, 1.0)
+        patch(); g.gauss(0.0, 1.0)
+        patch(); g.lognormvariate(0.0, 1.0)
+        patch(); g.vonmisesvariate(0.0, 1.0)
+        patch(); g.gammavariate(0.01, 1.0)
+        patch(); g.gammavariate(1.0, 1.0)
+        patch(); g.gammavariate(200.0, 1.0)
+        patch(); g.betavariate(3.0, 3.0)
+        patch(); g.triangular(0.0, 1.0, 1.0/3.0)
+
+    def test_avg_std(self) -> None:
+        # Use integration to test distribution average and standard deviation.
+        # Only works for distributions which do not consume variates in pairs
+        g = random.Random()
+        N = 5000
+        x = [i/float(N) for i in range(1,N)]
+        variate = None  # type: Any
+        for variate, args, mu, sigmasqrd in [
+                (g.uniform, (1.0,10.0), (10.0+1.0)/2, (10.0-1.0)**2/12),
+                (g.triangular, (0.0, 1.0, 1.0/3.0), 4.0/9.0, 7.0/9.0/18.0),
+                (g.expovariate, (1.5,), 1/1.5, 1/1.5**2),
+                (g.paretovariate, (5.0,), 5.0/(5.0-1),
+                                  5.0/((5.0-1)**2*(5.0-2))),
+                (g.weibullvariate, (1.0, 3.0), gamma(1+1/3.0),
+                                  gamma(1+2/3.0)-gamma(1+1/3.0)**2) ]:
+            setattr(g, 'random', x[:].pop)
+            y = []  # type: List[float]
+            for i in range(len(x)):
+                try:
+                    y.append(variate(*args))
+                except IndexError:
+                    pass
+            s1 = s2 = 0.0
+            for e in y:
+                s1 += e
+                s2 += (e - mu) ** 2
+            N = len(y)
+            self.assertAlmostEqual(s1/N, mu, places=2)
+            self.assertAlmostEqual(s2/(N-1), sigmasqrd, places=2)
+
+class TestModule(unittest.TestCase):
+    def testMagicConstants(self) -> None:
+        self.assertAlmostEqual(random.NV_MAGICCONST, 1.71552776992141)
+        self.assertAlmostEqual(random.TWOPI, 6.28318530718)
+        self.assertAlmostEqual(random.LOG4, 1.38629436111989)
+        self.assertAlmostEqual(random.SG_MAGICCONST, 2.50407739677627)
+
+    def test__all__(self) -> None:
+        # tests validity but not completeness of the __all__ list
+        self.assertTrue(set(random.__all__) <= set(dir(random)))
+
+    def test_random_subclass_with_kwargs(self) -> None:
+        # SF bug #1486663 -- this used to erroneously raise a TypeError
+        class Subclass(random.Random):
+            def __init__(self, newarg: object = None) -> None:
+                random.Random.__init__(self)
+        Subclass(newarg=1)
+
+
+def test_main(verbose: bool = None) -> None:
+    testclasses =    [MersenneTwister_TestBasicOps,
+                      TestDistributions,
+                      TestModule]
+
+    try:
+        random.SystemRandom().random()
+    except NotImplementedError:
+        pass
+    else:
+        testclasses.append(SystemRandom_TestBasicOps)
+
+    support.run_unittest(*testclasses)
+
+    # verify reference counting
+    import sys
+    if verbose and hasattr(sys, "gettotalrefcount"):
+        counts = [None] * 5 # type: List[int]
+        for i in range(len(counts)):
+            support.run_unittest(*testclasses)
+            counts[i] = sys.gettotalrefcount()
+        print(counts)
+
+if __name__ == "__main__":
+    test_main(verbose=True)
diff --git a/test-data/stdlib-samples/3.2/test/test_set.py b/test-data/stdlib-samples/3.2/test/test_set.py
new file mode 100644
index 0000000..23ae745
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_set.py
@@ -0,0 +1,1884 @@
+import unittest
+from test import support
+import gc
+import weakref
+import operator
+import copy
+import pickle
+from random import randrange, shuffle
+import sys
+import warnings
+import collections
+from typing import Set, Any
+
+class PassThru(Exception):
+    pass
+
+def check_pass_thru():
+    raise PassThru
+    yield 1
+
+class BadCmp:
+    def __hash__(self):
+        return 1
+    def __eq__(self, other):
+        raise RuntimeError
+
+class ReprWrapper:
+    'Used to test self-referential repr() calls'
+    def __repr__(self):
+        return repr(self.value)
+
+#class HashCountingInt(int):
+#    'int-like object that counts the number of times __hash__ is called'
+#    def __init__(self, *args):
+#        self.hash_count = 0
+#    def __hash__(self):
+#        self.hash_count += 1
+#        return int.__hash__(self)
+
+class TestJointOps(unittest.TestCase):
+    # Tests common to both set and frozenset
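+    # (Concrete subclasses supply the "thetype" and "basetype" class
+    # attributes used throughout, e.g. TestSet below sets both to set, so
+    # these shared tests run against each concrete set type.)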
+
+    def setUp(self):
+        self.word = word = 'simsalabim'
+        self.otherword = 'madagascar'
+        self.letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+        self.s = self.thetype(word)
+        self.d = dict.fromkeys(word)
+
+    def test_new_or_init(self):
+        self.assertRaises(TypeError, self.thetype, [], 2)
+        self.assertRaises(TypeError, set().__init__, a=1)
+
+    def test_uniquification(self):
+        actual = sorted(self.s)
+        expected = sorted(self.d)
+        self.assertEqual(actual, expected)
+        self.assertRaises(PassThru, self.thetype, check_pass_thru())
+        self.assertRaises(TypeError, self.thetype, [[]])
+
+    def test_len(self):
+        self.assertEqual(len(self.s), len(self.d))
+
+    def test_contains(self):
+        for c in self.letters:
+            self.assertEqual(c in self.s, c in self.d)
+        self.assertRaises(TypeError, self.s.__contains__, [[]])
+        s = self.thetype([frozenset(self.letters)])
+        self.assertIn(self.thetype(self.letters), s)
+
+    def test_union(self):
+        u = self.s.union(self.otherword)
+        for c in self.letters:
+            self.assertEqual(c in u, c in self.d or c in self.otherword)
+        self.assertEqual(self.s, self.thetype(self.word))
+        self.assertEqual(type(u), self.basetype)
+        self.assertRaises(PassThru, self.s.union, check_pass_thru())
+        self.assertRaises(TypeError, self.s.union, [[]])
+        for C in set, frozenset, dict.fromkeys, str, list, tuple:
+            self.assertEqual(self.thetype('abcba').union(C('cdc')), set('abcd'))
+            self.assertEqual(self.thetype('abcba').union(C('efgfe')), set('abcefg'))
+            self.assertEqual(self.thetype('abcba').union(C('ccb')), set('abc'))
+            self.assertEqual(self.thetype('abcba').union(C('ef')), set('abcef'))
+            self.assertEqual(self.thetype('abcba').union(C('ef'), C('fg')), set('abcefg'))
+
+        # Issue #6573
+        x = self.thetype()
+        self.assertEqual(x.union(set([1]), x, set([2])), self.thetype([1, 2]))
+
+    def test_or(self):
+        i = self.s.union(self.otherword)
+        self.assertEqual(self.s | set(self.otherword), i)
+        self.assertEqual(self.s | frozenset(self.otherword), i)
+        try:
+            self.s | self.otherword
+        except TypeError:
+            pass
+        else:
+            self.fail("s|t did not screen-out general iterables")
+
+    def test_intersection(self):
+        i = self.s.intersection(self.otherword)
+        for c in self.letters:
+            self.assertEqual(c in i, c in self.d and c in self.otherword)
+        self.assertEqual(self.s, self.thetype(self.word))
+        self.assertEqual(type(i), self.basetype)
+        self.assertRaises(PassThru, self.s.intersection, check_pass_thru())
+        for C in set, frozenset, dict.fromkeys, str, list, tuple:
+            self.assertEqual(self.thetype('abcba').intersection(C('cdc')), set('cc'))
+            self.assertEqual(self.thetype('abcba').intersection(C('efgfe')), set(''))
+            self.assertEqual(self.thetype('abcba').intersection(C('ccb')), set('bc'))
+            self.assertEqual(self.thetype('abcba').intersection(C('ef')), set(''))
+            self.assertEqual(self.thetype('abcba').intersection(C('cbcf'), C('bag')), set('b'))
+        s = self.thetype('abcba')
+        z = s.intersection()
+        if self.thetype == frozenset():
+            self.assertEqual(id(s), id(z))
+        else:
+            self.assertNotEqual(id(s), id(z))
+
+    def test_isdisjoint(self):
+        def f(s1, s2):
+            'Pure python equivalent of isdisjoint()'
+            return not set(s1).intersection(s2)
+        for larg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef':
+            s1 = self.thetype(larg)
+            for rarg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef':
+                for C in set, frozenset, dict.fromkeys, str, list, tuple:
+                    s2 = C(rarg)
+                    actual = s1.isdisjoint(s2)
+                    expected = f(s1, s2)
+                    self.assertEqual(actual, expected)
+                    self.assertTrue(actual is True or actual is False)
+
+    def test_and(self):
+        i = self.s.intersection(self.otherword)
+        self.assertEqual(self.s & set(self.otherword), i)
+        self.assertEqual(self.s & frozenset(self.otherword), i)
+        try:
+            self.s & self.otherword
+        except TypeError:
+            pass
+        else:
+            self.fail("s&t did not screen-out general iterables")
+
+    def test_difference(self):
+        i = self.s.difference(self.otherword)
+        for c in self.letters:
+            self.assertEqual(c in i, c in self.d and c not in self.otherword)
+        self.assertEqual(self.s, self.thetype(self.word))
+        self.assertEqual(type(i), self.basetype)
+        self.assertRaises(PassThru, self.s.difference, check_pass_thru())
+        self.assertRaises(TypeError, self.s.difference, [[]])
+        for C in set, frozenset, dict.fromkeys, str, list, tuple:
+            self.assertEqual(self.thetype('abcba').difference(C('cdc')), set('ab'))
+            self.assertEqual(self.thetype('abcba').difference(C('efgfe')), set('abc'))
+            self.assertEqual(self.thetype('abcba').difference(C('ccb')), set('a'))
+            self.assertEqual(self.thetype('abcba').difference(C('ef')), set('abc'))
+            self.assertEqual(self.thetype('abcba').difference(), set('abc'))
+            self.assertEqual(self.thetype('abcba').difference(C('a'), C('b')), set('c'))
+
+    def test_sub(self):
+        i = self.s.difference(self.otherword)
+        self.assertEqual(self.s - set(self.otherword), i)
+        self.assertEqual(self.s - frozenset(self.otherword), i)
+        try:
+            self.s - self.otherword
+        except TypeError:
+            pass
+        else:
+            self.fail("s-t did not screen-out general iterables")
+
+    def test_symmetric_difference(self):
+        i = self.s.symmetric_difference(self.otherword)
+        for c in self.letters:
+            self.assertEqual(c in i, (c in self.d) ^ (c in self.otherword))
+        self.assertEqual(self.s, self.thetype(self.word))
+        self.assertEqual(type(i), self.basetype)
+        self.assertRaises(PassThru, self.s.symmetric_difference, check_pass_thru())
+        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])
+        for C in set, frozenset, dict.fromkeys, str, list, tuple:
+            self.assertEqual(self.thetype('abcba').symmetric_difference(C('cdc')), set('abd'))
+            self.assertEqual(self.thetype('abcba').symmetric_difference(C('efgfe')), set('abcefg'))
+            self.assertEqual(self.thetype('abcba').symmetric_difference(C('ccb')), set('a'))
+            self.assertEqual(self.thetype('abcba').symmetric_difference(C('ef')), set('abcef'))
+
+    def test_xor(self):
+        i = self.s.symmetric_difference(self.otherword)
+        self.assertEqual(self.s ^ set(self.otherword), i)
+        self.assertEqual(self.s ^ frozenset(self.otherword), i)
+        try:
+            self.s ^ self.otherword
+        except TypeError:
+            pass
+        else:
+            self.fail("s^t did not screen-out general iterables")
+
+    def test_equality(self):
+        self.assertEqual(self.s, set(self.word))
+        self.assertEqual(self.s, frozenset(self.word))
+        self.assertEqual(self.s == self.word, False)
+        self.assertNotEqual(self.s, set(self.otherword))
+        self.assertNotEqual(self.s, frozenset(self.otherword))
+        self.assertEqual(self.s != self.word, True)
+
+    def test_setOfFrozensets(self):
+        t = map(frozenset, ['abcdef', 'bcd', 'bdcb', 'fed', 'fedccba'])
+        s = self.thetype(t)
+        self.assertEqual(len(s), 3)
+
+    def test_sub_and_super(self):
+        p, q, r = map(self.thetype, ['ab', 'abcde', 'def'])
+        self.assertTrue(p < q)
+        self.assertTrue(p <= q)
+        self.assertTrue(q <= q)
+        self.assertTrue(q > p)
+        self.assertTrue(q >= p)
+        self.assertFalse(q < r)
+        self.assertFalse(q <= r)
+        self.assertFalse(q > r)
+        self.assertFalse(q >= r)
+        self.assertTrue(set('a').issubset('abc'))
+        self.assertTrue(set('abc').issuperset('a'))
+        self.assertFalse(set('a').issubset('cbs'))
+        self.assertFalse(set('cbs').issuperset('a'))
+
+    def test_pickling(self):
+        for i in range(pickle.HIGHEST_PROTOCOL + 1):
+            p = pickle.dumps(self.s, i)
+            dup = pickle.loads(p)
+            self.assertEqual(self.s, dup, "%s != %s" % (self.s, dup))
+            if type(self.s) not in (set, frozenset):
+                self.s.x = 10
+                p = pickle.dumps(self.s)
+                dup = pickle.loads(p)
+                self.assertEqual(self.s.x, dup.x)
+
+    def test_deepcopy(self):
+        class Tracer:
+            def __init__(self, value):
+                self.value = value
+            def __hash__(self):
+                return self.value
+            def __deepcopy__(self, memo=None):
+                return Tracer(self.value + 1)
+        t = Tracer(10)
+        s = self.thetype([t])
+        dup = copy.deepcopy(s)
+        self.assertNotEqual(id(s), id(dup))
+        for elem in dup:
+            newt = elem
+        self.assertNotEqual(id(t), id(newt))
+        self.assertEqual(t.value + 1, newt.value)
+
+    def test_gc(self):
+        # Create a nest of cycles to exercise overall ref count check
+        class A:
+            pass
+        s = set(A() for i in range(1000))
+        for elem in s:
+            elem.cycle = s
+            elem.sub = elem
+            elem.set = set([elem])
+
+    def test_subclass_with_custom_hash(self):
+        raise NotImplementedError() # runtime computed base class below
+        # Bug #1257731
+        class H: # (self.thetype):
+            def __hash__(self):
+                return int(id(self) & 0x7fffffff)
+        s=H()
+        f=set()
+        f.add(s)
+        self.assertIn(s, f)
+        f.remove(s)
+        f.add(s)
+        f.discard(s)
+
+    def test_badcmp(self):
+        s = self.thetype([BadCmp()])
+        # Detect comparison errors during insertion and lookup
+        self.assertRaises(RuntimeError, self.thetype, [BadCmp(), BadCmp()])
+        self.assertRaises(RuntimeError, s.__contains__, BadCmp())
+        # Detect errors during mutating operations
+        if hasattr(s, 'add'):
+            self.assertRaises(RuntimeError, s.add, BadCmp())
+            self.assertRaises(RuntimeError, s.discard, BadCmp())
+            self.assertRaises(RuntimeError, s.remove, BadCmp())
+
+    def test_cyclical_repr(self):
+        w = ReprWrapper()
+        s = self.thetype([w])
+        w.value = s
+        if self.thetype == set:
+            self.assertEqual(repr(s), '{set(...)}')
+        else:
+            name = repr(s).partition('(')[0]    # strip class name
+            self.assertEqual(repr(s), '%s({%s(...)})' % (name, name))
+
+    def test_cyclical_print(self):
+        w = ReprWrapper()
+        s = self.thetype([w])
+        w.value = s
+        fo = open(support.TESTFN, "w")
+        try:
+            fo.write(str(s))
+            fo.close()
+            fo = open(support.TESTFN, "r")
+            self.assertEqual(fo.read(), repr(s))
+        finally:
+            fo.close()
+            support.unlink(support.TESTFN)
+
+    def test_do_not_rehash_dict_keys(self):
+        raise NotImplementedError() # cannot subclass int
+        n = 10
+        d = None # dict.fromkeys(map(HashCountingInt, range(n)))
+        self.assertEqual(sum(elem.hash_count for elem in d), n)
+        s = self.thetype(d)
+        self.assertEqual(sum(elem.hash_count for elem in d), n)
+        s.difference(d)
+        self.assertEqual(sum(elem.hash_count for elem in d), n)
+        if hasattr(s, 'symmetric_difference_update'):
+            s.symmetric_difference_update(d)
+        self.assertEqual(sum(elem.hash_count for elem in d), n)
+        d2 = dict.fromkeys(set(d))
+        self.assertEqual(sum(elem.hash_count for elem in d), n)
+        d3 = dict.fromkeys(frozenset(d))
+        self.assertEqual(sum(elem.hash_count for elem in d), n)
+        d3 = dict.fromkeys(frozenset(d), 123)
+        self.assertEqual(sum(elem.hash_count for elem in d), n)
+        self.assertEqual(d3, dict.fromkeys(d, 123))
+
+    def test_container_iterator(self):
+        # Bug #3680: tp_traverse was not implemented for set iterator object
+        class C(object):
+            pass
+        obj = C()
+        ref = weakref.ref(obj)
+        container = set([obj, 1])
+        obj.x = iter(container)
+        obj = None
+        container = None
+        gc.collect()
+        self.assertTrue(ref() is None, "Cycle was not collected")
+
+class TestSet(TestJointOps):
+    thetype = set
+    basetype = set
+
+    def test_init(self):
+        s = self.thetype()
+        s.__init__(self.word)
+        self.assertEqual(s, set(self.word))
+        s.__init__(self.otherword)
+        self.assertEqual(s, set(self.otherword))
+        self.assertRaises(TypeError, s.__init__, s, 2)
+        self.assertRaises(TypeError, s.__init__, 1)
+
+    def test_constructor_identity(self):
+        s = self.thetype(range(3))
+        t = self.thetype(s)
+        self.assertNotEqual(id(s), id(t))
+
+    def test_set_literal(self):
+        raise NotImplementedError()
+        #s = set([1,2,3])
+        #t = {1,2,3}
+        #self.assertEqual(s, t)
+
+    def test_hash(self):
+        self.assertRaises(TypeError, hash, self.s)
+
+    def test_clear(self):
+        self.s.clear()
+        self.assertEqual(self.s, set())
+        self.assertEqual(len(self.s), 0)
+
+    def test_copy(self):
+        dup = self.s.copy()
+        self.assertEqual(self.s, dup)
+        self.assertNotEqual(id(self.s), id(dup))
+        self.assertEqual(type(dup), self.basetype)
+
+    def test_add(self):
+        self.s.add('Q')
+        self.assertIn('Q', self.s)
+        dup = self.s.copy()
+        self.s.add('Q')
+        self.assertEqual(self.s, dup)
+        self.assertRaises(TypeError, self.s.add, [])
+
+    def test_remove(self):
+        self.s.remove('a')
+        self.assertNotIn('a', self.s)
+        self.assertRaises(KeyError, self.s.remove, 'Q')
+        self.assertRaises(TypeError, self.s.remove, [])
+        s = self.thetype([frozenset(self.word)])
+        self.assertIn(self.thetype(self.word), s)
+        s.remove(self.thetype(self.word))
+        self.assertNotIn(self.thetype(self.word), s)
+        self.assertRaises(KeyError, self.s.remove, self.thetype(self.word))
+
+    def test_remove_keyerror_unpacking(self):
+        # bug:  www.python.org/sf/1576657
+        for v1 in ['Q', (1,)]:
+            try:
+                self.s.remove(v1)
+            except KeyError as e:
+                v2 = e.args[0]
+                self.assertEqual(v1, v2)
+            else:
+                self.fail()
+
+    def test_remove_keyerror_set(self):
+        key = self.thetype([3, 4])
+        try:
+            self.s.remove(key)
+        except KeyError as e:
+            self.assertTrue(e.args[0] is key,
+                         "KeyError should be {0}, not {1}".format(key,
+                                                                  e.args[0]))
+        else:
+            self.fail()
+
+    def test_discard(self):
+        self.s.discard('a')
+        self.assertNotIn('a', self.s)
+        self.s.discard('Q')
+        self.assertRaises(TypeError, self.s.discard, [])
+        s = self.thetype([frozenset(self.word)])
+        self.assertIn(self.thetype(self.word), s)
+        s.discard(self.thetype(self.word))
+        self.assertNotIn(self.thetype(self.word), s)
+        s.discard(self.thetype(self.word))
+
+    def test_pop(self):
+        for i in range(len(self.s)):
+            elem = self.s.pop()
+            self.assertNotIn(elem, self.s)
+        self.assertRaises(KeyError, self.s.pop)
+
+    def test_update(self):
+        retval = self.s.update(self.otherword)
+        self.assertEqual(retval, None)
+        for c in (self.word + self.otherword):
+            self.assertIn(c, self.s)
+        self.assertRaises(PassThru, self.s.update, check_pass_thru())
+        self.assertRaises(TypeError, self.s.update, [[]])
+        for p, q in (('cdc', 'abcd'), ('efgfe', 'abcefg'), ('ccb', 'abc'), ('ef', 'abcef')):
+            for C in set, frozenset, dict.fromkeys, str, list, tuple:
+                s = self.thetype('abcba')
+                self.assertEqual(s.update(C(p)), None)
+                self.assertEqual(s, set(q))
+        for p in ('cdc', 'efgfe', 'ccb', 'ef', 'abcda'):
+            q = 'ahi'
+            for C in set, frozenset, dict.fromkeys, str, list, tuple:
+                s = self.thetype('abcba')
+                self.assertEqual(s.update(C(p), C(q)), None)
+                self.assertEqual(s, set(s) | set(p) | set(q))
+
+    def test_ior(self):
+        self.s |= set(self.otherword)
+        for c in (self.word + self.otherword):
+            self.assertIn(c, self.s)
+
+    def test_intersection_update(self):
+        retval = self.s.intersection_update(self.otherword)
+        self.assertEqual(retval, None)
+        for c in (self.word + self.otherword):
+            if c in self.otherword and c in self.word:
+                self.assertIn(c, self.s)
+            else:
+                self.assertNotIn(c, self.s)
+        self.assertRaises(PassThru, self.s.intersection_update, check_pass_thru())
+        self.assertRaises(TypeError, self.s.intersection_update, [[]])
+        for p, q in (('cdc', 'c'), ('efgfe', ''), ('ccb', 'bc'), ('ef', '')):
+            for C in set, frozenset, dict.fromkeys, str, list, tuple:
+                s = self.thetype('abcba')
+                self.assertEqual(s.intersection_update(C(p)), None)
+                self.assertEqual(s, set(q))
+                ss = 'abcba'
+                s = self.thetype(ss)
+                t = 'cbc'
+                self.assertEqual(s.intersection_update(C(p), C(t)), None)
+                self.assertEqual(s, set('abcba')&set(p)&set(t))
+
+    def test_iand(self):
+        self.s &= set(self.otherword)
+        for c in (self.word + self.otherword):
+            if c in self.otherword and c in self.word:
+                self.assertIn(c, self.s)
+            else:
+                self.assertNotIn(c, self.s)
+
+    def test_difference_update(self):
+        retval = self.s.difference_update(self.otherword)
+        self.assertEqual(retval, None)
+        for c in (self.word + self.otherword):
+            if c in self.word and c not in self.otherword:
+                self.assertIn(c, self.s)
+            else:
+                self.assertNotIn(c, self.s)
+        self.assertRaises(PassThru, self.s.difference_update, check_pass_thru())
+        self.assertRaises(TypeError, self.s.difference_update, [[]])
+        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])
+        for p, q in (('cdc', 'ab'), ('efgfe', 'abc'), ('ccb', 'a'), ('ef', 'abc')):
+            for C in set, frozenset, dict.fromkeys, str, list, tuple:
+                s = self.thetype('abcba')
+                self.assertEqual(s.difference_update(C(p)), None)
+                self.assertEqual(s, set(q))
+
+                s = self.thetype('abcdefghih')
+                s.difference_update()
+                self.assertEqual(s, self.thetype('abcdefghih'))
+
+                s = self.thetype('abcdefghih')
+                s.difference_update(C('aba'))
+                self.assertEqual(s, self.thetype('cdefghih'))
+
+                s = self.thetype('abcdefghih')
+                s.difference_update(C('cdc'), C('aba'))
+                self.assertEqual(s, self.thetype('efghih'))
+
+    def test_isub(self):
+        self.s -= set(self.otherword)
+        for c in (self.word + self.otherword):
+            if c in self.word and c not in self.otherword:
+                self.assertIn(c, self.s)
+            else:
+                self.assertNotIn(c, self.s)
+
+    def test_symmetric_difference_update(self):
+        retval = self.s.symmetric_difference_update(self.otherword)
+        self.assertEqual(retval, None)
+        for c in (self.word + self.otherword):
+            if (c in self.word) ^ (c in self.otherword):
+                self.assertIn(c, self.s)
+            else:
+                self.assertNotIn(c, self.s)
+        self.assertRaises(PassThru, self.s.symmetric_difference_update, check_pass_thru())
+        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])
+        for p, q in (('cdc', 'abd'), ('efgfe', 'abcefg'), ('ccb', 'a'), ('ef', 'abcef')):
+            for C in set, frozenset, dict.fromkeys, str, list, tuple:
+                s = self.thetype('abcba')
+                self.assertEqual(s.symmetric_difference_update(C(p)), None)
+                self.assertEqual(s, set(q))
+
+    def test_ixor(self):
+        self.s ^= set(self.otherword)
+        for c in (self.word + self.otherword):
+            if (c in self.word) ^ (c in self.otherword):
+                self.assertIn(c, self.s)
+            else:
+                self.assertNotIn(c, self.s)
+
+    def test_inplace_on_self(self):
+        t = self.s.copy()
+        t |= t
+        self.assertEqual(t, self.s)
+        t &= t
+        self.assertEqual(t, self.s)
+        t -= t
+        self.assertEqual(t, self.thetype())
+        t = self.s.copy()
+        t ^= t
+        self.assertEqual(t, self.thetype())
+
+    def test_weakref(self):
+        s = self.thetype('gallahad')
+        p = weakref.proxy(s)
+        self.assertEqual(str(p), str(s))
+        s = None
+        self.assertRaises(ReferenceError, str, p)
+
+    def test_rich_compare(self):
+        class TestRichSetCompare:
+            def __gt__(self, some_set):
+                self.gt_called = True
+                return False
+            def __lt__(self, some_set):
+                self.lt_called = True
+                return False
+            def __ge__(self, some_set):
+                self.ge_called = True
+                return False
+            def __le__(self, some_set):
+                self.le_called = True
+                return False
+
+        # This first tries the builtin rich set comparison, which doesn't know
+        # how to handle the custom object. Upon returning NotImplemented, the
+        # corresponding comparison on the right object is invoked.
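+        # For example, "myset < myobj" first calls set.__lt__(myset, myobj);
+        # that returns NotImplemented, so Python falls back to the reflected
+        # method TestRichSetCompare.__gt__(myobj, myset) -- hence gt_called.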
+        myset = {1, 2, 3}
+
+        myobj = TestRichSetCompare()
+        myset < myobj
+        self.assertTrue(myobj.gt_called)
+
+        myobj = TestRichSetCompare()
+        myset > myobj
+        self.assertTrue(myobj.lt_called)
+
+        myobj = TestRichSetCompare()
+        myset <= myobj
+        self.assertTrue(myobj.ge_called)
+
+        myobj = TestRichSetCompare()
+        myset >= myobj
+        self.assertTrue(myobj.le_called)
+
+    # C API test only available in a debug build
+    if hasattr(set, "test_c_api"):
+        def test_c_api(self):
+            self.assertEqual(set().test_c_api(), True)
+
+class SetSubclass(set):
+    pass
+
+class TestSetSubclass(TestSet):
+    thetype = SetSubclass
+    basetype = set
+
+class SetSubclassWithKeywordArgs(set):
+    def __init__(self, iterable=[], newarg=None):
+        set.__init__(self, iterable)
+
+class TestSetSubclassWithKeywordArgs(TestSet):
+
+    def test_keywords_in_subclass(self):
+        'SF bug #1486663 -- this used to erroneously raise a TypeError'
+        SetSubclassWithKeywordArgs(newarg=1)
+
+class TestFrozenSet(TestJointOps):
+    thetype = frozenset
+    basetype = frozenset
+
+    def test_init(self):
+        s = self.thetype(self.word)
+        s.__init__(self.otherword)
+        self.assertEqual(s, set(self.word))
+
+    def test_singleton_empty_frozenset(self):
+        f = frozenset()
+        efs = [frozenset(), frozenset([]), frozenset(()), frozenset(''),
+               frozenset(), frozenset([]), frozenset(()), frozenset(''),
+               frozenset(range(0)), frozenset(frozenset()),
+               frozenset(f), f]
+        # All of the empty frozensets should have just one id()
+        self.assertEqual(len(set(map(id, efs))), 1)
+
+    def test_constructor_identity(self):
+        s = self.thetype(range(3))
+        t = self.thetype(s)
+        self.assertEqual(id(s), id(t))
+
+    def test_hash(self):
+        self.assertEqual(hash(self.thetype('abcdeb')),
+                         hash(self.thetype('ebecda')))
+
+        # make sure that all permutations give the same hash value
+        n = 100
+        seq = [randrange(n) for i in range(n)]
+        results = set()
+        for i in range(200):
+            shuffle(seq)
+            results.add(hash(self.thetype(seq)))
+        self.assertEqual(len(results), 1)
+
+    def test_copy(self):
+        dup = self.s.copy()
+        self.assertEqual(id(self.s), id(dup))
+
+    def test_frozen_as_dictkey(self):
+        seq = list(range(10)) + list('abcdefg') + ['apple']
+        key1 = self.thetype(seq)
+        key2 = self.thetype(reversed(seq))
+        self.assertEqual(key1, key2)
+        self.assertNotEqual(id(key1), id(key2))
+        d = {}
+        d[key1] = 42
+        self.assertEqual(d[key2], 42)
+
+    def test_hash_caching(self):
+        f = self.thetype('abcdcda')
+        self.assertEqual(hash(f), hash(f))
+
+    def test_hash_effectiveness(self):
+        n = 13
+        hashvalues = set()
+        addhashvalue = hashvalues.add
+        elemmasks = [(i+1, 1<<i) for i in range(n)]
+        for i in range(2**n):
+            addhashvalue(hash(frozenset([e for e, m in elemmasks if m&i])))
+        self.assertEqual(len(hashvalues), 2**n)
+
+class FrozenSetSubclass(frozenset):
+    pass
+
+class TestFrozenSetSubclass(TestFrozenSet):
+    thetype = FrozenSetSubclass
+    basetype = frozenset
+
+    def test_constructor_identity(self):
+        s = self.thetype(range(3))
+        t = self.thetype(s)
+        self.assertNotEqual(id(s), id(t))
+
+    def test_copy(self):
+        dup = self.s.copy()
+        self.assertNotEqual(id(self.s), id(dup))
+
+    def test_nested_empty_constructor(self):
+        s = self.thetype()
+        t = self.thetype(s)
+        self.assertEqual(s, t)
+
+    def test_singleton_empty_frozenset(self):
+        Frozenset = self.thetype
+        f = frozenset()
+        F = Frozenset()
+        efs = [Frozenset(), Frozenset([]), Frozenset(()), Frozenset(''),
+               Frozenset(), Frozenset([]), Frozenset(()), Frozenset(''),
+               Frozenset(range(0)), Frozenset(Frozenset()),
+               Frozenset(frozenset()), f, F, Frozenset(f), Frozenset(F)]
+        # All empty frozenset subclass instances should have different ids
+        self.assertEqual(len(set(map(id, efs))), len(efs))
+
+# Tests taken from test_sets.py =============================================
+
+empty_set = set() # type: Any
+
+#==============================================================================
+
+class TestBasicOps(unittest.TestCase):
+
+    def test_repr(self):
+        if self.repr is not None:
+            self.assertEqual(repr(self.set), self.repr)
+
+    def check_repr_against_values(self):
+        text = repr(self.set)
+        self.assertTrue(text.startswith('{'))
+        self.assertTrue(text.endswith('}'))
+
+        result = text[1:-1].split(', ')
+        result.sort()
+        sorted_repr_values = [repr(value) for value in self.values]
+        sorted_repr_values.sort()
+        self.assertEqual(result, sorted_repr_values)
+
+    def test_print(self):
+        try:
+            fo = open(support.TESTFN, "w")
+            fo.write(str(self.set))
+            fo.close()
+            fo = open(support.TESTFN, "r")
+            self.assertEqual(fo.read(), repr(self.set))
+        finally:
+            fo.close()
+            support.unlink(support.TESTFN)
+
+    def test_length(self):
+        self.assertEqual(len(self.set), self.length)
+
+    def test_self_equality(self):
+        self.assertEqual(self.set, self.set)
+
+    def test_equivalent_equality(self):
+        self.assertEqual(self.set, self.dup)
+
+    def test_copy(self):
+        self.assertEqual(self.set.copy(), self.dup)
+
+    def test_self_union(self):
+        result = self.set | self.set
+        self.assertEqual(result, self.dup)
+
+    def test_empty_union(self):
+        result = self.set | empty_set
+        self.assertEqual(result, self.dup)
+
+    def test_union_empty(self):
+        result = empty_set | self.set
+        self.assertEqual(result, self.dup)
+
+    def test_self_intersection(self):
+        result = self.set & self.set
+        self.assertEqual(result, self.dup)
+
+    def test_empty_intersection(self):
+        result = self.set & empty_set
+        self.assertEqual(result, empty_set)
+
+    def test_intersection_empty(self):
+        result = empty_set & self.set
+        self.assertEqual(result, empty_set)
+
+    def test_self_isdisjoint(self):
+        result = self.set.isdisjoint(self.set)
+        self.assertEqual(result, not self.set)
+
+    def test_empty_isdisjoint(self):
+        result = self.set.isdisjoint(empty_set)
+        self.assertEqual(result, True)
+
+    def test_isdisjoint_empty(self):
+        result = empty_set.isdisjoint(self.set)
+        self.assertEqual(result, True)
+
+    def test_self_symmetric_difference(self):
+        result = self.set ^ self.set
+        self.assertEqual(result, empty_set)
+
+    def test_empty_symmetric_difference(self):
+        result = self.set ^ empty_set
+        self.assertEqual(result, self.set)
+
+    def test_self_difference(self):
+        result = self.set - self.set
+        self.assertEqual(result, empty_set)
+
+    def test_empty_difference(self):
+        result = self.set - empty_set
+        self.assertEqual(result, self.dup)
+
+    def test_empty_difference_rev(self):
+        result = empty_set - self.set
+        self.assertEqual(result, empty_set)
+
+    def test_iteration(self):
+        for v in self.set:
+            self.assertIn(v, self.values)
+        setiter = iter(self.set)
+        # note: __length_hint__ is an internal undocumented API,
+        # don't rely on it in your own programs
+        self.assertEqual(setiter.__length_hint__(), len(self.set))
+
+    def test_pickling(self):
+        p = pickle.dumps(self.set)
+        copy = pickle.loads(p)
+        self.assertEqual(self.set, copy,
+                         "%s != %s" % (self.set, copy))
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsEmpty(TestBasicOps):
+    def setUp(self):
+        self.case   = "empty set"
+        self.values = []
+        self.set    = set(self.values)
+        self.dup    = set(self.values)
+        self.length = 0
+        self.repr   = "set()"
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsSingleton(TestBasicOps):
+    def setUp(self):
+        self.case   = "unit set (number)"
+        self.values = [3]
+        self.set    = set(self.values)
+        self.dup    = set(self.values)
+        self.length = 1
+        self.repr   = "{3}"
+
+    def test_in(self):
+        self.assertIn(3, self.set)
+
+    def test_not_in(self):
+        self.assertNotIn(2, self.set)
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsTuple(TestBasicOps):
+    def setUp(self):
+        self.case   = "unit set (tuple)"
+        self.values = [(0, "zero")]
+        self.set    = set(self.values)
+        self.dup    = set(self.values)
+        self.length = 1
+        self.repr   = "{(0, 'zero')}"
+
+    def test_in(self):
+        self.assertIn((0, "zero"), self.set)
+
+    def test_not_in(self):
+        self.assertNotIn(9, self.set)
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsTriple(TestBasicOps):
+    def setUp(self):
+        self.case   = "triple set"
+        self.values = [0, "zero", operator.add]
+        self.set    = set(self.values)
+        self.dup    = set(self.values)
+        self.length = 3
+        self.repr   = None
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsString(TestBasicOps):
+    def setUp(self):
+        self.case   = "string set"
+        self.values = ["a", "b", "c"]
+        self.set    = set(self.values)
+        self.dup    = set(self.values)
+        self.length = 3
+
+    def test_repr(self):
+        self.check_repr_against_values()
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsBytes(TestBasicOps):
+    def setUp(self):
+        self.case   = "bytes set"
+        self.values = [b"a", b"b", b"c"]
+        self.set    = set(self.values)
+        self.dup    = set(self.values)
+        self.length = 3
+
+    def test_repr(self):
+        self.check_repr_against_values()
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsMixedStringBytes(TestBasicOps):
+    def setUp(self):
+        self._warning_filters = support.check_warnings()
+        self._warning_filters.__enter__()
+        warnings.simplefilter('ignore', BytesWarning)
+        self.case   = "string and bytes set"
+        self.values = ["a", "b", b"a", b"b"]
+        self.set    = set(self.values)
+        self.dup    = set(self.values)
+        self.length = 4
+
+    def tearDown(self):
+        self._warning_filters.__exit__(None, None, None)
+
+    def test_repr(self):
+        self.check_repr_against_values()
+
+#==============================================================================
+
+def baditer():
+    raise TypeError
+    yield True
+
+def gooditer():
+    yield True
+
+class TestExceptionPropagation(unittest.TestCase):
+    """SF 628246:  Set constructor should not trap iterator TypeErrors"""
+
+    def test_instanceWithException(self):
+        self.assertRaises(TypeError, set, baditer())
+
+    def test_instancesWithoutException(self):
+        # All of these iterables should load without exception.
+        set([1,2,3])
+        set((1,2,3))
+        set({'one':1, 'two':2, 'three':3})
+        set(range(3))
+        set('abc')
+        set(gooditer())
+
+    def test_changingSizeWhileIterating(self):
+        s = set([1,2,3])
+        try:
+            for i in s:
+                s.update([4])
+        except RuntimeError:
+            pass
+        else:
+            self.fail("no exception when changing size during iteration")
+
+#==============================================================================
+
+class TestSetOfSets(unittest.TestCase):
+    def test_constructor(self):
+        inner = frozenset([1])
+        outer = set([inner])
+        element = outer.pop()
+        self.assertEqual(type(element), frozenset)
+        outer.add(inner)        # Rebuild set of sets with .add method
+        outer.remove(inner)
+        self.assertEqual(outer, set())   # Verify that remove worked
+        outer.discard(inner)    # Absence of KeyError indicates working fine
+
+#==============================================================================
+
+class TestBinaryOps(unittest.TestCase):
+    def setUp(self):
+        self.set = set((2, 4, 6))
+
+    def test_eq(self):              # SF bug 643115
+        self.assertEqual(self.set, set({2:1,4:3,6:5}))
+
+    def test_union_subset(self):
+        result = self.set | set([2])
+        self.assertEqual(result, set((2, 4, 6)))
+
+    def test_union_superset(self):
+        result = self.set | set([2, 4, 6, 8])
+        self.assertEqual(result, set([2, 4, 6, 8]))
+
+    def test_union_overlap(self):
+        result = self.set | set([3, 4, 5])
+        self.assertEqual(result, set([2, 3, 4, 5, 6]))
+
+    def test_union_non_overlap(self):
+        result = self.set | set([8])
+        self.assertEqual(result, set([2, 4, 6, 8]))
+
+    def test_intersection_subset(self):
+        result = self.set & set((2, 4))
+        self.assertEqual(result, set((2, 4)))
+
+    def test_intersection_superset(self):
+        result = self.set & set([2, 4, 6, 8])
+        self.assertEqual(result, set([2, 4, 6]))
+
+    def test_intersection_overlap(self):
+        result = self.set & set([3, 4, 5])
+        self.assertEqual(result, set([4]))
+
+    def test_intersection_non_overlap(self):
+        result = self.set & set([8])
+        self.assertEqual(result, empty_set)
+
+    def test_isdisjoint_subset(self):
+        result = self.set.isdisjoint(set((2, 4)))
+        self.assertEqual(result, False)
+
+    def test_isdisjoint_superset(self):
+        result = self.set.isdisjoint(set([2, 4, 6, 8]))
+        self.assertEqual(result, False)
+
+    def test_isdisjoint_overlap(self):
+        result = self.set.isdisjoint(set([3, 4, 5]))
+        self.assertEqual(result, False)
+
+    def test_isdisjoint_non_overlap(self):
+        result = self.set.isdisjoint(set([8]))
+        self.assertEqual(result, True)
+
+    def test_sym_difference_subset(self):
+        result = self.set ^ set((2, 4))
+        self.assertEqual(result, set([6]))
+
+    def test_sym_difference_superset(self):
+        result = self.set ^ set((2, 4, 6, 8))
+        self.assertEqual(result, set([8]))
+
+    def test_sym_difference_overlap(self):
+        result = self.set ^ set((3, 4, 5))
+        self.assertEqual(result, set([2, 3, 5, 6]))
+
+    def test_sym_difference_non_overlap(self):
+        result = self.set ^ set([8])
+        self.assertEqual(result, set([2, 4, 6, 8]))
+
+#==============================================================================
+
+class TestUpdateOps(unittest.TestCase):
+    def setUp(self):
+        self.set = set((2, 4, 6))
+
+    def test_union_subset(self):
+        self.set |= set([2])
+        self.assertEqual(self.set, set((2, 4, 6)))
+
+    def test_union_superset(self):
+        self.set |= set([2, 4, 6, 8])
+        self.assertEqual(self.set, set([2, 4, 6, 8]))
+
+    def test_union_overlap(self):
+        self.set |= set([3, 4, 5])
+        self.assertEqual(self.set, set([2, 3, 4, 5, 6]))
+
+    def test_union_non_overlap(self):
+        self.set |= set([8])
+        self.assertEqual(self.set, set([2, 4, 6, 8]))
+
+    def test_union_method_call(self):
+        self.set.update(set([3, 4, 5]))
+        self.assertEqual(self.set, set([2, 3, 4, 5, 6]))
+
+    def test_intersection_subset(self):
+        self.set &= set((2, 4))
+        self.assertEqual(self.set, set((2, 4)))
+
+    def test_intersection_superset(self):
+        self.set &= set([2, 4, 6, 8])
+        self.assertEqual(self.set, set([2, 4, 6]))
+
+    def test_intersection_overlap(self):
+        self.set &= set([3, 4, 5])
+        self.assertEqual(self.set, set([4]))
+
+    def test_intersection_non_overlap(self):
+        self.set &= set([8])
+        self.assertEqual(self.set, empty_set)
+
+    def test_intersection_method_call(self):
+        self.set.intersection_update(set([3, 4, 5]))
+        self.assertEqual(self.set, set([4]))
+
+    def test_sym_difference_subset(self):
+        self.set ^= set((2, 4))
+        self.assertEqual(self.set, set([6]))
+
+    def test_sym_difference_superset(self):
+        self.set ^= set((2, 4, 6, 8))
+        self.assertEqual(self.set, set([8]))
+
+    def test_sym_difference_overlap(self):
+        self.set ^= set((3, 4, 5))
+        self.assertEqual(self.set, set([2, 3, 5, 6]))
+
+    def test_sym_difference_non_overlap(self):
+        self.set ^= set([8])
+        self.assertEqual(self.set, set([2, 4, 6, 8]))
+
+    def test_sym_difference_method_call(self):
+        self.set.symmetric_difference_update(set([3, 4, 5]))
+        self.assertEqual(self.set, set([2, 3, 5, 6]))
+
+    def test_difference_subset(self):
+        self.set -= set((2, 4))
+        self.assertEqual(self.set, set([6]))
+
+    def test_difference_superset(self):
+        self.set -= set((2, 4, 6, 8))
+        self.assertEqual(self.set, set([]))
+
+    def test_difference_overlap(self):
+        self.set -= set((3, 4, 5))
+        self.assertEqual(self.set, set([2, 6]))
+
+    def test_difference_non_overlap(self):
+        self.set -= set([8])
+        self.assertEqual(self.set, set([2, 4, 6]))
+
+    def test_difference_method_call(self):
+        self.set.difference_update(set([3, 4, 5]))
+        self.assertEqual(self.set, set([2, 6]))
+
+#==============================================================================
+
+class TestMutate(unittest.TestCase):
+    def setUp(self):
+        self.values = ["a", "b", "c"]
+        self.set = set(self.values)
+
+    def test_add_present(self):
+        self.set.add("c")
+        self.assertEqual(self.set, set("abc"))
+
+    def test_add_absent(self):
+        self.set.add("d")
+        self.assertEqual(self.set, set("abcd"))
+
+    def test_add_until_full(self):
+        tmp = set()
+        expected_len = 0
+        for v in self.values:
+            tmp.add(v)
+            expected_len += 1
+            self.assertEqual(len(tmp), expected_len)
+        self.assertEqual(tmp, self.set)
+
+    def test_remove_present(self):
+        self.set.remove("b")
+        self.assertEqual(self.set, set("ac"))
+
+    def test_remove_absent(self):
+        try:
+            self.set.remove("d")
+            self.fail("Removing missing element should have raised LookupError")
+        except LookupError:
+            pass
+
+    def test_remove_until_empty(self):
+        expected_len = len(self.set)
+        for v in self.values:
+            self.set.remove(v)
+            expected_len -= 1
+            self.assertEqual(len(self.set), expected_len)
+
+    def test_discard_present(self):
+        self.set.discard("c")
+        self.assertEqual(self.set, set("ab"))
+
+    def test_discard_absent(self):
+        self.set.discard("d")
+        self.assertEqual(self.set, set("abc"))
+
+    def test_clear(self):
+        self.set.clear()
+        self.assertEqual(len(self.set), 0)
+
+    def test_pop(self):
+        popped = {}
+        while self.set:
+            popped[self.set.pop()] = None
+        self.assertEqual(len(popped), len(self.values))
+        for v in self.values:
+            self.assertIn(v, popped)
+
+    def test_update_empty_tuple(self):
+        self.set.update(())
+        self.assertEqual(self.set, set(self.values))
+
+    def test_update_unit_tuple_overlap(self):
+        self.set.update(("a",))
+        self.assertEqual(self.set, set(self.values))
+
+    def test_update_unit_tuple_non_overlap(self):
+        self.set.update(("a", "z"))
+        self.assertEqual(self.set, set(self.values + ["z"]))
+
+#==============================================================================
+
+class TestSubsets(unittest.TestCase):
+
+    case2method = {"<=": "issubset",
+                   ">=": "issuperset",
+                  }
+
+    reverse = {"==": "==",
+               "!=": "!=",
+               "<":  ">",
+               ">":  "<",
+               "<=": ">=",
+               ">=": "<=",
+              }
+
+    def test_issubset(self):
+        raise NotImplementedError() # eval not supported below
+        x = self.left
+        y = self.right
+        for case in "!=", "==", "<", "<=", ">", ">=":
+            expected = case in self.cases
+            # Test the binary infix spelling.
+            result = None ## eval("x" + case + "y", locals())
+            self.assertEqual(result, expected)
+            # Test the "friendly" method-name spelling, if one exists.
+            if case in TestSubsets.case2method:
+                method = getattr(x, TestSubsets.case2method[case])
+                result = method(y)
+                self.assertEqual(result, expected)
+
+            # Now do the same for the operands reversed.
+            rcase = TestSubsets.reverse[case]
+            result = None ## eval("y" + rcase + "x", locals())
+            self.assertEqual(result, expected)
+            if rcase in TestSubsets.case2method:
+                method = getattr(y, TestSubsets.case2method[rcase])
+                result = method(x)
+                self.assertEqual(result, expected)
+#------------------------------------------------------------------------------
+
+class TestSubsetEqualEmpty(TestSubsets):
+    left  = set() # type: Any
+    right = set() # type: Any
+    name  = "both empty"
+    cases = "==", "<=", ">="
+
+#------------------------------------------------------------------------------
+
+class TestSubsetEqualNonEmpty(TestSubsets):
+    left  = set([1, 2])
+    right = set([1, 2])
+    name  = "equal pair"
+    cases = "==", "<=", ">="
+
+#------------------------------------------------------------------------------
+
+class TestSubsetEmptyNonEmpty(TestSubsets):
+    left  = set() # type: Any
+    right = set([1, 2])
+    name  = "one empty, one non-empty"
+    cases = "!=", "<", "<="
+
+#------------------------------------------------------------------------------
+
+class TestSubsetPartial(TestSubsets):
+    left  = set([1])
+    right = set([1, 2])
+    name  = "one a non-empty proper subset of other"
+    cases = "!=", "<", "<="
+
+#------------------------------------------------------------------------------
+
+class TestSubsetNonOverlap(TestSubsets):
+    left  = set([1])
+    right = set([2])
+    name  = "neither empty, neither contains"
+    cases = "!="
+
+#==============================================================================
+
+class TestOnlySetsInBinaryOps(unittest.TestCase):
+
+    def test_eq_ne(self):
+        # Unlike the others, this is testing that == and != *are* allowed.
+        self.assertEqual(self.other == self.set, False)
+        self.assertEqual(self.set == self.other, False)
+        self.assertEqual(self.other != self.set, True)
+        self.assertEqual(self.set != self.other, True)
+
+    def test_ge_gt_le_lt(self):
+        self.assertRaises(TypeError, lambda: self.set < self.other)
+        self.assertRaises(TypeError, lambda: self.set <= self.other)
+        self.assertRaises(TypeError, lambda: self.set > self.other)
+        self.assertRaises(TypeError, lambda: self.set >= self.other)
+
+        self.assertRaises(TypeError, lambda: self.other < self.set)
+        self.assertRaises(TypeError, lambda: self.other <= self.set)
+        self.assertRaises(TypeError, lambda: self.other > self.set)
+        self.assertRaises(TypeError, lambda: self.other >= self.set)
+
+    def test_update_operator(self):
+        try:
+            self.set |= self.other
+        except TypeError:
+            pass
+        else:
+            self.fail("expected TypeError")
+
+    def test_update(self):
+        if self.otherIsIterable:
+            self.set.update(self.other)
+        else:
+            self.assertRaises(TypeError, self.set.update, self.other)
+
+    def test_union(self):
+        self.assertRaises(TypeError, lambda: self.set | self.other)
+        self.assertRaises(TypeError, lambda: self.other | self.set)
+        if self.otherIsIterable:
+            self.set.union(self.other)
+        else:
+            self.assertRaises(TypeError, self.set.union, self.other)
+
+    def test_intersection_update_operator(self):
+        try:
+            self.set &= self.other
+        except TypeError:
+            pass
+        else:
+            self.fail("expected TypeError")
+
+    def test_intersection_update(self):
+        if self.otherIsIterable:
+            self.set.intersection_update(self.other)
+        else:
+            self.assertRaises(TypeError,
+                              self.set.intersection_update,
+                              self.other)
+
+    def test_intersection(self):
+        self.assertRaises(TypeError, lambda: self.set & self.other)
+        self.assertRaises(TypeError, lambda: self.other & self.set)
+        if self.otherIsIterable:
+            self.set.intersection(self.other)
+        else:
+            self.assertRaises(TypeError, self.set.intersection, self.other)
+
+    def test_sym_difference_update_operator(self):
+        try:
+            self.set ^= self.other
+        except TypeError:
+            pass
+        else:
+            self.fail("expected TypeError")
+
+    def test_sym_difference_update(self):
+        if self.otherIsIterable:
+            self.set.symmetric_difference_update(self.other)
+        else:
+            self.assertRaises(TypeError,
+                              self.set.symmetric_difference_update,
+                              self.other)
+
+    def test_sym_difference(self):
+        self.assertRaises(TypeError, lambda: self.set ^ self.other)
+        self.assertRaises(TypeError, lambda: self.other ^ self.set)
+        if self.otherIsIterable:
+            self.set.symmetric_difference(self.other)
+        else:
+            self.assertRaises(TypeError, self.set.symmetric_difference, self.other)
+
+    def test_difference_update_operator(self):
+        try:
+            self.set -= self.other
+        except TypeError:
+            pass
+        else:
+            self.fail("expected TypeError")
+
+    def test_difference_update(self):
+        if self.otherIsIterable:
+            self.set.difference_update(self.other)
+        else:
+            self.assertRaises(TypeError,
+                              self.set.difference_update,
+                              self.other)
+
+    def test_difference(self):
+        self.assertRaises(TypeError, lambda: self.set - self.other)
+        self.assertRaises(TypeError, lambda: self.other - self.set)
+        if self.otherIsIterable:
+            self.set.difference(self.other)
+        else:
+            self.assertRaises(TypeError, self.set.difference, self.other)
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsNumeric(TestOnlySetsInBinaryOps):
+    def setUp(self):
+        self.set   = set((1, 2, 3))
+        self.other = 19
+        self.otherIsIterable = False
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsDict(TestOnlySetsInBinaryOps):
+    def setUp(self):
+        self.set   = set((1, 2, 3))
+        self.other = {1:2, 3:4}
+        self.otherIsIterable = True
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsOperator(TestOnlySetsInBinaryOps):
+    def setUp(self):
+        self.set   = set((1, 2, 3))
+        self.other = operator.add
+        self.otherIsIterable = False
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsTuple(TestOnlySetsInBinaryOps):
+    def setUp(self):
+        self.set   = set((1, 2, 3))
+        self.other = (2, 4, 6)
+        self.otherIsIterable = True
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsString(TestOnlySetsInBinaryOps):
+    def setUp(self):
+        self.set   = set((1, 2, 3))
+        self.other = 'abc'
+        self.otherIsIterable = True
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsGenerator(TestOnlySetsInBinaryOps):
+    def setUp(self):
+        def gen():
+            for i in range(0, 10, 2):
+                yield i
+        self.set   = set((1, 2, 3))
+        self.other = gen()
+        self.otherIsIterable = True
+
+#==============================================================================
+
+class TestCopying(unittest.TestCase):
+
+    def test_copy(self):
+        dup = self.set.copy()
+        dup_list = sorted(dup, key=repr)
+        set_list = sorted(self.set, key=repr)
+        self.assertEqual(len(dup_list), len(set_list))
+        for i in range(len(dup_list)):
+            self.assertTrue(dup_list[i] is set_list[i])
+
+    def test_deep_copy(self):
+        dup = copy.deepcopy(self.set)
+        ##print type(dup), repr(dup)
+        dup_list = sorted(dup, key=repr)
+        set_list = sorted(self.set, key=repr)
+        self.assertEqual(len(dup_list), len(set_list))
+        for i in range(len(dup_list)):
+            self.assertEqual(dup_list[i], set_list[i])
+
+#------------------------------------------------------------------------------
+
+class TestCopyingEmpty(TestCopying):
+    def setUp(self):
+        self.set = set()
+
+#------------------------------------------------------------------------------
+
+class TestCopyingSingleton(TestCopying):
+    def setUp(self):
+        self.set = set(["hello"])
+
+#------------------------------------------------------------------------------
+
+class TestCopyingTriple(TestCopying):
+    def setUp(self):
+        self.set = set(["zero", 0, None])
+
+#------------------------------------------------------------------------------
+
+class TestCopyingTuple(TestCopying):
+    def setUp(self):
+        self.set = set([(1, 2)])
+
+#------------------------------------------------------------------------------
+
+class TestCopyingNested(TestCopying):
+    def setUp(self):
+        self.set = set([((1, 2), (3, 4))])
+
+#==============================================================================
+
+class TestIdentities(unittest.TestCase):
+    def setUp(self):
+        self.a = set('abracadabra')
+        self.b = set('alacazam')
+
+    def test_binopsVsSubsets(self):
+        a, b = self.a, self.b
+        self.assertTrue(a - b < a)
+        self.assertTrue(b - a < b)
+        self.assertTrue(a & b < a)
+        self.assertTrue(a & b < b)
+        self.assertTrue(a | b > a)
+        self.assertTrue(a | b > b)
+        self.assertTrue(a ^ b < a | b)
+
+    def test_commutativity(self):
+        a, b = self.a, self.b
+        self.assertEqual(a&b, b&a)
+        self.assertEqual(a|b, b|a)
+        self.assertEqual(a^b, b^a)
+        if a != b:
+            self.assertNotEqual(a-b, b-a)
+
+    def test_summations(self):
+        # check that sums of parts equal the whole
+        a, b = self.a, self.b
+        self.assertEqual((a-b)|(a&b)|(b-a), a|b)
+        self.assertEqual((a&b)|(a^b), a|b)
+        self.assertEqual(a|(b-a), a|b)
+        self.assertEqual((a-b)|b, a|b)
+        self.assertEqual((a-b)|(a&b), a)
+        self.assertEqual((b-a)|(a&b), b)
+        self.assertEqual((a-b)|(b-a), a^b)
+
+    def test_exclusion(self):
+        # check that inverse operations show non-overlap
+        a, b, zero = self.a, self.b, set()
+        self.assertEqual((a-b)&b, zero)
+        self.assertEqual((b-a)&a, zero)
+        self.assertEqual((a&b)&(a^b), zero)
+
+# Tests derived from test_itertools.py =======================================
+
+def R(seqn):
+    'Regular generator'
+    for i in seqn:
+        yield i
+
+class G:
+    'Sequence using __getitem__'
+    def __init__(self, seqn):
+        self.seqn = seqn
+    def __getitem__(self, i):
+        return self.seqn[i]
+
+class I:
+    'Sequence using iterator protocol'
+    def __init__(self, seqn):
+        self.seqn = seqn
+        self.i = 0
+    def __iter__(self):
+        return self
+    def __next__(self):
+        if self.i >= len(self.seqn): raise StopIteration
+        v = self.seqn[self.i]
+        self.i += 1
+        return v
+
+class Ig:
+    'Sequence using iterator protocol defined with a generator'
+    def __init__(self, seqn):
+        self.seqn = seqn
+        self.i = 0
+    def __iter__(self):
+        for val in self.seqn:
+            yield val
+
+class X:
+    'Missing __getitem__ and __iter__'
+    def __init__(self, seqn):
+        self.seqn = seqn
+        self.i = 0
+    def __next__(self):
+        if self.i >= len(self.seqn): raise StopIteration
+        v = self.seqn[self.i]
+        self.i += 1
+        return v
+
+class N:
+    'Iterator missing __next__()'
+    def __init__(self, seqn):
+        self.seqn = seqn
+        self.i = 0
+    def __iter__(self):
+        return self
+
+class E:
+    'Test propagation of exceptions'
+    def __init__(self, seqn):
+        self.seqn = seqn
+        self.i = 0
+    def __iter__(self):
+        return self
+    def __next__(self):
+        3 // 0
+
+class S:
+    'Test immediate stop'
+    def __init__(self, seqn):
+        pass
+    def __iter__(self):
+        return self
+    def __next__(self):
+        raise StopIteration
+
+from itertools import chain
+def L(seqn):
+    'Test multiple tiers of iterators'
+    return chain(map(lambda x:x, R(Ig(G(seqn)))))
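+
+# For example, L('abc') routes 'a', 'b', 'c' through a __getitem__-based
+# sequence (G), a generator-backed iterator (Ig), a plain generator (R), an
+# identity map() and finally itertools.chain, so set(L('abc')) == set('abc').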
+
+class TestVariousIteratorArgs(unittest.TestCase):
+
+    def test_constructor(self):
+        for cons in (set, frozenset):
+            for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
+                for g in (G, I, Ig, S, L, R):
+                    self.assertEqual(sorted(cons(g(s)), key=repr), sorted(g(s), key=repr))
+                self.assertRaises(TypeError, cons, X(s))
+                self.assertRaises(TypeError, cons, N(s))
+                self.assertRaises(ZeroDivisionError, cons, E(s))
+
+    def test_inline_methods(self):
+        s = set('november')
+        for data in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5), 'december'):
+            for meth in (s.union, s.intersection, s.difference, s.symmetric_difference, s.isdisjoint):
+                for g in (G, I, Ig, L, R):
+                    expected = meth(data)
+                    actual = meth(g(data))
+                    if isinstance(expected, bool):
+                        self.assertEqual(actual, expected)
+                    else:
+                        self.assertEqual(sorted(actual, key=repr), sorted(expected, key=repr))
+                self.assertRaises(TypeError, meth, X(s))
+                self.assertRaises(TypeError, meth, N(s))
+                self.assertRaises(ZeroDivisionError, meth, E(s))
+
+    def test_inplace_methods(self):
+        for data in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5), 'december'):
+            for methname in ('update', 'intersection_update',
+                             'difference_update', 'symmetric_difference_update'):
+                for g in (G, I, Ig, S, L, R):
+                    s = set('january')
+                    t = s.copy()
+                    getattr(s, methname)(list(g(data)))
+                    getattr(t, methname)(g(data))
+                    self.assertEqual(sorted(s, key=repr), sorted(t, key=repr))
+
+                self.assertRaises(TypeError, getattr(set('january'), methname), X(data))
+                self.assertRaises(TypeError, getattr(set('january'), methname), N(data))
+                self.assertRaises(ZeroDivisionError, getattr(set('january'), methname), E(data))
+
+be_bad = set2 = dict2 = None  # type: Any
+
+class bad_eq:
+    def __eq__(self, other):
+        if be_bad:
+            set2.clear()
+            raise ZeroDivisionError
+        return self is other
+    def __hash__(self):
+        return 0
+
+class bad_dict_clear:
+    def __eq__(self, other):
+        if be_bad:
+            dict2.clear()
+        return self is other
+    def __hash__(self):
+        return 0
+
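+# Both helper classes above deliberately mutate another container (set2 or
+# dict2) from inside __eq__, reproducing the mid-operation resizing that
+# test_8420_set_merge below exercises.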
+class TestWeirdBugs(unittest.TestCase):
+    def test_8420_set_merge(self):
+        # This used to segfault
+        global be_bad, set2, dict2
+        be_bad = False
+        set1 = {bad_eq()}
+        set2 = {bad_eq() for i in range(75)}
+        be_bad = True
+        self.assertRaises(ZeroDivisionError, set1.update, set2)
+
+        be_bad = False
+        set1 = {bad_dict_clear()}
+        dict2 = {bad_dict_clear(): None}
+        be_bad = True
+        set1.symmetric_difference_update(dict2)
+
+# Application tests (based on David Eppstein's graph recipes) ===============================
+
+def powerset(U):
+    """Generates all subsets of a set or sequence U."""
+    U = iter(U)
+    try:
+        x = frozenset([next(U)])
+        for S in powerset(U):
+            yield S
+            yield S | x
+    except StopIteration:
+        yield frozenset()
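+
+# For example, powerset('ab') yields the four subsets frozenset(), {'a'},
+# {'b'} and {'a', 'b'} -- 2**len(U) in total, including the empty frozenset.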
+
+def cube(n):
+    """Graph of n-dimensional hypercube."""
+    singletons = [frozenset([x]) for x in range(n)]
+    return dict([(x, frozenset([x^s for s in singletons]))
+                 for x in powerset(range(n))])
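+
+# For example, cube(2) is the 4-cycle: its vertices are the four subsets of
+# {0, 1} (as frozensets), and each vertex is adjacent to the two subsets that
+# differ from it in exactly one element.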
+
+def linegraph(G):
+    """Graph, the vertices of which are edges of G,
+    with two vertices being adjacent iff the corresponding
+    edges share a vertex."""
+    L = {}
+    for x in G:
+        for y in G[x]:
+            nx = [frozenset([x,z]) for z in G[x] if z != y]
+            ny = [frozenset([y,z]) for z in G[y] if z != x]
+            L[frozenset([x,y])] = frozenset(nx+ny)
+    return L
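+
+# For example, the line graph of a triangle is again a triangle, since each
+# edge shares an endpoint with the other two; test_cuboctahedron below uses
+# the line graph of the 3-cube.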
+
+def faces(G):
+    'Return the set of faces in G, where a face is a frozenset of the vertices on that face.'
+    # currently limited to triangles, squares, and pentagons
+    f = set()
+    for v1, edges in G.items():
+        for v2 in edges:
+            for v3 in G[v2]:
+                if v1 == v3:
+                    continue
+                if v1 in G[v3]:
+                    f.add(frozenset([v1, v2, v3]))
+                else:
+                    for v4 in G[v3]:
+                        if v4 == v2:
+                            continue
+                        if v1 in G[v4]:
+                            f.add(frozenset([v1, v2, v3, v4]))
+                        else:
+                            for v5 in G[v4]:
+                                if v5 == v3 or v5 == v2:
+                                    continue
+                                if v1 in G[v5]:
+                                    f.add(frozenset([v1, v2, v3, v4, v5]))
+    return f
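+
+# For example, faces(cube(2)) returns a single face: the frozenset of all four
+# vertices of the square.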
+
+
+class TestGraphs(unittest.TestCase):
+
+    def test_cube(self):
+
+        g = cube(3)                             # vert --> {v1, v2, v3}
+        vertices1 = set(g)
+        self.assertEqual(len(vertices1), 8)     # eight vertices
+        for edge in g.values():
+            self.assertEqual(len(edge), 3)      # each vertex connects to three edges
+        vertices2 = set()
+        for edges in g.values():
+            for v in edges:
+                vertices2.add(v)
+        self.assertEqual(vertices1, vertices2)  # edge vertices in original set
+
+        cubefaces = faces(g)
+        self.assertEqual(len(cubefaces), 6)     # six faces
+        for face in cubefaces:
+            self.assertEqual(len(face), 4)      # each face is a square
+
+    def test_cuboctahedron(self):
+
+        # http://en.wikipedia.org/wiki/Cuboctahedron
+        # 8 triangular faces and 6 square faces
+        # 12 identical vertices, each connecting a triangle and a square
+
+        g = cube(3)
+        cuboctahedron = linegraph(g)            # vertex --> {v1, v2, v3, v4}
+        self.assertEqual(len(cuboctahedron), 12)  # twelve vertices
+
+        vertices = set(cuboctahedron)
+        for edges in cuboctahedron.values():
+            self.assertEqual(len(edges), 4)     # each vertex connects to four other vertices
+        othervertices = set(edge for edges in cuboctahedron.values() for edge in edges)
+        self.assertEqual(vertices, othervertices)   # edge vertices in original set
+
+        cubofaces = faces(cuboctahedron)
+        facesizes = collections.defaultdict(int)
+        for face in cubofaces:
+            facesizes[len(face)] += 1
+        self.assertEqual(facesizes[3], 8)       # eight triangular faces
+        self.assertEqual(facesizes[4], 6)       # six square faces
+
+        for vertex in cuboctahedron:
+            edge = vertex                       # Cuboctahedron vertices are edges in Cube
+            self.assertEqual(len(edge), 2)      # Two cube vertices define an edge
+            for cubevert in edge:
+                self.assertIn(cubevert, g)
+
+
+#==============================================================================
+
+def test_main(verbose=None):
+    test_classes = (
+        TestSet,
+        TestSetSubclass,
+        TestSetSubclassWithKeywordArgs,
+        TestFrozenSet,
+        TestFrozenSetSubclass,
+        TestSetOfSets,
+        TestExceptionPropagation,
+        TestBasicOpsEmpty,
+        TestBasicOpsSingleton,
+        TestBasicOpsTuple,
+        TestBasicOpsTriple,
+        TestBasicOpsString,
+        TestBasicOpsBytes,
+        TestBasicOpsMixedStringBytes,
+        TestBinaryOps,
+        TestUpdateOps,
+        TestMutate,
+        TestSubsetEqualEmpty,
+        TestSubsetEqualNonEmpty,
+        TestSubsetEmptyNonEmpty,
+        TestSubsetPartial,
+        TestSubsetNonOverlap,
+        TestOnlySetsNumeric,
+        TestOnlySetsDict,
+        TestOnlySetsOperator,
+        TestOnlySetsTuple,
+        TestOnlySetsString,
+        TestOnlySetsGenerator,
+        TestCopyingEmpty,
+        TestCopyingSingleton,
+        TestCopyingTriple,
+        TestCopyingTuple,
+        TestCopyingNested,
+        TestIdentities,
+        TestVariousIteratorArgs,
+        TestGraphs,
+        TestWeirdBugs,
+        )
+
+    support.run_unittest(*test_classes)
+
+    # verify reference counting
+    if verbose and hasattr(sys, "gettotalrefcount"):
+        import gc
+        counts = [None] * 5
+        for i in range(len(counts)):
+            support.run_unittest(*test_classes)
+            gc.collect()
+            counts[i] = sys.gettotalrefcount()
+        print(counts)
+
+if __name__ == "__main__":
+    test_main(verbose=True)
diff --git a/test-data/stdlib-samples/3.2/test/test_shutil.py b/test-data/stdlib-samples/3.2/test/test_shutil.py
new file mode 100644
index 0000000..32e0fd1
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_shutil.py
@@ -0,0 +1,978 @@
+# Copyright (C) 2003 Python Software Foundation
+
+import unittest
+import shutil
+import tempfile
+import sys
+import stat
+import os
+import os.path
+import functools
+from test import support
+from test.support import TESTFN
+from os.path import splitdrive
+from distutils.spawn import find_executable, spawn
+from shutil import (_make_tarball, _make_zipfile, make_archive,
+                    register_archive_format, unregister_archive_format,
+                    get_archive_formats, Error, unpack_archive,
+                    register_unpack_format, RegistryError,
+                    unregister_unpack_format, get_unpack_formats)
+import tarfile
+import warnings
+
+from test import support
+from test.support import check_warnings, captured_stdout
+
+from typing import (
+    Any, Callable, Tuple, List, Sequence, BinaryIO, IO, Union, cast
+)
+from types import TracebackType
+
+import bz2
+BZ2_SUPPORTED = True
+
+TESTFN2 = TESTFN + "2"
+
+import grp
+import pwd
+UID_GID_SUPPORT = True
+
+import zlib
+
+import zipfile
+ZIP_SUPPORT = True
+
+def _fake_rename(*args: Any, **kwargs: Any) -> None:
+    # Pretend the destination path is on a different filesystem.
+    raise OSError()
+
+def mock_rename(func: Any) -> Any:
+    @functools.wraps(func)
+    def wrap(*args: Any, **kwargs: Any) -> Any:
+        try:
+            builtin_rename = shutil.rename
+            shutil.rename = cast(Any, _fake_rename)
+            return func(*args, **kwargs)
+        finally:
+            shutil.rename = cast(Any, builtin_rename)
+    return wrap
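+
+# While a test decorated with mock_rename runs, every call to shutil.rename
+# raises OSError (as if src and dst were on different filesystems); the
+# original shutil.rename is restored when the wrapped test returns.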
+
+class TestShutil(unittest.TestCase):
+
+    def setUp(self) -> None:
+        super().setUp()
+        self.tempdirs = []  # type: List[str]
+
+    def tearDown(self) -> None:
+        super().tearDown()
+        while self.tempdirs:
+            d = self.tempdirs.pop()
+            shutil.rmtree(d, os.name in ('nt', 'cygwin'))
+
+    def write_file(self, path: Union[str, List[str], tuple], content: str = 'xxx') -> None:
+        """Writes a file in the given path.
+
+        path can be a string or a sequence.
+        """
+        if isinstance(path, list):
+            path = os.path.join(*path)
+        elif isinstance(path, tuple):
+            path = cast(str, os.path.join(*path))
+        f = open(path, 'w')
+        try:
+            f.write(content)
+        finally:
+            f.close()
+
+    def mkdtemp(self) -> str:
+        """Create a temporary directory that will be cleaned up.
+
+        Returns the path of the directory.
+        """
+        d = tempfile.mkdtemp()
+        self.tempdirs.append(d)
+        return d
+
+    def test_rmtree_errors(self) -> None:
+        # filename is guaranteed not to exist
+        filename = tempfile.mktemp()
+        self.assertRaises(OSError, shutil.rmtree, filename)
+
+    # See bug #1071513 for why we don't run this on cygwin
+    # and bug #1076467 for why we don't run this as root.
+    if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin'
+        and not (hasattr(os, 'geteuid') and os.geteuid() == 0)):
+        def test_on_error(self) -> None:
+            self.errorState = 0
+            os.mkdir(TESTFN)
+            self.childpath = os.path.join(TESTFN, 'a')
+            f = open(self.childpath, 'w')
+            f.close()
+            old_dir_mode = os.stat(TESTFN).st_mode
+            old_child_mode = os.stat(self.childpath).st_mode
+            # Make unwritable.
+            os.chmod(self.childpath, stat.S_IREAD)
+            os.chmod(TESTFN, stat.S_IREAD)
+
+            shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror)
+            # Test whether onerror has actually been called.
+            self.assertEqual(self.errorState, 2,
+                             "Expected call to onerror function did not happen.")
+
+            # Make writable again.
+            os.chmod(TESTFN, old_dir_mode)
+            os.chmod(self.childpath, old_child_mode)
+
+            # Clean up.
+            shutil.rmtree(TESTFN)
+
+    def check_args_to_onerror(self, func: Callable[[str], Any], arg: str,
+                              exc: Tuple[type, BaseException,
+                                         TracebackType]) -> None:
+        # test_rmtree_errors deliberately runs rmtree
+        # on a directory that is chmod 400, which will fail.
+        # This function is run when shutil.rmtree fails.
+        # 99.9% of the time it initially fails to remove
+        # a file in the directory, so the first time through
+        # func is os.remove.
+        # However, some Linux machines running ZFS on
+        # FUSE experienced a failure earlier in the process
+        # at os.listdir.  The first failure may legally
+        # be either.
+        if self.errorState == 0:
+            if func is os.remove:
+                self.assertEqual(arg, self.childpath)
+            else:
+                self.assertIs(func, os.listdir,
+                              "func must be either os.remove or os.listdir")
+                self.assertEqual(arg, TESTFN)
+            self.assertTrue(issubclass(exc[0], OSError))
+            self.errorState = 1
+        else:
+            self.assertEqual(func, os.rmdir)
+            self.assertEqual(arg, TESTFN)
+            self.assertTrue(issubclass(exc[0], OSError))
+            self.errorState = 2
+
+    def test_rmtree_dont_delete_file(self) -> None:
+        # When called on a file instead of a directory, don't delete it.
+        handle, path = tempfile.mkstemp()
+        os.fdopen(handle).close()
+        self.assertRaises(OSError, shutil.rmtree, path)
+        os.remove(path)
+
+    def _write_data(self, path: str, data: str) -> None:
+        f = open(path, "w")
+        f.write(data)
+        f.close()
+
+    def test_copytree_simple(self) -> None:
+
+        def read_data(path: str) -> str:
+            f = open(path)
+            data = f.read()
+            f.close()
+            return data
+
+        src_dir = tempfile.mkdtemp()
+        dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
+        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
+        os.mkdir(os.path.join(src_dir, 'test_dir'))
+        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+
+        try:
+            shutil.copytree(src_dir, dst_dir)
+            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
+            self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
+            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir',
+                                                        'test.txt')))
+            actual = read_data(os.path.join(dst_dir, 'test.txt'))
+            self.assertEqual(actual, '123')
+            actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt'))
+            self.assertEqual(actual, '456')
+        finally:
+            for path in (
+                    os.path.join(src_dir, 'test.txt'),
+                    os.path.join(dst_dir, 'test.txt'),
+                    os.path.join(src_dir, 'test_dir', 'test.txt'),
+                    os.path.join(dst_dir, 'test_dir', 'test.txt'),
+                ):
+                if os.path.exists(path):
+                    os.remove(path)
+            for path in (src_dir,
+                    os.path.dirname(dst_dir)
+                ):
+                if os.path.exists(path):
+                    shutil.rmtree(path)
+
+    def test_copytree_with_exclude(self) -> None:
+
+        def read_data(path: str) -> str:
+            f = open(path)
+            data = f.read()
+            f.close()
+            return data
+
+        # creating data
+        join = os.path.join
+        exists = os.path.exists
+        src_dir = tempfile.mkdtemp()
+        try:
+            dst_dir = join(tempfile.mkdtemp(), 'destination')
+            self._write_data(join(src_dir, 'test.txt'), '123')
+            self._write_data(join(src_dir, 'test.tmp'), '123')
+            os.mkdir(join(src_dir, 'test_dir'))
+            self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456')
+            os.mkdir(join(src_dir, 'test_dir2'))
+            self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456')
+            os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
+            os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
+            self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'),
+                             '456')
+            self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'),
+                             '456')
+
+
+            # testing glob-like patterns
+            try:
+                patterns = shutil.ignore_patterns('*.tmp', 'test_dir2')
+                shutil.copytree(src_dir, dst_dir, ignore=patterns)
+                # checking the result: some elements should not be copied
+                self.assertTrue(exists(join(dst_dir, 'test.txt')))
+                self.assertFalse(exists(join(dst_dir, 'test.tmp')))
+                self.assertFalse(exists(join(dst_dir, 'test_dir2')))
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+            try:
+                patterns = shutil.ignore_patterns('*.tmp', 'subdir*')
+                shutil.copytree(src_dir, dst_dir, ignore=patterns)
+                # checking the result: some elements should not be copied
+                self.assertFalse(exists(join(dst_dir, 'test.tmp')))
+                self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir2')))
+                self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir')))
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+
+            # testing callable-style
+            try:
+                def _filter(src: str, names: Sequence[str]) -> List[str]:
+                    res = []  # type: List[str]
+                    for name in names:
+                        path = os.path.join(src, name)
+
+                        if (os.path.isdir(path) and
+                            os.path.basename(path) == 'subdir'):
+                            res.append(name)
+                        elif os.path.splitext(path)[-1] in ('.py',):
+                            res.append(name)
+                    return res
+
+                shutil.copytree(src_dir, dst_dir, ignore=_filter)
+
+                # checking the result: some elements should not be copied
+                self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir2',
+                                             'test.py')))
+                self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir')))
+
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+        finally:
+            shutil.rmtree(src_dir)
+            shutil.rmtree(os.path.dirname(dst_dir))
+
+    @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link')
+    def test_dont_copy_file_onto_link_to_itself(self) -> None:
+        # Temporarily disable test on Windows.
+        if os.name == 'nt':
+            return
+        # bug 851123.
+        os.mkdir(TESTFN)
+        src = os.path.join(TESTFN, 'cheese')
+        dst = os.path.join(TESTFN, 'shop')
+        try:
+            with open(src, 'w') as f:
+                f.write('cheddar')
+            os.link(src, dst)
+            self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
+            with open(src, 'r') as f:
+                self.assertEqual(f.read(), 'cheddar')
+            os.remove(dst)
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+    @support.skip_unless_symlink
+    def test_dont_copy_file_onto_symlink_to_itself(self) -> None:
+        # bug 851123.
+        os.mkdir(TESTFN)
+        src = os.path.join(TESTFN, 'cheese')
+        dst = os.path.join(TESTFN, 'shop')
+        try:
+            with open(src, 'w') as f:
+                f.write('cheddar')
+            # Using `src` here would mean we end up with a symlink pointing
+            # to TESTFN/TESTFN/cheese, while it should point at
+            # TESTFN/cheese.
+            os.symlink('cheese', dst)
+            self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
+            with open(src, 'r') as f:
+                self.assertEqual(f.read(), 'cheddar')
+            os.remove(dst)
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+    @support.skip_unless_symlink
+    def test_rmtree_on_symlink(self) -> None:
+        # bug 1669.
+        os.mkdir(TESTFN)
+        try:
+            src = os.path.join(TESTFN, 'cheese')
+            dst = os.path.join(TESTFN, 'shop')
+            os.mkdir(src)
+            os.symlink(src, dst)
+            self.assertRaises(OSError, shutil.rmtree, dst)
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+    if hasattr(os, "mkfifo"):
+        # Issue #3002: copyfile and copytree block indefinitely on named pipes
+        def test_copyfile_named_pipe(self) -> None:
+            os.mkfifo(TESTFN)
+            try:
+                self.assertRaises(shutil.SpecialFileError,
+                                  shutil.copyfile, TESTFN, TESTFN2)
+                self.assertRaises(shutil.SpecialFileError,
+                                  shutil.copyfile, __file__, TESTFN)
+            finally:
+                os.remove(TESTFN)
+
+        @support.skip_unless_symlink
+        def test_copytree_named_pipe(self) -> None:
+            os.mkdir(TESTFN)
+            try:
+                subdir = os.path.join(TESTFN, "subdir")
+                os.mkdir(subdir)
+                pipe = os.path.join(subdir, "mypipe")
+                os.mkfifo(pipe)
+                try:
+                    shutil.copytree(TESTFN, TESTFN2)
+                except shutil.Error as e:
+                    errors = e.args[0]
+                    self.assertEqual(len(errors), 1)
+                    src, dst, error_msg = errors[0]
+                    self.assertEqual("`%s` is a named pipe" % pipe, error_msg)
+                else:
+                    self.fail("shutil.Error should have been raised")
+            finally:
+                shutil.rmtree(TESTFN, ignore_errors=True)
+                shutil.rmtree(TESTFN2, ignore_errors=True)
+
+    def test_copytree_special_func(self) -> None:
+
+        src_dir = self.mkdtemp()
+        dst_dir = os.path.join(self.mkdtemp(), 'destination')
+        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
+        os.mkdir(os.path.join(src_dir, 'test_dir'))
+        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+
+        copied = []  # type: List[Tuple[str, str]]
+        def _copy(src: str, dst: str) -> None:
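+            # record each (src, dst) pair instead of copying anything, so
+            # the test can count how many times copytree invoked the hook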
+            copied.append((src, dst))
+
+        shutil.copytree(src_dir, dst_dir, copy_function=_copy)
+        self.assertEqual(len(copied), 2)
+
+    @support.skip_unless_symlink
+    def test_copytree_dangling_symlinks(self) -> None:
+
+        # a dangling symlink raises an error at the end
+        src_dir = self.mkdtemp()
+        dst_dir = os.path.join(self.mkdtemp(), 'destination')
+        os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt'))
+        os.mkdir(os.path.join(src_dir, 'test_dir'))
+        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+        self.assertRaises(Error, shutil.copytree, src_dir, dst_dir)
+
+        # a dangling symlink is ignored with the proper flag
+        dst_dir = os.path.join(self.mkdtemp(), 'destination2')
+        shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True)
+        self.assertNotIn('test.txt', os.listdir(dst_dir))
+
+        # a dangling symlink is copied if symlinks=True
+        dst_dir = os.path.join(self.mkdtemp(), 'destination3')
+        shutil.copytree(src_dir, dst_dir, symlinks=True)
+        self.assertIn('test.txt', os.listdir(dst_dir))
+
+    def _copy_file(self,
+                   method: Callable[[str, str], None]) -> Tuple[str, str]:
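+        # Write a test file into a fresh temp dir, copy it into a second
+        # temp dir with `method`, and return the (source, destination) paths.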
+        fname = 'test.txt'
+        tmpdir = self.mkdtemp()
+        self.write_file([tmpdir, fname])
+        file1 = os.path.join(tmpdir, fname)
+        tmpdir2 = self.mkdtemp()
+        method(file1, tmpdir2)
+        file2 = os.path.join(tmpdir2, fname)
+        return (file1, file2)
+
+    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
+    def test_copy(self) -> None:
+        # Ensure that the copied file exists and has the same mode bits.
+        file1, file2 = self._copy_file(shutil.copy)
+        self.assertTrue(os.path.exists(file2))
+        self.assertEqual(os.stat(file1).st_mode, os.stat(file2).st_mode)
+
+    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
+    @unittest.skipUnless(hasattr(os, 'utime'), 'requires os.utime')
+    def test_copy2(self) -> None:
+        # Ensure that the copied file exists and has the same mode and
+        # modification time bits.
+        file1, file2 = self._copy_file(shutil.copy2)
+        self.assertTrue(os.path.exists(file2))
+        file1_stat = os.stat(file1)
+        file2_stat = os.stat(file2)
+        self.assertEqual(file1_stat.st_mode, file2_stat.st_mode)
+        for attr in 'st_atime', 'st_mtime':
+            # The modification times may be truncated in the new file.
+            self.assertLessEqual(getattr(file1_stat, attr),
+                                 getattr(file2_stat, attr) + 1)
+        if hasattr(os, 'chflags') and hasattr(file1_stat, 'st_flags'):
+            self.assertEqual(getattr(file1_stat, 'st_flags'),
+                             getattr(file2_stat, 'st_flags'))
+
+    @unittest.skipUnless(zlib, "requires zlib")
+    def test_make_tarball(self) -> None:
+        # creating something to tar
+        tmpdir = self.mkdtemp()
+        self.write_file([tmpdir, 'file1'], 'xxx')
+        self.write_file([tmpdir, 'file2'], 'xxx')
+        os.mkdir(os.path.join(tmpdir, 'sub'))
+        self.write_file([tmpdir, 'sub', 'file3'], 'xxx')
+
+        tmpdir2 = self.mkdtemp()
+        # force shutil to create the directory
+        os.rmdir(tmpdir2)
+        if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
+            self.skipTest("source and target should be on same drive")
+
+        base_name = os.path.join(tmpdir2, 'archive')
+
+        # working with relative paths to avoid tar warnings
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(splitdrive(base_name)[1], '.')
+        finally:
+            os.chdir(old_dir)
+
+        # check if the compressed tarball was created
+        tarball = base_name + '.tar.gz'
+        self.assertTrue(os.path.exists(tarball))
+
+        # trying an uncompressed one
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(splitdrive(base_name)[1], '.', compress=None)
+        finally:
+            os.chdir(old_dir)
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
+    def _tarinfo(self, path: str) -> tuple:
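+        # Return the member names of the tar archive at `path`, sorted, as a
+        # tuple (used to compare two archives for equal contents).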
+        tar = tarfile.open(path)
+        try:
+            names = tar.getnames()
+            names.sort()
+            return tuple(names)
+        finally:
+            tar.close()
+
+    def _create_files(self) -> Tuple[str, str, str]:
+        # creating something to tar
+        tmpdir = self.mkdtemp()
+        dist = os.path.join(tmpdir, 'dist')
+        os.mkdir(dist)
+        self.write_file([dist, 'file1'], 'xxx')
+        self.write_file([dist, 'file2'], 'xxx')
+        os.mkdir(os.path.join(dist, 'sub'))
+        self.write_file([dist, 'sub', 'file3'], 'xxx')
+        os.mkdir(os.path.join(dist, 'sub2'))
+        tmpdir2 = self.mkdtemp()
+        base_name = os.path.join(tmpdir2, 'archive')
+        return tmpdir, tmpdir2, base_name
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    @unittest.skipUnless(find_executable('tar') and find_executable('gzip'),
+                         'Need the tar command to run')
+    def test_tarfile_vs_tar(self) -> None:
+        tmpdir, tmpdir2, base_name = self._create_files()
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist')
+        finally:
+            os.chdir(old_dir)
+
+        # check if the compressed tarball was created
+        tarball = base_name + '.tar.gz'
+        self.assertTrue(os.path.exists(tarball))
+
+        # now create another tarball using `tar`
+        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
+        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
+        gzip_cmd = ['gzip', '-f9', 'archive2.tar']
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            with captured_stdout() as s:
+                spawn(tar_cmd)
+                spawn(gzip_cmd)
+        finally:
+            os.chdir(old_dir)
+
+        self.assertTrue(os.path.exists(tarball2))
+        # let's compare both tarballs
+        self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))
+
+        # trying an uncompressed one
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist', compress=None)
+        finally:
+            os.chdir(old_dir)
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
+        # now for a dry_run
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist', compress=None, dry_run=True)
+        finally:
+            os.chdir(old_dir)
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
+    def test_make_zipfile(self) -> None:
+        # creating something to zip
+        tmpdir = self.mkdtemp()
+        self.write_file([tmpdir, 'file1'], 'xxx')
+        self.write_file([tmpdir, 'file2'], 'xxx')
+
+        tmpdir2 = self.mkdtemp()
+        # force shutil to create the directory
+        os.rmdir(tmpdir2)
+        base_name = os.path.join(tmpdir2, 'archive')
+        _make_zipfile(base_name, tmpdir)
+
+        # check if the zip archive was created
+        res = base_name + '.zip'
+        self.assertTrue(os.path.exists(res))
+
+
+    def test_make_archive(self) -> None:
+        tmpdir = self.mkdtemp()
+        base_name = os.path.join(tmpdir, 'archive')
+        self.assertRaises(ValueError, make_archive, base_name, 'xxx')
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    def test_make_archive_owner_group(self) -> None:
+        # testing make_archive with owner and group, with various combinations
+        # this works even if there's no gid/uid support
+        if UID_GID_SUPPORT:
+            group = grp.getgrgid(0).gr_name
+            owner = pwd.getpwuid(0).pw_name
+        else:
+            group = owner = 'root'
+
+        base_dir, root_dir, base_name = self._create_files()
+        base_name = os.path.join(self.mkdtemp(), 'archive')
+        res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
+                           group=group)
+        self.assertTrue(os.path.exists(res))
+
+        res = make_archive(base_name, 'zip', root_dir, base_dir)
+        self.assertTrue(os.path.exists(res))
+
+        res = make_archive(base_name, 'tar', root_dir, base_dir,
+                           owner=owner, group=group)
+        self.assertTrue(os.path.exists(res))
+
+        res = make_archive(base_name, 'tar', root_dir, base_dir,
+                           owner='kjhkjhkjg', group='oihohoh')
+        self.assertTrue(os.path.exists(res))
+
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
+    def test_tarfile_root_owner(self) -> None:
+        tmpdir, tmpdir2, base_name = self._create_files()
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        group = grp.getgrgid(0).gr_name
+        owner = pwd.getpwuid(0).pw_name
+        try:
+            archive_name = _make_tarball(base_name, 'dist', compress=None,
+                                         owner=owner, group=group)
+        finally:
+            os.chdir(old_dir)
+
+        # check that the archive was created
+        self.assertTrue(os.path.exists(archive_name))
+
+        # now check the owner and group of the archive members
+        archive = tarfile.open(archive_name)
+        try:
+            for member in archive.getmembers():
+                self.assertEqual(member.uid, 0)
+                self.assertEqual(member.gid, 0)
+        finally:
+            archive.close()
+
+    def test_make_archive_cwd(self) -> None:
+        current_dir = os.getcwd()
+        def _breaks(*args: Any, **kw: Any) -> None:
+            raise RuntimeError()
+
+        register_archive_format('xxx', _breaks, [], 'xxx file')
+        try:
+            try:
+                make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
+            except Exception:
+                pass
+            self.assertEqual(os.getcwd(), current_dir)
+        finally:
+            unregister_archive_format('xxx')
+
+    def test_register_archive_format(self) -> None:
+
+        self.assertRaises(TypeError, register_archive_format, 'xxx', 1)
+        self.assertRaises(TypeError, register_archive_format, 'xxx',
+                          lambda: 1/0,
+                          1)
+        self.assertRaises(TypeError, register_archive_format, 'xxx',
+                          lambda: 1/0,
+                          [(1, 2), (1, 2, 3)])
+
+        register_archive_format('xxx', lambda: 1/0, [('x', 2)], 'xxx file')
+        formats = [name for name, params in get_archive_formats()]
+        self.assertIn('xxx', formats)
+
+        unregister_archive_format('xxx')
+        formats = [name for name, params in get_archive_formats()]
+        self.assertNotIn('xxx', formats)
+
+    def _compare_dirs(self, dir1: str, dir2: str) -> List[str]:
+        # check that dir1 and dir2 are equivalent,
+        # return the diff
+        diff = []  # type: List[str]
+        for root, dirs, files in os.walk(dir1):
+            for file_ in files:
+                path = os.path.join(root, file_)
+                target_path = os.path.join(dir2, os.path.split(path)[-1])
+                if not os.path.exists(target_path):
+                    diff.append(file_)
+        return diff
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    def test_unpack_archive(self) -> None:
+        formats = ['tar', 'gztar', 'zip']
+        if BZ2_SUPPORTED:
+            formats.append('bztar')
+
+        for format in formats:
+            tmpdir = self.mkdtemp()
+            base_dir, root_dir, base_name = self._create_files()
+            tmpdir2 = self.mkdtemp()
+            filename = make_archive(base_name, format, root_dir, base_dir)
+
+            # let's try to unpack it now
+            unpack_archive(filename, tmpdir2)
+            diff = self._compare_dirs(tmpdir, tmpdir2)
+            self.assertEqual(diff, [])
+
+            # and again, this time with the format specified
+            tmpdir3 = self.mkdtemp()
+            unpack_archive(filename, tmpdir3, format=format)
+            diff = self._compare_dirs(tmpdir, tmpdir3)
+            self.assertEqual(diff, [])
+        self.assertRaises(shutil.ReadError, unpack_archive, TESTFN)
+        self.assertRaises(ValueError, unpack_archive, TESTFN, format='xxx')
+
+    def test_unpack_registry(self) -> None:
+
+        formats = get_unpack_formats()
+
+        def _boo(filename: str, extract_dir: str, extra: int) -> None:
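+            # unpack callback: merely verifies the arguments that
+            # register_unpack_format() was asked to pass along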
+            self.assertEqual(extra, 1)
+            self.assertEqual(filename, 'stuff.boo')
+            self.assertEqual(extract_dir, 'xx')
+
+        register_unpack_format('Boo', ['.boo', '.b2'], _boo, [('extra', 1)])
+        unpack_archive('stuff.boo', 'xx')
+
+        # trying to register a .boo unpacker again
+        self.assertRaises(RegistryError, register_unpack_format, 'Boo2',
+                          ['.boo'], _boo)
+
+        # should work now
+        unregister_unpack_format('Boo')
+        register_unpack_format('Boo2', ['.boo'], _boo)
+        self.assertIn(('Boo2', ['.boo'], ''), get_unpack_formats())
+        self.assertNotIn(('Boo', ['.boo'], ''), get_unpack_formats())
+
+        # let's leave a clean state
+        unregister_unpack_format('Boo2')
+        self.assertEqual(get_unpack_formats(), formats)
+
+
+class TestMove(unittest.TestCase):
+
+    def setUp(self) -> None:
+        filename = "foo"
+        self.src_dir = tempfile.mkdtemp()
+        self.dst_dir = tempfile.mkdtemp()
+        self.src_file = os.path.join(self.src_dir, filename)
+        self.dst_file = os.path.join(self.dst_dir, filename)
+        with open(self.src_file, "wb") as f:
+            f.write(b"spam")
+
+    def tearDown(self) -> None:
+        for d in (self.src_dir, self.dst_dir):
+            try:
+                if d:
+                    shutil.rmtree(d)
+            except OSError:
+                pass
+
+    def _check_move_file(self, src: str, dst: str, real_dst: str) -> None:
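+        # Move src to dst and check that the bytes end up at real_dst and
+        # that the source file no longer exists.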
+        with open(src, "rb") as f:
+            contents = f.read()
+        shutil.move(src, dst)
+        with open(real_dst, "rb") as f:
+            self.assertEqual(contents, f.read())
+        self.assertFalse(os.path.exists(src))
+
+    def _check_move_dir(self, src: str, dst: str, real_dst: str) -> None:
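+        # Move the directory src to dst and check that its listing shows up
+        # under real_dst and that src itself is gone.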
+        contents = sorted(os.listdir(src))
+        shutil.move(src, dst)
+        self.assertEqual(contents, sorted(os.listdir(real_dst)))
+        self.assertFalse(os.path.exists(src))
+
+    def test_move_file(self) -> None:
+        # Move a file to another location on the same filesystem.
+        self._check_move_file(self.src_file, self.dst_file, self.dst_file)
+
+    def test_move_file_to_dir(self) -> None:
+        # Move a file inside an existing dir on the same filesystem.
+        self._check_move_file(self.src_file, self.dst_dir, self.dst_file)
+
+    @mock_rename
+    def test_move_file_other_fs(self) -> None:
+        # Move a file to an existing dir on another filesystem.
+        self.test_move_file()
+
+    @mock_rename
+    def test_move_file_to_dir_other_fs(self) -> None:
+        # Move a file to another location on another filesystem.
+        self.test_move_file_to_dir()
+
+    def test_move_dir(self) -> None:
+        # Move a dir to another location on the same filesystem.
+        dst_dir = tempfile.mktemp()
+        try:
+            self._check_move_dir(self.src_dir, dst_dir, dst_dir)
+        finally:
+            try:
+                shutil.rmtree(dst_dir)
+            except OSError:
+                pass
+
+    @mock_rename
+    def test_move_dir_other_fs(self) -> None:
+        # Move a dir to another location on another filesystem.
+        self.test_move_dir()
+
+    def test_move_dir_to_dir(self) -> None:
+        # Move a dir inside an existing dir on the same filesystem.
+        self._check_move_dir(self.src_dir, self.dst_dir,
+            os.path.join(self.dst_dir, os.path.basename(self.src_dir)))
+
+    @mock_rename
+    def test_move_dir_to_dir_other_fs(self) -> None:
+        # Move a dir inside an existing dir on another filesystem.
+        self.test_move_dir_to_dir()
+
+    def test_existing_file_inside_dest_dir(self) -> None:
+        # A file with the same name inside the destination dir already exists.
+        with open(self.dst_file, "wb"):
+            pass
+        self.assertRaises(shutil.Error, shutil.move, self.src_file, self.dst_dir)
+
+    def test_dont_move_dir_in_itself(self) -> None:
+        # Moving a dir inside itself raises an Error.
+        dst = os.path.join(self.src_dir, "bar")
+        self.assertRaises(shutil.Error, shutil.move, self.src_dir, dst)
+
+    def test_destinsrc_false_negative(self) -> None:
+        os.mkdir(TESTFN)
+        try:
+            for src, dst in [('srcdir', 'srcdir/dest')]:
+                src = os.path.join(TESTFN, src)
+                dst = os.path.join(TESTFN, dst)
+                self.assertTrue(shutil._destinsrc(src, dst),
+                             msg='_destinsrc() wrongly concluded that '
+                             'dst (%s) is not in src (%s)' % (dst, src))
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+    def test_destinsrc_false_positive(self) -> None:
+        os.mkdir(TESTFN)
+        try:
+            for src, dst in [('srcdir', 'src/dest'), ('srcdir', 'srcdir.new')]:
+                src = os.path.join(TESTFN, src)
+                dst = os.path.join(TESTFN, dst)
+                self.assertFalse(shutil._destinsrc(src, dst),
+                            msg='_destinsrc() wrongly concluded that '
+                            'dst (%s) is in src (%s)' % (dst, src))
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+
+class TestCopyFile(unittest.TestCase):
+
+    _delete = False
+
+    class Faux(object):
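+        # Stand-in for a file object: records whether it was entered as a
+        # context manager and what exception reached __exit__, and can be
+        # told to raise from __exit__ to simulate a failing close().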
+        _entered = False
+        _exited_with = None # type: tuple
+        _raised = False
+        def __init__(self, raise_in_exit: bool = False,
+                     suppress_at_exit: bool = True) -> None:
+            self._raise_in_exit = raise_in_exit
+            self._suppress_at_exit = suppress_at_exit
+        def read(self, *args: Any) -> str:
+            return ''
+        def __enter__(self) -> None:
+            self._entered = True
+        def __exit__(self, exc_type: type, exc_val: BaseException,
+                     exc_tb: TracebackType) -> bool:
+            self._exited_with = exc_type, exc_val, exc_tb
+            if self._raise_in_exit:
+                self._raised = True
+                raise IOError("Cannot close")
+            return self._suppress_at_exit
+
+    def tearDown(self) -> None:
+        shutil.open = open
+
+    def _set_shutil_open(self, func: Any) -> None:
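+        # Replace the open() that shutil uses internally with `func`;
+        # tearDown() restores the builtin open afterwards.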
+        shutil.open = func
+        self._delete = True
+
+    def test_w_source_open_fails(self) -> None:
+        def _open(filename: str, mode: str = 'r') -> BinaryIO:
+            if filename == 'srcfile':
+                raise IOError('Cannot open "srcfile"')
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile')
+
+    def test_w_dest_open_fails(self) -> None:
+
+        srcfile = TestCopyFile.Faux()
+
+        def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux:
+            if filename == 'srcfile':
+                return srcfile
+            if filename == 'destfile':
+                raise IOError('Cannot open "destfile"')
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        shutil.copyfile('srcfile', 'destfile')
+        self.assertTrue(srcfile._entered)
+        self.assertTrue(srcfile._exited_with[0] is IOError)
+        self.assertEqual(srcfile._exited_with[1].args,
+                         ('Cannot open "destfile"',))
+
+    def test_w_dest_close_fails(self) -> None:
+
+        srcfile = TestCopyFile.Faux()
+        destfile = TestCopyFile.Faux(True)
+
+        def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux:
+            if filename == 'srcfile':
+                return srcfile
+            if filename == 'destfile':
+                return destfile
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        shutil.copyfile('srcfile', 'destfile')
+        self.assertTrue(srcfile._entered)
+        self.assertTrue(destfile._entered)
+        self.assertTrue(destfile._raised)
+        self.assertTrue(srcfile._exited_with[0] is IOError)
+        self.assertEqual(srcfile._exited_with[1].args,
+                         ('Cannot close',))
+
+    def test_w_source_close_fails(self) -> None:
+
+        srcfile = TestCopyFile.Faux(True)
+        destfile = TestCopyFile.Faux()
+
+        def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux:
+            if filename == 'srcfile':
+                return srcfile
+            if filename == 'destfile':
+                return destfile
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        self.assertRaises(IOError,
+                          shutil.copyfile, 'srcfile', 'destfile')
+        self.assertTrue(srcfile._entered)
+        self.assertTrue(destfile._entered)
+        self.assertFalse(destfile._raised)
+        self.assertTrue(srcfile._exited_with[0] is None)
+        self.assertTrue(srcfile._raised)
+
+    def test_move_dir_caseinsensitive(self) -> None:
+        # Renames a folder to the same name
+        # but a different case.
+
+        self.src_dir = tempfile.mkdtemp()
+        dst_dir = os.path.join(
+                os.path.dirname(self.src_dir),
+                os.path.basename(self.src_dir).upper())
+        self.assertNotEqual(self.src_dir, dst_dir)
+
+        try:
+            shutil.move(self.src_dir, dst_dir)
+            self.assertTrue(os.path.isdir(dst_dir))
+        finally:
+            if os.path.exists(dst_dir):
+                os.rmdir(dst_dir)
+
+
+
+def test_main() -> None:
+    support.run_unittest(TestShutil, TestMove, TestCopyFile)
+
+if __name__ == '__main__':
+    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_subprocess.py b/test-data/stdlib-samples/3.2/test/test_subprocess.py
new file mode 100644
index 0000000..772d8cc
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_subprocess.py
@@ -0,0 +1,1764 @@
+import unittest
+from test import support
+import subprocess
+import sys
+import signal
+import io
+import os
+import errno
+import tempfile
+import time
+import re
+import sysconfig
+import warnings
+import select
+import shutil
+import gc
+
+import resource
+
+from typing import Any, Dict, Callable, Iterable, List, Set, Tuple, cast
+
+mswindows = (sys.platform == "win32")
+
+#
+# Depends on the following external programs: Python
+#
+
+if mswindows:
+    SETBINARY = ('import msvcrt; msvcrt.setmode(sys.stdout.fileno(), '
+                                                'os.O_BINARY);')
+else:
+    SETBINARY = ''
+
+
+try:
+    mkstemp = tempfile.mkstemp
+except AttributeError:
+    # tempfile.mkstemp is not available
+    def _mkstemp() -> Tuple[int, str]:
+        """Replacement for mkstemp, calling mktemp."""
+        fname = tempfile.mktemp()
+        return os.open(fname, os.O_RDWR|os.O_CREAT), fname
+    mkstemp = cast(Any, _mkstemp)
+
+
+class BaseTestCase(unittest.TestCase):
+    def setUp(self) -> None:
+        # Try to minimize the number of children we have so this test
+        # doesn't crash on some buildbots (Alphas in particular).
+        support.reap_children()
+
+    def tearDown(self) -> None:
+        for inst in subprocess._active:
+            inst.wait()
+        subprocess._cleanup()
+        self.assertFalse(subprocess._active, "subprocess._active not empty")
+
+    def assertStderrEqual(self, stderr: bytes, expected: bytes,
+                          msg: object = None) -> None:
+        # In a debug build, stuff like "[6580 refs]" is printed to stderr at
+        # shutdown time.  That frustrates tests trying to check stderr produced
+        # from a spawned Python process.
+        actual = support.strip_python_stderr(stderr)
+        self.assertEqual(actual, expected, msg)
+
+
+class ProcessTestCase(BaseTestCase):
+
+    def test_call_seq(self) -> None:
+        # call() function with sequence argument
+        rc = subprocess.call([sys.executable, "-c",
+                              "import sys; sys.exit(47)"])
+        self.assertEqual(rc, 47)
+
+    def test_check_call_zero(self) -> None:
+        # check_call() function with zero return code
+        rc = subprocess.check_call([sys.executable, "-c",
+                                    "import sys; sys.exit(0)"])
+        self.assertEqual(rc, 0)
+
+    def test_check_call_nonzero(self) -> None:
+        # check_call() function with non-zero return code
+        with self.assertRaises(subprocess.CalledProcessError) as c:
+            subprocess.check_call([sys.executable, "-c",
+                                   "import sys; sys.exit(47)"])
+        self.assertEqual(c.exception.returncode, 47)
+
+    def test_check_output(self) -> None:
+        # check_output() function with zero return code
+        output = subprocess.check_output(
+                [sys.executable, "-c", "print('BDFL')"])
+        self.assertIn(b'BDFL', cast(Any, output)) # see #39
+
+    def test_check_output_nonzero(self) -> None:
+        # check_call() function with non-zero return code
+        with self.assertRaises(subprocess.CalledProcessError) as c:
+            subprocess.check_output(
+                    [sys.executable, "-c", "import sys; sys.exit(5)"])
+        self.assertEqual(c.exception.returncode, 5)
+
+    def test_check_output_stderr(self) -> None:
+        # check_output() function stderr redirected to stdout
+        output = subprocess.check_output(
+                [sys.executable, "-c", "import sys; sys.stderr.write('BDFL')"],
+                stderr=subprocess.STDOUT)
+        self.assertIn(b'BDFL', cast(Any, output)) # see #39
+
+    def test_check_output_stdout_arg(self) -> None:
+        # check_output() function stderr redirected to stdout
+        with self.assertRaises(ValueError) as c:
+            output = subprocess.check_output(
+                    [sys.executable, "-c", "print('will not be run')"],
+                    stdout=sys.stdout)
+            self.fail("Expected ValueError when stdout arg supplied.")
+        self.assertIn('stdout', c.exception.args[0])
+
+    def test_call_kwargs(self) -> None:
+        # call() function with keyword args
+        newenv = os.environ.copy()
+        newenv["FRUIT"] = "banana"
+        rc = subprocess.call([sys.executable, "-c",
+                              'import sys, os;'
+                              'sys.exit(os.getenv("FRUIT")=="banana")'],
+                             env=newenv)
+        self.assertEqual(rc, 1)
+
+    def test_invalid_args(self) -> None:
+        # Popen() called with invalid arguments should raise TypeError
+        # but Popen.__del__ should not complain (issue #12085)
+        with support.captured_stderr() as s:
+            self.assertRaises(TypeError, subprocess.Popen, invalid_arg_name=1)
+            argcount = subprocess.Popen.__init__.__code__.co_argcount
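+            # build one positional argument more than Popen.__init__ accepts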
+            too_many_args = [0] * (argcount + 1)
+            self.assertRaises(TypeError, subprocess.Popen, *too_many_args)
+        self.assertEqual(s.getvalue(), '')
+
+    def test_stdin_none(self) -> None:
+        # .stdin is None when not redirected
+        p = subprocess.Popen([sys.executable, "-c", 'print("banana")'],
+                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stderr.close)
+        p.wait()
+        self.assertEqual(p.stdin, None)
+
+    def test_stdout_none(self) -> None:
+        # .stdout is None when not redirected
+        p = subprocess.Popen([sys.executable, "-c",
+                             'print("    this bit of output is from a '
+                             'test of stdout in a different '
+                             'process ...")'],
+                             stdin=subprocess.PIPE, stderr=subprocess.PIPE)
+        self.addCleanup(p.stdin.close)
+        self.addCleanup(p.stderr.close)
+        p.wait()
+        self.assertEqual(p.stdout, None)
+
+    def test_stderr_none(self) -> None:
+        # .stderr is None when not redirected
+        p = subprocess.Popen([sys.executable, "-c", 'print("banana")'],
+                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stdin.close)
+        p.wait()
+        self.assertEqual(p.stderr, None)
+
+    def test_executable_with_cwd(self) -> None:
+        python_dir = os.path.dirname(os.path.realpath(sys.executable))
+        p = subprocess.Popen(["somethingyoudonthave", "-c",
+                              "import sys; sys.exit(47)"],
+                             executable=sys.executable, cwd=python_dir)
+        p.wait()
+        self.assertEqual(p.returncode, 47)
+
+    @unittest.skipIf(sysconfig.is_python_build(),
+                     "need an installed Python. See #7774")
+    def test_executable_without_cwd(self) -> None:
+        # For a normal installation, it should work without 'cwd'
+        # argument.  For test runs in the build directory, see #7774.
+        p = subprocess.Popen(["somethingyoudonthave", "-c",
+                              "import sys; sys.exit(47)"],
+                             executable=sys.executable)
+        p.wait()
+        self.assertEqual(p.returncode, 47)
+
+    def test_stdin_pipe(self) -> None:
+        # stdin redirection
+        p = subprocess.Popen([sys.executable, "-c",
+                         'import sys; sys.exit(sys.stdin.read() == "pear")'],
+                        stdin=subprocess.PIPE)
+        p.stdin.write(b"pear")
+        p.stdin.close()
+        p.wait()
+        self.assertEqual(p.returncode, 1)
+
+    def test_stdin_filedes(self) -> None:
+        # stdin is set to open file descriptor
+        tf = tempfile.TemporaryFile()
+        self.addCleanup(tf.close)
+        d = tf.fileno()
+        os.write(d, b"pear")
+        os.lseek(d, 0, 0)
+        p = subprocess.Popen([sys.executable, "-c",
+                         'import sys; sys.exit(sys.stdin.read() == "pear")'],
+                         stdin=d)
+        p.wait()
+        self.assertEqual(p.returncode, 1)
+
+    def test_stdin_fileobj(self) -> None:
+        # stdin is set to open file object
+        tf = tempfile.TemporaryFile()
+        self.addCleanup(tf.close)
+        tf.write(b"pear")
+        tf.seek(0)
+        p = subprocess.Popen([sys.executable, "-c",
+                         'import sys; sys.exit(sys.stdin.read() == "pear")'],
+                         stdin=tf)
+        p.wait()
+        self.assertEqual(p.returncode, 1)
+
+    def test_stdout_pipe(self) -> None:
+        # stdout redirection
+        p = subprocess.Popen([sys.executable, "-c",
+                          'import sys; sys.stdout.write("orange")'],
+                         stdout=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.assertEqual(p.stdout.read(), b"orange")
+
+    def test_stdout_filedes(self) -> None:
+        # stdout is set to open file descriptor
+        tf = tempfile.TemporaryFile()
+        self.addCleanup(tf.close)
+        d = tf.fileno()
+        p = subprocess.Popen([sys.executable, "-c",
+                          'import sys; sys.stdout.write("orange")'],
+                         stdout=d)
+        p.wait()
+        os.lseek(d, 0, 0)
+        self.assertEqual(os.read(d, 1024), b"orange")
+
+    def test_stdout_fileobj(self) -> None:
+        # stdout is set to open file object
+        tf = tempfile.TemporaryFile()
+        self.addCleanup(tf.close)
+        p = subprocess.Popen([sys.executable, "-c",
+                          'import sys; sys.stdout.write("orange")'],
+                         stdout=tf)
+        p.wait()
+        tf.seek(0)
+        self.assertEqual(tf.read(), b"orange")
+
+    def test_stderr_pipe(self) -> None:
+        # stderr redirection
+        p = subprocess.Popen([sys.executable, "-c",
+                          'import sys; sys.stderr.write("strawberry")'],
+                         stderr=subprocess.PIPE)
+        self.addCleanup(p.stderr.close)
+        self.assertStderrEqual(p.stderr.read(), b"strawberry")
+
+    def test_stderr_filedes(self) -> None:
+        # stderr is set to open file descriptor
+        tf = tempfile.TemporaryFile()
+        self.addCleanup(tf.close)
+        d = tf.fileno()
+        p = subprocess.Popen([sys.executable, "-c",
+                          'import sys; sys.stderr.write("strawberry")'],
+                         stderr=d)
+        p.wait()
+        os.lseek(d, 0, 0)
+        self.assertStderrEqual(os.read(d, 1024), b"strawberry")
+
+    def test_stderr_fileobj(self) -> None:
+        # stderr is set to open file object
+        tf = tempfile.TemporaryFile()
+        self.addCleanup(tf.close)
+        p = subprocess.Popen([sys.executable, "-c",
+                          'import sys; sys.stderr.write("strawberry")'],
+                         stderr=tf)
+        p.wait()
+        tf.seek(0)
+        self.assertStderrEqual(tf.read(), b"strawberry")
+
+    def test_stdout_stderr_pipe(self) -> None:
+        # capture stdout and stderr to the same pipe
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys;'
+                              'sys.stdout.write("apple");'
+                              'sys.stdout.flush();'
+                              'sys.stderr.write("orange")'],
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.STDOUT)
+        self.addCleanup(p.stdout.close)
+        self.assertStderrEqual(p.stdout.read(), b"appleorange")
+
+    def test_stdout_stderr_file(self) -> None:
+        # capture stdout and stderr to the same open file
+        tf = tempfile.TemporaryFile()
+        self.addCleanup(tf.close)
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys;'
+                              'sys.stdout.write("apple");'
+                              'sys.stdout.flush();'
+                              'sys.stderr.write("orange")'],
+                             stdout=tf,
+                             stderr=tf)
+        p.wait()
+        tf.seek(0)
+        self.assertStderrEqual(tf.read(), b"appleorange")
+
+    def test_stdout_filedes_of_stdout(self) -> None:
+        # stdout is set to 1 (#1531862).
+        cmd = r"import sys, os; sys.exit(os.write(sys.stdout.fileno(), b'.\n'))"
+        rc = subprocess.call([sys.executable, "-c", cmd], stdout=1)
+        self.assertEqual(rc, 2)
+
+    def test_cwd(self) -> None:
+        tmpdir = tempfile.gettempdir()
+        # We cannot use os.path.realpath to canonicalize the path,
+        # since it doesn't expand Tru64 {memb} strings. See bug 1063571.
+        cwd = os.getcwd()
+        os.chdir(tmpdir)
+        tmpdir = os.getcwd()
+        os.chdir(cwd)
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys,os;'
+                              'sys.stdout.write(os.getcwd())'],
+                             stdout=subprocess.PIPE,
+                             cwd=tmpdir)
+        self.addCleanup(p.stdout.close)
+        normcase = os.path.normcase
+        self.assertEqual(normcase(p.stdout.read().decode("utf-8")),
+                         normcase(tmpdir))
+
+    def test_env(self) -> None:
+        newenv = os.environ.copy()
+        newenv["FRUIT"] = "orange"
+        with subprocess.Popen([sys.executable, "-c",
+                               'import sys,os;'
+                               'sys.stdout.write(os.getenv("FRUIT"))'],
+                              stdout=subprocess.PIPE,
+                              env=newenv) as p:
+            stdout, stderr = p.communicate()
+            self.assertEqual(stdout, b"orange")
+
+    # Windows requires at least the SYSTEMROOT environment variable to start
+    # Python
+    @unittest.skipIf(sys.platform == 'win32',
+                     'cannot test an empty env on Windows')
+    @unittest.skipIf(sysconfig.get_config_var('Py_ENABLE_SHARED') is not None,
+                     'the python library cannot be loaded '
+                     'with an empty environment')
+    def test_empty_env(self) -> None:
+        with subprocess.Popen([sys.executable, "-c",
+                               'import os; '
+                               'print(list(os.environ.keys()))'],
+                              stdout=subprocess.PIPE,
+                              env={}) as p:
+            stdout, stderr = p.communicate()
+            self.assertIn(stdout.strip(),
+                [b"[]",
+                 # Mac OS X adds __CF_USER_TEXT_ENCODING variable to an empty
+                 # environment
+                 b"['__CF_USER_TEXT_ENCODING']"])
+
+    def test_communicate_stdin(self) -> None:
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys;'
+                              'sys.exit(sys.stdin.read() == "pear")'],
+                             stdin=subprocess.PIPE)
+        p.communicate(b"pear")
+        self.assertEqual(p.returncode, 1)
+
+    def test_communicate_stdout(self) -> None:
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys; sys.stdout.write("pineapple")'],
+                             stdout=subprocess.PIPE)
+        (stdout, stderr) = p.communicate()
+        self.assertEqual(stdout, b"pineapple")
+        self.assertEqual(stderr, None)
+
+    def test_communicate_stderr(self) -> None:
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys; sys.stderr.write("pineapple")'],
+                             stderr=subprocess.PIPE)
+        (stdout, stderr) = p.communicate()
+        self.assertEqual(stdout, None)
+        self.assertStderrEqual(stderr, b"pineapple")
+
+    def test_communicate(self) -> None:
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys,os;'
+                              'sys.stderr.write("pineapple");'
+                              'sys.stdout.write(sys.stdin.read())'],
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stderr.close)
+        self.addCleanup(p.stdin.close)
+        (stdout, stderr) = p.communicate(b"banana")
+        self.assertEqual(stdout, b"banana")
+        self.assertStderrEqual(stderr, b"pineapple")
+
+    # Test for the fd leak reported in http://bugs.python.org/issue2791.
+    def test_communicate_pipe_fd_leak(self) -> None:
+        for stdin_pipe in (False, True):
+            for stdout_pipe in (False, True):
+                for stderr_pipe in (False, True):
+                    options = {}  # type: Dict[str, Any]
+                    if stdin_pipe:
+                        options['stdin'] = subprocess.PIPE
+                    if stdout_pipe:
+                        options['stdout'] = subprocess.PIPE
+                    if stderr_pipe:
+                        options['stderr'] = subprocess.PIPE
+                    if not options:
+                        continue
+                    p = subprocess.Popen([sys.executable, "-c", "pass"], **options)
+                    p.communicate()
+                    if p.stdin is not None:
+                        self.assertTrue(p.stdin.closed)
+                    if p.stdout is not None:
+                        self.assertTrue(p.stdout.closed)
+                    if p.stderr is not None:
+                        self.assertTrue(p.stderr.closed)
+
+    def test_communicate_returns(self) -> None:
+        # communicate() should return None if no redirection is active
+        p = subprocess.Popen([sys.executable, "-c",
+                              "import sys; sys.exit(47)"])
+        (stdout, stderr) = p.communicate()
+        self.assertEqual(stdout, None)
+        self.assertEqual(stderr, None)
+
+    def test_communicate_pipe_buf(self) -> None:
+        # communicate() with writes larger than pipe_buf
+        # This test will probably deadlock rather than fail, if
+        # communicate() does not work properly.
+        x, y = os.pipe()
+        if mswindows:
+            pipe_buf = 512
+        else:
+            pipe_buf = os.fpathconf(x, "PC_PIPE_BUF")
+        os.close(x)
+        os.close(y)
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys,os;'
+                              'sys.stdout.write(sys.stdin.read(47));'
+                              'sys.stderr.write("xyz"*%d);'
+                              'sys.stdout.write(sys.stdin.read())' % pipe_buf],
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stderr.close)
+        self.addCleanup(p.stdin.close)
+        string_to_write = b"abc"*pipe_buf
+        (stdout, stderr) = p.communicate(string_to_write)
+        self.assertEqual(stdout, string_to_write)
+
+    def test_writes_before_communicate(self) -> None:
+        # stdin.write before communicate()
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys,os;'
+                              'sys.stdout.write(sys.stdin.read())'],
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stderr.close)
+        self.addCleanup(p.stdin.close)
+        p.stdin.write(b"banana")
+        (stdout, stderr) = p.communicate(b"split")
+        self.assertEqual(stdout, b"bananasplit")
+        self.assertStderrEqual(stderr, b"")
+
+    def test_universal_newlines(self) -> None:
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys,os;' + SETBINARY +
+                              'sys.stdout.write(sys.stdin.readline());'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write("line2\\n");'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write(sys.stdin.read());'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write("line4\\n");'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write("line5\\r\\n");'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write("line6\\r");'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write("\\nline7");'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write("\\nline8");'],
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             universal_newlines=1)
+        p.stdin.write("line1\n")
+        self.assertEqual(p.stdout.readline(), "line1\n")
+        p.stdin.write("line3\n")
+        p.stdin.close()
+        self.addCleanup(p.stdout.close)
+        self.assertEqual(p.stdout.readline(),
+                         "line2\n")
+        self.assertEqual(p.stdout.read(6),
+                         "line3\n")
+        self.assertEqual(p.stdout.read(),
+                         "line4\nline5\nline6\nline7\nline8")
+
+    def test_universal_newlines_communicate(self) -> None:
+        # universal newlines through communicate()
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys,os;' + SETBINARY +
+                              'sys.stdout.write("line2\\n");'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write("line4\\n");'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write("line5\\r\\n");'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write("line6\\r");'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write("\\nline7");'
+                              'sys.stdout.flush();'
+                              'sys.stdout.write("\\nline8");'],
+                             stderr=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             universal_newlines=1)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stderr.close)
+        # BUG: can't give a non-empty stdin because it breaks both the
+        # select- and poll-based communicate() implementations.
+        (stdout, stderr) = p.communicate()
+        self.assertEqual(stdout,
+                         "line2\nline4\nline5\nline6\nline7\nline8")
+
+    def test_universal_newlines_communicate_stdin(self) -> None:
+        # universal newlines through communicate(), with only stdin
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys,os;' + SETBINARY + '''\nif True:
+                                  s = sys.stdin.readline()
+                                  assert s == "line1\\n", repr(s)
+                                  s = sys.stdin.read()
+                                  assert s == "line3\\n", repr(s)
+                              '''],
+                             stdin=subprocess.PIPE,
+                             universal_newlines=1)
+        (stdout, stderr) = p.communicate("line1\nline3\n")
+        self.assertEqual(p.returncode, 0)
+
+    def test_no_leaking(self) -> None:
+        # Make sure we leak no resources
+        if not mswindows:
+            max_handles = 1026 # too much for most UNIX systems
+        else:
+            max_handles = 2050 # too much for (at least some) Windows setups
+        handles = []  # type: List[int]
+        tmpdir = tempfile.mkdtemp()
+        try:
+            for i in range(max_handles):
+                try:
+                    tmpfile = os.path.join(tmpdir, support.TESTFN)
+                    handles.append(os.open(tmpfile, os.O_WRONLY|os.O_CREAT))
+                except OSError as e:
+                    if e.errno != errno.EMFILE:
+                        raise
+                    break
+            else:
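+                # for-else: the loop never broke, i.e. we never hit EMFILE,
+                # so the file descriptor limit was not actually reached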
+                self.skipTest("failed to reach the file descriptor limit "
+                    "(tried %d)" % max_handles)
+            # Close a couple of them (should be enough for a subprocess)
+            for i in range(10):
+                os.close(handles.pop())
+            # Loop creating some subprocesses. If one of them leaks some fds,
+            # the next loop iteration will fail by reaching the max fd limit.
+            for i in range(15):
+                p = subprocess.Popen([sys.executable, "-c",
+                                      "import sys;"
+                                      "sys.stdout.write(sys.stdin.read())"],
+                                     stdin=subprocess.PIPE,
+                                     stdout=subprocess.PIPE,
+                                     stderr=subprocess.PIPE)
+                data = p.communicate(b"lime")[0]
+                self.assertEqual(data, b"lime")
+        finally:
+            for h in handles:
+                os.close(h)
+            shutil.rmtree(tmpdir)
+
+    def test_list2cmdline(self) -> None:
+        self.assertEqual(subprocess.list2cmdline(['a b c', 'd', 'e']),
+                         '"a b c" d e')
+        self.assertEqual(subprocess.list2cmdline(['ab"c', '\\', 'd']),
+                         'ab\\"c \\ d')
+        self.assertEqual(subprocess.list2cmdline(['ab"c', ' \\', 'd']),
+                         'ab\\"c " \\\\" d')
+        self.assertEqual(subprocess.list2cmdline(['a\\\\\\b', 'de fg', 'h']),
+                         'a\\\\\\b "de fg" h')
+        self.assertEqual(subprocess.list2cmdline(['a\\"b', 'c', 'd']),
+                         'a\\\\\\"b c d')
+        self.assertEqual(subprocess.list2cmdline(['a\\\\b c', 'd', 'e']),
+                         '"a\\\\b c" d e')
+        self.assertEqual(subprocess.list2cmdline(['a\\\\b\\ c', 'd', 'e']),
+                         '"a\\\\b\\ c" d e')
+        self.assertEqual(subprocess.list2cmdline(['ab', '']),
+                         'ab ""')
+
+
+    def test_poll(self) -> None:
+        p = subprocess.Popen([sys.executable,
+                          "-c", "import time; time.sleep(1)"])
+        count = 0
+        while p.poll() is None:
+            time.sleep(0.1)
+            count += 1
+        # We expect that the poll loop probably went around about 10 times,
+        # but, due to system scheduling that we can't control, it's possible
+        # poll() never returned None.  It "should be" very rare that it
+        # didn't go around at least twice.
+        self.assertGreaterEqual(count, 2)
+        # Subsequent invocations should just return the returncode
+        self.assertEqual(p.poll(), 0)
+
+
+    def test_wait(self) -> None:
+        p = subprocess.Popen([sys.executable,
+                          "-c", "import time; time.sleep(2)"])
+        self.assertEqual(p.wait(), 0)
+        # Subsequent invocations should just return the returncode
+        self.assertEqual(p.wait(), 0)
+
+
+    def test_invalid_bufsize(self) -> None:
+        # an invalid type for the bufsize argument should raise
+        # TypeError.
+        with self.assertRaises(TypeError):
+            subprocess.Popen([sys.executable, "-c", "pass"], cast(Any, "orange"))
+
+    def test_bufsize_is_none(self) -> None:
+        # bufsize=None should be the same as bufsize=0.
+        p = subprocess.Popen([sys.executable, "-c", "pass"], None)
+        self.assertEqual(p.wait(), 0)
+        # Again with keyword arg
+        p = subprocess.Popen([sys.executable, "-c", "pass"], bufsize=None)
+        self.assertEqual(p.wait(), 0)
+
+    def test_leaking_fds_on_error(self) -> None:
+        # see bug #5179: Popen leaks file descriptors to PIPEs if
+        # the child fails to execute; this will eventually exhaust
+        # the maximum number of open fds. 1024 seems a very common
+        # value for that limit, but Windows has 2048, so we loop
+        # 1024 times (each call leaked two fds).
+        for i in range(1024):
+            # Windows raises IOError.  Others raise OSError.
+            with self.assertRaises(EnvironmentError) as c:
+                subprocess.Popen(['nonexisting_i_hope'],
+                                 stdout=subprocess.PIPE,
+                                 stderr=subprocess.PIPE)
+            # ignore errors that indicate the command was not found
+            if c.exception.errno not in (errno.ENOENT, errno.EACCES):
+                raise c.exception
+
+    def test_issue8780(self) -> None:
+        # Ensure that stdout is inherited from the parent
+        # if stdout=PIPE is not used
+        code = ';'.join([
+            'import subprocess, sys',
+            'retcode = subprocess.call('
+                "[sys.executable, '-c', 'print(\"Hello World!\")'])",
+            'assert retcode == 0'])
+        output = subprocess.check_output([sys.executable, '-c', code])
+        self.assertTrue(output.startswith(b'Hello World!'), ascii(output))
+
+    def test_handles_closed_on_exception(self) -> None:
+        # If CreateProcess exits with an error, ensure the
+        # duplicate output handles are released
+        ifhandle, ifname = mkstemp()
+        ofhandle, ofname = mkstemp()
+        efhandle, efname = mkstemp()
+        try:
+            subprocess.Popen (["*"], stdin=ifhandle, stdout=ofhandle,
+              stderr=efhandle)
+        except OSError:
+            os.close(ifhandle)
+            os.remove(ifname)
+            os.close(ofhandle)
+            os.remove(ofname)
+            os.close(efhandle)
+            os.remove(efname)
+        self.assertFalse(os.path.exists(ifname))
+        self.assertFalse(os.path.exists(ofname))
+        self.assertFalse(os.path.exists(efname))
+
+    def test_communicate_epipe(self) -> None:
+        # Issue 10963: communicate() should hide EPIPE
+        p = subprocess.Popen([sys.executable, "-c", 'pass'],
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stderr.close)
+        self.addCleanup(p.stdin.close)
+        p.communicate(b"x" * 2**20)
+
+    def test_communicate_epipe_only_stdin(self) -> None:
+        # Issue 10963: communicate() should hide EPIPE
+        p = subprocess.Popen([sys.executable, "-c", 'pass'],
+                             stdin=subprocess.PIPE)
+        self.addCleanup(p.stdin.close)
+        time.sleep(2)
+        p.communicate(b"x" * 2**20)
+
+    @unittest.skipUnless(hasattr(signal, 'SIGALRM'),
+                         "Requires signal.SIGALRM")
+    def test_communicate_eintr(self) -> None:
+        # Issue #12493: communicate() should handle EINTR
+        def handler(signum, frame):
+            pass
+        old_handler = signal.signal(signal.SIGALRM, handler)
+        self.addCleanup(signal.signal, signal.SIGALRM, old_handler)
+
+        # the process is running for 2 seconds
+        args = [sys.executable, "-c", 'import time; time.sleep(2)']
+        for stream in ('stdout', 'stderr'):
+            kw = {stream: subprocess.PIPE}  # type: Dict[str, Any]
+            with subprocess.Popen(args, **kw) as process:
+                signal.alarm(1)
+                # communicate() will be interrupted by SIGALRM
+                process.communicate()
+
+
+# context manager
+class _SuppressCoreFiles(object):
+    """Try to prevent core files from being created."""
+    old_limit = None # type: Tuple[int, int]
+
+    def __enter__(self) -> None:
+        """Try to save previous ulimit, then set it to (0, 0)."""
+        if resource is not None:
+            try:
+                self.old_limit = resource.getrlimit(resource.RLIMIT_CORE)
+                resource.setrlimit(resource.RLIMIT_CORE, (0, 0))
+            except (ValueError, resource.error):
+                pass
+
+        if sys.platform == 'darwin':
+            # Check whether the 'Crash Reporter' on OS X is configured
+            # in 'Developer' mode, and warn that it will be triggered
+            # if it is.
+            #
+            # This assumes that this context manager is used in tests
+            # that might trigger the Crash Reporter.
+            value = subprocess.Popen(['/usr/bin/defaults', 'read',
+                    'com.apple.CrashReporter', 'DialogType'],
+                    stdout=subprocess.PIPE).communicate()[0]
+            if value.strip() == b'developer':
+                print("this tests triggers the Crash Reporter, "
+                      "that is intentional", end='')
+                sys.stdout.flush()
+
+    def __exit__(self, *args: Any) -> None:
+        """Return core file behavior to default."""
+        if self.old_limit is None:
+            return
+        if resource is not None:
+            try:
+                resource.setrlimit(resource.RLIMIT_CORE, self.old_limit)
+            except (ValueError, resource.error):
+                pass
+
+
+@unittest.skipIf(mswindows, "POSIX specific tests")
+class POSIXProcessTestCase(BaseTestCase):
+
+    def test_exceptions(self) -> None:
+        nonexistent_dir = "/_this/pa.th/does/not/exist"
+        try:
+            os.chdir(nonexistent_dir)
+        except OSError as e:
+            # This avoids hard coding the errno value or the OS perror()
+            # string and instead captures the exception that we want to see
+            # below for comparison.
+            desired_exception = e
+            desired_exception.strerror += ': ' + repr(sys.executable)
+        else:
+            self.fail("chdir to nonexistant directory %s succeeded." %
+                      nonexistent_dir)
+
+        # Error in the child re-raised in the parent.
+        try:
+            p = subprocess.Popen([sys.executable, "-c", ""],
+                                 cwd=nonexistent_dir)
+        except OSError as e:
+            # Test that the child process chdir failure actually makes
+            # it up to the parent process as the correct exception.
+            self.assertEqual(desired_exception.errno, e.errno)
+            self.assertEqual(desired_exception.strerror, e.strerror)
+        else:
+            self.fail("Expected OSError: %s" % desired_exception)
+
+    def test_restore_signals(self) -> None:
+        # Code coverage for both values of restore_signals to make sure it
+        # at least does not blow up.
+        # A test for behavior would be complex.  Contributions welcome.
+        subprocess.call([sys.executable, "-c", ""], restore_signals=True)
+        subprocess.call([sys.executable, "-c", ""], restore_signals=False)
+
+    def test_start_new_session(self) -> None:
+        # For code coverage of calling setsid().  We don't care if we get an
+        # EPERM error from it depending on the test execution environment;
+        # that still indicates that it was called.
+        try:
+            output = subprocess.check_output(
+                    [sys.executable, "-c",
+                     "import os; print(os.getpgid(os.getpid()))"],
+                    start_new_session=True)
+        except OSError as e:
+            if e.errno != errno.EPERM:
+                raise
+        else:
+            parent_pgid = os.getpgid(os.getpid())
+            child_pgid = int(output)
+            self.assertNotEqual(parent_pgid, child_pgid)
+
+    def test_run_abort(self) -> None:
+        # returncode handles signal termination
+        with _SuppressCoreFiles():
+            p = subprocess.Popen([sys.executable, "-c",
+                                  'import os; os.abort()'])
+            p.wait()
+        self.assertEqual(-p.returncode, signal.SIGABRT)
+
+    def test_preexec(self) -> None:
+        # DISCLAIMER: Setting environment variables is *not* a good use
+        # of a preexec_fn.  This is merely a test.
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys,os;'
+                              'sys.stdout.write(os.getenv("FRUIT"))'],
+                             stdout=subprocess.PIPE,
+                             preexec_fn=lambda: os.putenv("FRUIT", "apple"))
+        self.addCleanup(p.stdout.close)
+        self.assertEqual(p.stdout.read(), b"apple")
+
+    def test_preexec_exception(self) -> None:
+        def raise_it():
+            raise ValueError("What if two swallows carried a coconut?")
+        try:
+            p = subprocess.Popen([sys.executable, "-c", ""],
+                                 preexec_fn=raise_it)
+        except RuntimeError as e:
+            self.assertTrue(
+                    subprocess._posixsubprocess,
+                    "Expected a ValueError from the preexec_fn")
+        except ValueError as e2:
+            self.assertIn("coconut", e2.args[0])
+        else:
+            self.fail("Exception raised by preexec_fn did not make it "
+                      "to the parent process.")
+
+    def test_preexec_gc_module_failure(self) -> None:
+        # This tests the code that disables garbage collection if the child
+        # process will execute any Python.
+        def raise_runtime_error():
+            raise RuntimeError("this shouldn't escape")
+        enabled = gc.isenabled()
+        orig_gc_disable = gc.disable
+        orig_gc_isenabled = gc.isenabled
+        try:
+            gc.disable()
+            self.assertFalse(gc.isenabled())
+            subprocess.call([sys.executable, '-c', ''],
+                            preexec_fn=lambda: None)
+            self.assertFalse(gc.isenabled(),
+                             "Popen enabled gc when it shouldn't.")
+
+            gc.enable()
+            self.assertTrue(gc.isenabled())
+            subprocess.call([sys.executable, '-c', ''],
+                            preexec_fn=lambda: None)
+            self.assertTrue(gc.isenabled(), "Popen left gc disabled.")
+
+            setattr(gc, 'disable', raise_runtime_error)
+            self.assertRaises(RuntimeError, subprocess.Popen,
+                              [sys.executable, '-c', ''],
+                              preexec_fn=lambda: None)
+
+            del gc.isenabled  # force an AttributeError
+            self.assertRaises(AttributeError, subprocess.Popen,
+                              [sys.executable, '-c', ''],
+                              preexec_fn=lambda: None)
+        finally:
+            setattr(gc, 'disable', orig_gc_disable)
+            setattr(gc, 'isenabled', orig_gc_isenabled)
+            if not enabled:
+                gc.disable()
+
+    def test_args_string(self) -> None:
+        # args is a string
+        fd, fname = mkstemp()
+        # reopen in text mode
+        with open(fd, "w", errors=cast(Any, "surrogateescape")) as fobj: # see #260
+            fobj.write("#!/bin/sh\n")
+            fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" %
+                       sys.executable)
+        os.chmod(fname, 0o700)
+        p = subprocess.Popen(fname)
+        p.wait()
+        os.remove(fname)
+        self.assertEqual(p.returncode, 47)
+
+    def test_invalid_args(self) -> None:
+        # invalid arguments should raise ValueError
+        self.assertRaises(ValueError, subprocess.call,
+                          [sys.executable, "-c",
+                           "import sys; sys.exit(47)"],
+                          startupinfo=47)
+        self.assertRaises(ValueError, subprocess.call,
+                          [sys.executable, "-c",
+                           "import sys; sys.exit(47)"],
+                          creationflags=47)
+
+    def test_shell_sequence(self) -> None:
+        # Run command through the shell (sequence)
+        newenv = os.environ.copy()
+        newenv["FRUIT"] = "apple"
+        p = subprocess.Popen(["echo $FRUIT"], shell=1,
+                             stdout=subprocess.PIPE,
+                             env=newenv)
+        self.addCleanup(p.stdout.close)
+        self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple")
+
+    def test_shell_string(self) -> None:
+        # Run command through the shell (string)
+        newenv = os.environ.copy()
+        newenv["FRUIT"] = "apple"
+        p = subprocess.Popen("echo $FRUIT", shell=1,
+                             stdout=subprocess.PIPE,
+                             env=newenv)
+        self.addCleanup(p.stdout.close)
+        self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple")
+
+    def test_call_string(self) -> None:
+        # call() function with string argument on UNIX
+        fd, fname = mkstemp()
+        # reopen in text mode
+        with open(fd, "w", errors=cast(Any, "surrogateescape")) as fobj: # see #260
+            fobj.write("#!/bin/sh\n")
+            fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" %
+                       sys.executable)
+        os.chmod(fname, 0o700)
+        rc = subprocess.call(fname)
+        os.remove(fname)
+        self.assertEqual(rc, 47)
+
+    def test_specific_shell(self) -> None:
+        # Issue #9265: Incorrect name passed as arg[0].
+        shells = []  # type: List[str]
+        for prefix in ['/bin', '/usr/bin/', '/usr/local/bin']:
+            for name in ['bash', 'ksh']:
+                sh = os.path.join(prefix, name)
+                if os.path.isfile(sh):
+                    shells.append(sh)
+        if not shells: # Will probably work for any shell but csh.
+            self.skipTest("bash or ksh required for this test")
+        sh = '/bin/sh'
+        if os.path.isfile(sh) and not os.path.islink(sh):
+            # Test will fail if /bin/sh is a symlink to csh.
+            shells.append(sh)
+        for sh in shells:
+            p = subprocess.Popen("echo $0", executable=sh, shell=True,
+                                 stdout=subprocess.PIPE)
+            self.addCleanup(p.stdout.close)
+            self.assertEqual(p.stdout.read().strip(), bytes(sh, 'ascii'))
+
+    def _kill_process(self, method: str, *args: Any) -> subprocess.Popen:
+        # Do not inherit file handles from the parent.
+        # It should fix failures on some platforms.
+        p = subprocess.Popen([sys.executable, "-c", """if 1:
+                             import sys, time
+                             sys.stdout.write('x\\n')
+                             sys.stdout.flush()
+                             time.sleep(30)
+                             """],
+                             close_fds=True,
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        # Wait for the interpreter to be completely initialized before
+        # sending any signal.
+        p.stdout.read(1)
+        getattr(p, method)(*args)
+        return p
+
+    def test_send_signal(self) -> None:
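+        # SIGINT should produce a KeyboardInterrupt traceback in the child
+        # and a non-zero exit status.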
+        p = self._kill_process('send_signal', signal.SIGINT)
+        _, stderr = p.communicate()
+        self.assertIn(b'KeyboardInterrupt', stderr)
+        self.assertNotEqual(p.wait(), 0)
+
+    def test_kill(self) -> None:
+        p = self._kill_process('kill')
+        _, stderr = p.communicate()
+        self.assertStderrEqual(stderr, b'')
+        self.assertEqual(p.wait(), -signal.SIGKILL)
+
+    def test_terminate(self) -> None:
+        p = self._kill_process('terminate')
+        _, stderr = p.communicate()
+        self.assertStderrEqual(stderr, b'')
+        self.assertEqual(p.wait(), -signal.SIGTERM)
+
+    def check_close_std_fds(self, fds: Iterable[int]) -> None:
+        # Issue #9905: test that subprocess pipes still work properly with
+        # some standard fds closed
+        stdin = 0
+        newfds = []  # type: List[int]
+        for a in fds:
+            b = os.dup(a)
+            newfds.append(b)
+            if a == 0:
+                stdin = b
+        try:
+            for fd in fds:
+                os.close(fd)
+            out, err = subprocess.Popen([sys.executable, "-c",
+                              'import sys;'
+                              'sys.stdout.write("apple");'
+                              'sys.stdout.flush();'
+                              'sys.stderr.write("orange")'],
+                       stdin=stdin,
+                       stdout=subprocess.PIPE,
+                       stderr=subprocess.PIPE).communicate()
+            err = support.strip_python_stderr(err)
+            self.assertEqual((out, err), (b'apple', b'orange'))
+        finally:
+            for b, a in zip(newfds, fds):
+                os.dup2(b, a)
+            for b in newfds:
+                os.close(b)
+
+    def test_close_fd_0(self) -> None:
+        self.check_close_std_fds([0])
+
+    def test_close_fd_1(self) -> None:
+        self.check_close_std_fds([1])
+
+    def test_close_fd_2(self) -> None:
+        self.check_close_std_fds([2])
+
+    def test_close_fds_0_1(self) -> None:
+        self.check_close_std_fds([0, 1])
+
+    def test_close_fds_0_2(self) -> None:
+        self.check_close_std_fds([0, 2])
+
+    def test_close_fds_1_2(self) -> None:
+        self.check_close_std_fds([1, 2])
+
+    def test_close_fds_0_1_2(self) -> None:
+        # Issue #10806: test that subprocess pipes still work properly with
+        # all standard fds closed.
+        self.check_close_std_fds([0, 1, 2])
+
+    def test_remapping_std_fds(self) -> None:
+        # open up some temporary files
+        temps = [mkstemp() for i in range(3)]
+        try:
+            temp_fds = [fd for fd, fname in temps]
+
+            # unlink the files -- we won't need to reopen them
+            for fd, fname in temps:
+                os.unlink(fname)
+
+            # write some data to what will become stdin, and rewind
+            os.write(temp_fds[1], b"STDIN")
+            os.lseek(temp_fds[1], 0, 0)
+
+            # move the standard file descriptors out of the way
+            saved_fds = [os.dup(fd) for fd in range(3)]
+            try:
+                # duplicate the file objects over the standard fd's
+                for fd, temp_fd in enumerate(temp_fds):
+                    os.dup2(temp_fd, fd)
+
+                # now use those files in the "wrong" order, so that subprocess
+                # has to rearrange them in the child
+                p = subprocess.Popen([sys.executable, "-c",
+                    'import sys; got = sys.stdin.read();'
+                    'sys.stdout.write("got %s"%got); sys.stderr.write("err")'],
+                    stdin=temp_fds[1],
+                    stdout=temp_fds[2],
+                    stderr=temp_fds[0])
+                p.wait()
+            finally:
+                # restore the original fd's underneath sys.stdin, etc.
+                for std, saved in enumerate(saved_fds):
+                    os.dup2(saved, std)
+                    os.close(saved)
+
+            for fd in temp_fds:
+                os.lseek(fd, 0, 0)
+
+            out = os.read(temp_fds[2], 1024)
+            err = support.strip_python_stderr(os.read(temp_fds[0], 1024))
+            self.assertEqual(out, b"got STDIN")
+            self.assertEqual(err, b"err")
+
+        finally:
+            for fd in temp_fds:
+                os.close(fd)
+
+    def check_swap_fds(self, stdin_no: int, stdout_no: int,
+                       stderr_no: int) -> None:
+        # open up some temporary files
+        temps = [mkstemp() for i in range(3)]
+        temp_fds = [fd for fd, fname in temps]
+        try:
+            # unlink the files -- we won't need to reopen them
+            for fd, fname in temps:
+                os.unlink(fname)
+
+            # save a copy of the standard file descriptors
+            saved_fds = [os.dup(fd) for fd in range(3)]
+            try:
+                # duplicate the temp files over the standard fd's 0, 1, 2
+                for fd, temp_fd in enumerate(temp_fds):
+                    os.dup2(temp_fd, fd)
+
+                # write some data to what will become stdin, and rewind
+                os.write(stdin_no, b"STDIN")
+                os.lseek(stdin_no, 0, 0)
+
+                # now use those files in the given order, so that subprocess
+                # has to rearrange them in the child
+                p = subprocess.Popen([sys.executable, "-c",
+                    'import sys; got = sys.stdin.read();'
+                    'sys.stdout.write("got %s"%got); sys.stderr.write("err")'],
+                    stdin=stdin_no,
+                    stdout=stdout_no,
+                    stderr=stderr_no)
+                p.wait()
+
+                for fd in temp_fds:
+                    os.lseek(fd, 0, 0)
+
+                out = os.read(stdout_no, 1024)
+                err = support.strip_python_stderr(os.read(stderr_no, 1024))
+            finally:
+                for std, saved in enumerate(saved_fds):
+                    os.dup2(saved, std)
+                    os.close(saved)
+
+            self.assertEqual(out, b"got STDIN")
+            self.assertEqual(err, b"err")
+
+        finally:
+            for fd in temp_fds:
+                os.close(fd)
+
+    # When duplicating fds, if one of the fds involved is 0, 1 or 2, it is
+    # possible that it gets overwritten (#12607).  This tests all
+    # permutations of the standard fds.
+    def test_swap_fds(self) -> None:
+        self.check_swap_fds(0, 1, 2)
+        self.check_swap_fds(0, 2, 1)
+        self.check_swap_fds(1, 0, 2)
+        self.check_swap_fds(1, 2, 0)
+        self.check_swap_fds(2, 0, 1)
+        self.check_swap_fds(2, 1, 0)
+
+    def test_surrogates_error_message(self) -> None:
+        def prepare() -> None:
+            raise ValueError("surrogate:\uDCff")
+
+        try:
+            subprocess.call(
+                [sys.executable, "-c", "pass"],
+                preexec_fn=prepare)
+        except ValueError as err:
+            # The pure Python implementation keeps the message
+            self.assertIsNone(subprocess._posixsubprocess)
+            self.assertEqual(str(err), "surrogate:\uDCff")
+        except RuntimeError as err2:
+            # _posixsubprocess uses a default message
+            self.assertIsNotNone(subprocess._posixsubprocess)
+            self.assertEqual(str(err2), "Exception occurred in preexec_fn.")
+        else:
+            self.fail("Expected ValueError or RuntimeError")
+
+    def test_undecodable_env(self) -> None:
+        for key, value in (('test', 'abc\uDCFF'), ('test\uDCFF', '42')):
+            # test str with surrogates
+            script = "import os; print(ascii(os.getenv(%s)))" % repr(key)
+            env = os.environ.copy()
+            env[key] = value
+            # Use C locale to get ascii for the locale encoding to force
+            # surrogate-escaping of \xFF in the child process; otherwise it can
+            # be decoded as-is if the default locale is latin-1.
+            env['LC_ALL'] = 'C'
+            stdout = subprocess.check_output(
+                [sys.executable, "-c", script],
+                env=env)
+            stdout = stdout.rstrip(b'\n\r')
+            self.assertEqual(stdout.decode('ascii'), ascii(value))
+
+            # test bytes
+            keyb = key.encode("ascii", "surrogateescape")
+            valueb = value.encode("ascii", "surrogateescape")
+            script = "import os; print(ascii(os.getenvb(%s)))" % repr(keyb)
+            envb = dict(os.environ.copy().items())  # type: Dict[Any, Any]
+            envb[keyb] = valueb
+            stdout = subprocess.check_output(
+                [sys.executable, "-c", script],
+                env=envb)
+            stdout = stdout.rstrip(b'\n\r')
+            self.assertEqual(stdout.decode('ascii'), ascii(valueb))
+
+    def test_bytes_program(self) -> None:
+        abs_program = os.fsencode(sys.executable)
+        path, programs = os.path.split(sys.executable)
+        program = os.fsencode(programs)
+
+        # absolute bytes path
+        exitcode = subprocess.call([abs_program, "-c", "pass"])
+        self.assertEqual(exitcode, 0)
+
+        # bytes program, unicode PATH
+        env = os.environ.copy()
+        env["PATH"] = path
+        exitcode = subprocess.call([program, "-c", "pass"], env=env)
+        self.assertEqual(exitcode, 0)
+
+        # bytes program, bytes PATH
+        envb = os.environb.copy()
+        envb[b"PATH"] = os.fsencode(path)
+        exitcode = subprocess.call([program, "-c", "pass"], env=envb)
+        self.assertEqual(exitcode, 0)
+
+    def test_pipe_cloexec(self) -> None:
+        sleeper = support.findfile("input_reader.py", subdir="subprocessdata")
+        fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
+
+        p1 = subprocess.Popen([sys.executable, sleeper],
+                              stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+                              stderr=subprocess.PIPE, close_fds=False)
+
+        self.addCleanup(p1.communicate, b'')
+
+        p2 = subprocess.Popen([sys.executable, fd_status],
+                              stdout=subprocess.PIPE, close_fds=False)
+
+        output, error = p2.communicate()
+        result_fds = set(map(int, output.split(b',')))
+        unwanted_fds = set([p1.stdin.fileno(), p1.stdout.fileno(),
+                            p1.stderr.fileno()])
+
+        self.assertFalse(result_fds & unwanted_fds,
+                         "Expected no fds from %r to be open in child, "
+                         "found %r" %
+                              (unwanted_fds, result_fds & unwanted_fds))
+
+    def test_pipe_cloexec_real_tools(self) -> None:
+        qcat = support.findfile("qcat.py", subdir="subprocessdata")
+        qgrep = support.findfile("qgrep.py", subdir="subprocessdata")
+
+        subdata = b'zxcvbn'
+        data = subdata * 4 + b'\n'
+
+        p1 = subprocess.Popen([sys.executable, qcat],
+                              stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+                              close_fds=False)
+
+        p2 = subprocess.Popen([sys.executable, qgrep, subdata],
+                              stdin=p1.stdout, stdout=subprocess.PIPE,
+                              close_fds=False)
+
+        self.addCleanup(p1.wait)
+        self.addCleanup(p2.wait)
+        def kill_p1() -> None:
+            #try:
+            p1.terminate()
+            #except ProcessLookupError:
+            #    pass
+        def kill_p2() -> None:
+            #try:
+            p2.terminate()
+            #except ProcessLookupError:
+            #    pass
+        self.addCleanup(kill_p1)
+        self.addCleanup(kill_p2)
+
+        p1.stdin.write(data)
+        p1.stdin.close()
+
+        readfiles, ignored1, ignored2 = select.select([p2.stdout], [], [], 10)
+
+        self.assertTrue(readfiles, "The child hung")
+        self.assertEqual(p2.stdout.read(), data)
+
+        p1.stdout.close()
+        p2.stdout.close()
+
+    def test_close_fds(self) -> None:
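+        # close_fds=False should leave inherited fds open in the child;
+        # close_fds=True should close them.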
+        fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
+
+        fds = os.pipe()
+        self.addCleanup(os.close, fds[0])
+        self.addCleanup(os.close, fds[1])
+
+        open_fds = set([fds[0], fds[1]])
+        # add a bunch more fds
+        for _ in range(9):
+            fd = os.open("/dev/null", os.O_RDONLY)
+            self.addCleanup(os.close, fd)
+            open_fds.add(fd)
+
+        p = subprocess.Popen([sys.executable, fd_status],
+                             stdout=subprocess.PIPE, close_fds=False)
+        output, ignored = p.communicate()
+        remaining_fds = set(map(int, output.split(b',')))
+
+        self.assertEqual(remaining_fds & open_fds, open_fds,
+                         "Some fds were closed")
+
+        p = subprocess.Popen([sys.executable, fd_status],
+                             stdout=subprocess.PIPE, close_fds=True)
+        output, ignored = p.communicate()
+        remaining_fds = set(map(int, output.split(b',')))
+
+        self.assertFalse(remaining_fds & open_fds,
+                         "Some fds were left open")
+        self.assertIn(1, remaining_fds, "Subprocess failed")
+
+        # Keep some of the fd's we opened open in the subprocess.
+        # This tests _posixsubprocess.c's proper handling of fds_to_keep.
+        fds_to_keep = set(open_fds.pop() for _ in range(8))
+        p = subprocess.Popen([sys.executable, fd_status],
+                             stdout=subprocess.PIPE, close_fds=True,
+                             pass_fds=())
+        output, ignored = p.communicate()
+        remaining_fds = set(map(int, output.split(b',')))
+
+        self.assertFalse(remaining_fds & fds_to_keep & open_fds,
+                         "Some fds not in pass_fds were left open")
+        self.assertIn(1, remaining_fds, "Subprocess failed")
+
+    # Mac OS X Tiger (10.4) has a kernel bug: sometimes, the file
+    # descriptor of a pipe closed in the parent process is valid in the
+    # child process according to fstat(), but the mode of the file
+    # descriptor is invalid, and read or write raise an error.
+    @support.requires_mac_ver(10, 5)
+    def test_pass_fds(self) -> None:
+        fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
+
+        open_fds = set()  # type: Set[int]
+
+        for x in range(5):
+            fds = os.pipe()
+            self.addCleanup(os.close, fds[0])
+            self.addCleanup(os.close, fds[1])
+            open_fds.update([fds[0], fds[1]])
+
+        for fd in open_fds:
+            p = subprocess.Popen([sys.executable, fd_status],
+                                 stdout=subprocess.PIPE, close_fds=True,
+                                 pass_fds=(fd, ))
+            output, ignored = p.communicate()
+
+            remaining_fds = set(map(int, output.split(b',')))
+            to_be_closed = open_fds - {fd}
+
+            self.assertIn(fd, remaining_fds, "fd to be passed not passed")
+            self.assertFalse(remaining_fds & to_be_closed,
+                             "fd to be closed passed")
+
+            # pass_fds overrides close_fds with a warning.
+            with self.assertWarns(RuntimeWarning) as context:
+                self.assertFalse(subprocess.call(
+                        [sys.executable, "-c", "import sys; sys.exit(0)"],
+                        close_fds=False, pass_fds=(fd, )))
+            self.assertIn('overriding close_fds', str(context.warning))
+
+    def test_stdout_stdin_are_single_inout_fd(self) -> None:
+        with io.open(os.devnull, "r+") as inout:
+            p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
+                                 stdout=inout, stdin=inout)
+            p.wait()
+
+    def test_stdout_stderr_are_single_inout_fd(self) -> None:
+        with io.open(os.devnull, "r+") as inout:
+            p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
+                                 stdout=inout, stderr=inout)
+            p.wait()
+
+    def test_stderr_stdin_are_single_inout_fd(self) -> None:
+        with io.open(os.devnull, "r+") as inout:
+            p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
+                                 stderr=inout, stdin=inout)
+            p.wait()
+
+    def test_wait_when_sigchild_ignored(self) -> None:
+        # NOTE: sigchild_ignore.py may not be an effective test on all OSes.
+        sigchild_ignore = support.findfile("sigchild_ignore.py",
+                                           subdir="subprocessdata")
+        p = subprocess.Popen([sys.executable, sigchild_ignore],
+                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        stdout, stderr = p.communicate()
+        self.assertEqual(0, p.returncode, "sigchild_ignore.py exited"
+                         " non-zero with this error:\n%s" %
+                         stderr.decode('utf8'))
+
+    def test_select_unbuffered(self) -> None:
+        # Issue #11459: bufsize=0 should really set the pipes as
+        # unbuffered (and therefore let select() work properly).
+        select = support.import_module("select")
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys;'
+                              'sys.stdout.write("apple")'],
+                             stdout=subprocess.PIPE,
+                             bufsize=0)
+        f = p.stdout
+        self.addCleanup(f.close)
+        try:
+            self.assertEqual(f.read(4), b"appl")
+            self.assertIn(f, select.select([f], [], [], 0.0)[0])
+        finally:
+            p.wait()
+
+    def test_zombie_fast_process_del(self) -> None:
+        # Issue #12650: on Unix, if Popen.__del__() was called before the
+        # process exited, it wouldn't be added to subprocess._active, and would
+        # remain a zombie.
+        # spawn a Popen, and delete its reference before it exits
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys, time;'
+                              'time.sleep(0.2)'],
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stderr.close)
+        ident = id(p)
+        pid = p.pid
+        del p
+        # check that p is in the active processes list
+        self.assertIn(ident, [id(o) for o in subprocess._active])
+
+    def test_leak_fast_process_del_killed(self) -> None:
+        # Issue #12650: on Unix, if Popen.__del__() was called before the
+        # process exited, and the process got killed by a signal, it would never
+        # be removed from subprocess._active, which triggered a FD and memory
+        # leak.
+        # spawn a Popen, delete its reference and kill it
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import time;'
+                              'time.sleep(3)'],
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stderr.close)
+        ident = id(p)
+        pid = p.pid
+        del p
+        os.kill(pid, signal.SIGKILL)
+        # check that p is in the active processes list
+        self.assertIn(ident, [id(o) for o in subprocess._active])
+
+        # let some time for the process to exit, and create a new Popen: this
+        # should trigger the wait() of p
+        time.sleep(0.2)
+        with self.assertRaises(EnvironmentError) as c:
+            with subprocess.Popen(['nonexisting_i_hope'],
+                                  stdout=subprocess.PIPE,
+                                  stderr=subprocess.PIPE) as proc:
+                pass
+        # p should have been wait()ed on, and removed from the _active list
+        self.assertRaises(OSError, os.waitpid, pid, 0)
+        self.assertNotIn(ident, [id(o) for o in subprocess._active])
+
+
+ at unittest.skipUnless(mswindows, "Windows specific tests")
+class Win32ProcessTestCase(BaseTestCase):
+
+    def test_startupinfo(self) -> None:
+        # startupinfo argument
+        # We use hardcoded constants because we do not want to
+        # depend on win32all.
+        STARTF_USESHOWWINDOW = 1
+        SW_MAXIMIZE = 3
+        startupinfo = subprocess.STARTUPINFO()
+        startupinfo.dwFlags = STARTF_USESHOWWINDOW
+        startupinfo.wShowWindow = SW_MAXIMIZE
+        # Since Python is a console process, it won't be affected
+        # by wShowWindow, but the argument should be silently
+        # ignored
+        subprocess.call([sys.executable, "-c", "import sys; sys.exit(0)"],
+                        startupinfo=startupinfo)
+
+    def test_creationflags(self) -> None:
+        # creationflags argument
+        CREATE_NEW_CONSOLE = 16
+        sys.stderr.write("    a DOS box should flash briefly ...\n")
+        subprocess.call(sys.executable +
+                        ' -c "import time; time.sleep(0.25)"',
+                        creationflags=CREATE_NEW_CONSOLE)
+
+    def test_invalid_args(self) -> None:
+        # invalid arguments should raise ValueError
+        self.assertRaises(ValueError, subprocess.call,
+                          [sys.executable, "-c",
+                           "import sys; sys.exit(47)"],
+                          preexec_fn=lambda: 1)
+        self.assertRaises(ValueError, subprocess.call,
+                          [sys.executable, "-c",
+                           "import sys; sys.exit(47)"],
+                          stdout=subprocess.PIPE,
+                          close_fds=True)
+
+    def test_close_fds(self) -> None:
+        # close file descriptors
+        rc = subprocess.call([sys.executable, "-c",
+                              "import sys; sys.exit(47)"],
+                              close_fds=True)
+        self.assertEqual(rc, 47)
+
+    def test_shell_sequence(self) -> None:
+        # Run command through the shell (sequence)
+        newenv = os.environ.copy()
+        newenv["FRUIT"] = "physalis"
+        p = subprocess.Popen(["set"], shell=1,
+                             stdout=subprocess.PIPE,
+                             env=newenv)
+        self.addCleanup(p.stdout.close)
+        self.assertIn(b"physalis", p.stdout.read())
+
+    def test_shell_string(self) -> None:
+        # Run command through the shell (string)
+        newenv = os.environ.copy()
+        newenv["FRUIT"] = "physalis"
+        p = subprocess.Popen("set", shell=1,
+                             stdout=subprocess.PIPE,
+                             env=newenv)
+        self.addCleanup(p.stdout.close)
+        self.assertIn(b"physalis", p.stdout.read())
+
+    def test_call_string(self) -> None:
+        # call() function with string argument on Windows
+        rc = subprocess.call(sys.executable +
+                             ' -c "import sys; sys.exit(47)"')
+        self.assertEqual(rc, 47)
+
+    def _kill_process(self, method: str, *args: Any) -> None:
+        # Some win32 buildbots raise EOFError if stdin is inherited
+        p = subprocess.Popen([sys.executable, "-c", """if 1:
+                             import sys, time
+                             sys.stdout.write('x\\n')
+                             sys.stdout.flush()
+                             time.sleep(30)
+                             """],
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stderr.close)
+        self.addCleanup(p.stdin.close)
+        # Wait for the interpreter to be completely initialized before
+        # sending any signal.
+        p.stdout.read(1)
+        getattr(p, method)(*args)
+        _, stderr = p.communicate()
+        self.assertStderrEqual(stderr, b'')
+        returncode = p.wait()
+        self.assertNotEqual(returncode, 0)
+
+    def test_send_signal(self) -> None:
+        self._kill_process('send_signal', signal.SIGTERM)
+
+    def test_kill(self) -> None:
+        self._kill_process('kill')
+
+    def test_terminate(self) -> None:
+        self._kill_process('terminate')
+
+
+# The module says:
+#   "NB This only works (and is only relevant) for UNIX."
+#
+# Actually, getoutput should work on any platform with an os.popen, but
+# I'll take the comment as given, and skip this suite.
+@unittest.skipUnless(os.name == 'posix', "only relevant for UNIX")
+class CommandTests(unittest.TestCase):
+    def test_getoutput(self) -> None:
+        self.assertEqual(subprocess.getoutput('echo xyzzy'), 'xyzzy')
+        self.assertEqual(subprocess.getstatusoutput('echo xyzzy'),
+                         (0, 'xyzzy'))
+
+        # we use mkdtemp in the next line to create an empty directory
+        # under our exclusive control; from that, we can invent a pathname
+        # that we _know_ won't exist.  This is guaranteed to fail.
+        dir = None # type: str
+        try:
+            dir = tempfile.mkdtemp()
+            name = os.path.join(dir, "foo")
+
+            status, output = subprocess.getstatusoutput('cat ' + name)
+            self.assertNotEqual(status, 0)
+        finally:
+            if dir is not None:
+                os.rmdir(dir)
+
+
+@unittest.skipUnless(getattr(subprocess, '_has_poll', False),
+                     "poll system call not supported")
+class ProcessTestCaseNoPoll(ProcessTestCase):
+    def setUp(self) -> None:
+        subprocess._has_poll = False
+        ProcessTestCase.setUp(self)
+
+    def tearDown(self) -> None:
+        subprocess._has_poll = True
+        ProcessTestCase.tearDown(self)
+
+
+#@unittest.skipUnless(getattr(subprocess, '_posixsubprocess', False),
+#                     "_posixsubprocess extension module not found.")
+#class ProcessTestCasePOSIXPurePython(ProcessTestCase, POSIXProcessTestCase):
+#    @classmethod
+#    def setUpClass(cls):
+#        global subprocess
+#        assert subprocess._posixsubprocess
+#        # Reimport subprocess while forcing _posixsubprocess to not exist.
+#        with support.check_warnings(('.*_posixsubprocess .* not being used.*',
+#                                     RuntimeWarning)):
+#            subprocess = support.import_fresh_module(
+#                    'subprocess', blocked=['_posixsubprocess'])
+#        assert not subprocess._posixsubprocess
+#
+#    @classmethod
+#    def tearDownClass(cls):
+#        global subprocess
+#        # Reimport subprocess as it should be, restoring order to the universe.
+#        subprocess = support.import_fresh_module('subprocess')
+#        assert subprocess._posixsubprocess
+
+
+class HelperFunctionTests(unittest.TestCase):
+    @unittest.skipIf(mswindows, "errno and EINTR make no sense on windows")
+    def test_eintr_retry_call(self) -> None:
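+        # _eintr_retry_call should retry the wrapped call when it raises
+        # EINTR and return its result.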
+        record_calls = []  # type: List[Any]
+        def fake_os_func(*args: Any) -> tuple:
+            record_calls.append(args)
+            if len(record_calls) == 2:
+                raise OSError(errno.EINTR, "fake interrupted system call")
+            return tuple(reversed(args))
+
+        self.assertEqual((999, 256),
+                         subprocess._eintr_retry_call(fake_os_func, 256, 999))
+        self.assertEqual([(256, 999)], record_calls)
+        # This time there will be an EINTR so it will loop once.
+        self.assertEqual((666,),
+                         subprocess._eintr_retry_call(fake_os_func, 666))
+        self.assertEqual([(256, 999), (666,), (666,)], record_calls)
+
+
+ at unittest.skipUnless(mswindows, "Windows-specific tests")
+class CommandsWithSpaces(BaseTestCase):
+
+    def setUp(self) -> None:
+        super().setUp()
+        f, fname = mkstemp(".py", "te st")
+        self.fname = fname.lower()
+        os.write(f, b"import sys;"
+                    b"sys.stdout.write('%d %s' % (len(sys.argv), [a.lower () for a in sys.argv]))"
+        )
+        os.close(f)
+
+    def tearDown(self) -> None:
+        os.remove(self.fname)
+        super().tearDown()
+
+    def with_spaces(self, *args: Any, **kwargs: Any) -> None:
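+        # Run the command and check that the child saw exactly two
+        # arguments, the second being 'ab cd'.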
+        kwargs['stdout'] = subprocess.PIPE
+        p = subprocess.Popen(*args, **kwargs)
+        self.addCleanup(p.stdout.close)
+        self.assertEqual(
+          p.stdout.read ().decode("mbcs"),
+          "2 [%r, 'ab cd']" % self.fname
+        )
+
+    def test_shell_string_with_spaces(self) -> None:
+        # call() function with string argument with spaces on Windows
+        self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname,
+                                             "ab cd"), shell=1)
+
+    def test_shell_sequence_with_spaces(self) -> None:
+        # call() function with sequence argument with spaces on Windows
+        self.with_spaces([sys.executable, self.fname, "ab cd"], shell=1)
+
+    def test_noshell_string_with_spaces(self) -> None:
+        # call() function with string argument with spaces on Windows
+        self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname,
+                             "ab cd"))
+
+    def test_noshell_sequence_with_spaces(self) -> None:
+        # call() function with sequence argument with spaces on Windows
+        self.with_spaces([sys.executable, self.fname, "ab cd"])
+
+
+class ContextManagerTests(BaseTestCase):
+
+    def test_pipe(self) -> None:
+        with subprocess.Popen([sys.executable, "-c",
+                               "import sys;"
+                               "sys.stdout.write('stdout');"
+                               "sys.stderr.write('stderr');"],
+                              stdout=subprocess.PIPE,
+                              stderr=subprocess.PIPE) as proc:
+            self.assertEqual(proc.stdout.read(), b"stdout")
+            self.assertStderrEqual(proc.stderr.read(), b"stderr")
+
+        self.assertTrue(proc.stdout.closed)
+        self.assertTrue(proc.stderr.closed)
+
+    def test_returncode(self) -> None:
+        with subprocess.Popen([sys.executable, "-c",
+                               "import sys; sys.exit(100)"]) as proc:
+            pass
+        # __exit__ calls wait(), so the returncode should be set
+        self.assertEqual(proc.returncode, 100)
+
+    def test_communicate_stdin(self) -> None:
+        with subprocess.Popen([sys.executable, "-c",
+                              "import sys;"
+                              "sys.exit(sys.stdin.read() == 'context')"],
+                             stdin=subprocess.PIPE) as proc:
+            proc.communicate(b"context")
+            self.assertEqual(proc.returncode, 1)
+
+    def test_invalid_args(self) -> None:
+        with self.assertRaises(EnvironmentError) as c:
+            with subprocess.Popen(['nonexisting_i_hope'],
+                                  stdout=subprocess.PIPE,
+                                  stderr=subprocess.PIPE) as proc:
+                pass
+
+            if c.exception.errno != errno.ENOENT:  # ignore "no such file"
+                raise c.exception
+
+
+def test_main():
+    unit_tests = (ProcessTestCase,
+                  POSIXProcessTestCase,
+                  Win32ProcessTestCase,
+                  #ProcessTestCasePOSIXPurePython,
+                  CommandTests,
+                  ProcessTestCaseNoPoll,
+                  HelperFunctionTests,
+                  CommandsWithSpaces,
+                  ContextManagerTests,
+                  )
+
+    support.run_unittest(*unit_tests)
+    support.reap_children()
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/test-data/stdlib-samples/3.2/test/test_tempfile.py b/test-data/stdlib-samples/3.2/test/test_tempfile.py
new file mode 100644
index 0000000..31b0fec
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_tempfile.py
@@ -0,0 +1,1122 @@
+# tempfile.py unit tests.
+import tempfile
+import os
+import signal
+import sys
+import re
+import warnings
+
+import unittest
+from test import support
+
+from typing import Any, AnyStr, List, Dict, IO
+
+
+if hasattr(os, 'stat'):
+    import stat
+    has_stat = 1
+else:
+    has_stat = 0
+
+has_textmode = (tempfile._text_openflags != tempfile._bin_openflags)
+has_spawnl = hasattr(os, 'spawnl')
+
+# TEST_FILES may need to be tweaked for systems depending on the maximum
+# number of files that can be opened at one time (see ulimit -n)
+if sys.platform in ('openbsd3', 'openbsd4'):
+    TEST_FILES = 48
+else:
+    TEST_FILES = 100
+
+# This is organized as one test for each chunk of code in tempfile.py,
+# in the order of its appearance in the file.  Testing that requires
+# threads is not done here.
+
+# Common functionality.
+class TC(unittest.TestCase):
+
+    str_check = re.compile(r"[a-zA-Z0-9_-]{6}$")
+
+    def setUp(self) -> None:
+        self._warnings_manager = support.check_warnings()
+        self._warnings_manager.__enter__()
+        warnings.filterwarnings("ignore", category=RuntimeWarning,
+                                message="mktemp", module=__name__)
+
+    def tearDown(self) -> None:
+        self._warnings_manager.__exit__(None, None, None)
+
+
+    def failOnException(self, what: str, ei: tuple = None) -> None:
+        if ei is None:
+            ei = sys.exc_info()
+        self.fail("%s raised %s: %s" % (what, ei[0], ei[1]))
+
+    def nameCheck(self, name: str, dir: str, pre: str, suf: str) -> None:
+        (ndir, nbase) = os.path.split(name)
+        npre  = nbase[:len(pre)]
+        nsuf  = nbase[len(nbase)-len(suf):]
+
+        # check for equality of the absolute paths!
+        self.assertEqual(os.path.abspath(ndir), os.path.abspath(dir),
+                         "file '%s' not in directory '%s'" % (name, dir))
+        self.assertEqual(npre, pre,
+                         "file '%s' does not begin with '%s'" % (nbase, pre))
+        self.assertEqual(nsuf, suf,
+                         "file '%s' does not end with '%s'" % (nbase, suf))
+
+        nbase = nbase[len(pre):len(nbase)-len(suf)]
+        self.assertTrue(self.str_check.match(nbase),
+                     "random string '%s' does not match /^[a-zA-Z0-9_-]{6}$/"
+                     % nbase)
+
+test_classes = [] # type: List[type]
+
+class test_exports(TC):
+    def test_exports(self) -> None:
+        # There are no surprising symbols in the tempfile module
+        dict = tempfile.__dict__
+
+        expected = {
+            "NamedTemporaryFile" : 1,
+            "TemporaryFile" : 1,
+            "mkstemp" : 1,
+            "mkdtemp" : 1,
+            "mktemp" : 1,
+            "TMP_MAX" : 1,
+            "gettempprefix" : 1,
+            "gettempdir" : 1,
+            "tempdir" : 1,
+            "template" : 1,
+            "SpooledTemporaryFile" : 1,
+            "TemporaryDirectory" : 1,
+        }
+
+        unexp = []  # type: List[str]
+        for key in dict:
+            if key[0] != '_' and key not in expected:
+                unexp.append(key)
+        self.assertTrue(len(unexp) == 0,
+                        "unexpected keys: %s" % unexp)
+
+test_classes.append(test_exports)
+
+
+class test__RandomNameSequence(TC):
+    """Test the internal iterator object _RandomNameSequence."""
+
+    def setUp(self) -> None:
+        self.r = tempfile._RandomNameSequence()
+        super().setUp()
+
+    def test_get_six_char_str(self) -> None:
+        # _RandomNameSequence returns a six-character string
+        s = next(self.r)
+        self.nameCheck(s, '', '', '')
+
+    def test_many(self) -> None:
+        # _RandomNameSequence returns no duplicate strings (stochastic)
+
+        dict = {}  # type: Dict[str, int]
+        r = self.r
+        for i in range(TEST_FILES):
+            s = next(r)
+            self.nameCheck(s, '', '', '')
+            self.assertNotIn(s, dict)
+            dict[s] = 1
+
+    def supports_iter(self) -> None:
+        # _RandomNameSequence supports the iterator protocol
+
+        i = 0
+        r = self.r
+        try:
+            for s in r:
+                i += 1
+                if i == 20:
+                    break
+        except:
+            self.failOnException("iteration")
+
+    @unittest.skipUnless(hasattr(os, 'fork'),
+        "os.fork is required for this test")
+    def test_process_awareness(self) -> None:
+        # ensure that the random source differs between
+        # child and parent.
+        read_fd, write_fd = os.pipe()
+        pid = None # type: int
+        try:
+            pid = os.fork()
+            if not pid:
+                os.close(read_fd)
+                os.write(write_fd, next(self.r).encode("ascii"))
+                os.close(write_fd)
+                # bypass the normal exit handlers - leave those to
+                # the parent.
+                os._exit(0)
+            parent_value = next(self.r)
+            child_value = os.read(read_fd, len(parent_value)).decode("ascii")
+        finally:
+            if pid:
+                # best effort to ensure the process can't bleed out
+                # via any bugs above
+                try:
+                    os.kill(pid, signal.SIGKILL)
+                except EnvironmentError:
+                    pass
+            os.close(read_fd)
+            os.close(write_fd)
+        self.assertNotEqual(child_value, parent_value)
+
+
+test_classes.append(test__RandomNameSequence)
+
+
+class test__candidate_tempdir_list(TC):
+    """Test the internal function _candidate_tempdir_list."""
+
+    def test_nonempty_list(self) -> None:
+        # _candidate_tempdir_list returns a nonempty list of strings
+
+        cand = tempfile._candidate_tempdir_list()
+
+        self.assertFalse(len(cand) == 0)
+        for c in cand:
+            self.assertIsInstance(c, str)
+
+    def test_wanted_dirs(self) -> None:
+        # _candidate_tempdir_list contains the expected directories
+
+        # Make sure the interesting environment variables are all set.
+        with support.EnvironmentVarGuard() as env:
+            for envname in 'TMPDIR', 'TEMP', 'TMP':
+                dirname = os.getenv(envname)
+                if not dirname:
+                    env[envname] = os.path.abspath(envname)
+
+            cand = tempfile._candidate_tempdir_list()
+
+            for envname in 'TMPDIR', 'TEMP', 'TMP':
+                dirname = os.getenv(envname)
+                if not dirname: raise ValueError
+                self.assertIn(dirname, cand)
+
+            try:
+                dirname = os.getcwd()
+            except (AttributeError, os.error):
+                dirname = os.curdir
+
+            self.assertIn(dirname, cand)
+
+            # Not practical to try to verify the presence of OS-specific
+            # paths in this list.
+
+test_classes.append(test__candidate_tempdir_list)
+
+
+# We test _get_default_tempdir by testing gettempdir.
+
+
+class test__get_candidate_names(TC):
+    """Test the internal function _get_candidate_names."""
+
+    def test_retval(self) -> None:
+        # _get_candidate_names returns a _RandomNameSequence object
+        obj = tempfile._get_candidate_names()
+        self.assertIsInstance(obj, tempfile._RandomNameSequence)
+
+    def test_same_thing(self) -> None:
+        # _get_candidate_names always returns the same object
+        a = tempfile._get_candidate_names()
+        b = tempfile._get_candidate_names()
+
+        self.assertTrue(a is b)
+
+test_classes.append(test__get_candidate_names)
+
+
+class test__mkstemp_inner(TC):
+    """Test the internal function _mkstemp_inner."""
+
+    class mkstemped:
+        _bflags = tempfile._bin_openflags
+        _tflags = tempfile._text_openflags
+
+        def __init__(self, dir: str, pre: str, suf: str, bin: int) -> None:
+            if bin: flags = self._bflags
+            else:   flags = self._tflags
+
+            (self.fd, self.name) = tempfile._mkstemp_inner(dir, pre, suf, flags)
+
+            self._close = os.close
+            self._unlink = os.unlink
+
+        def write(self, str: bytes) -> None:
+            os.write(self.fd, str)
+
+        def __del__(self) -> None:
+            self._close(self.fd)
+            self._unlink(self.name)
+
+    def do_create(self, dir: str = None, pre: str = "", suf: str= "",
+                  bin: int = 1) -> mkstemped:
+        if dir is None:
+            dir = tempfile.gettempdir()
+        try:
+            file = test__mkstemp_inner.mkstemped(dir, pre, suf, bin) # see #259
+        except:
+            self.failOnException("_mkstemp_inner")
+
+        self.nameCheck(file.name, dir, pre, suf)
+        return file
+
+    def test_basic(self) -> None:
+        # _mkstemp_inner can create files
+        self.do_create().write(b"blat")
+        self.do_create(pre="a").write(b"blat")
+        self.do_create(suf="b").write(b"blat")
+        self.do_create(pre="a", suf="b").write(b"blat")
+        self.do_create(pre="aa", suf=".txt").write(b"blat")
+
+    def test_basic_many(self) -> None:
+        # _mkstemp_inner can create many files (stochastic)
+        extant = list(range(TEST_FILES))  # type: List[Any]
+        for i in extant:
+            extant[i] = self.do_create(pre="aa")
+
+    def test_choose_directory(self) -> None:
+        # _mkstemp_inner can create files in a user-selected directory
+        dir = tempfile.mkdtemp()
+        try:
+            self.do_create(dir=dir).write(b"blat")
+        finally:
+            os.rmdir(dir)
+
+    def test_file_mode(self) -> None:
+        # _mkstemp_inner creates files with the proper mode
+        if not has_stat:
+            return            # ugh, can't use SkipTest.
+
+        file = self.do_create()
+        mode = stat.S_IMODE(os.stat(file.name).st_mode)
+        expected = 0o600
+        if sys.platform in ('win32', 'os2emx'):
+            # There's no distinction among 'user', 'group' and 'world';
+            # replicate the 'user' bits.
+            user = expected >> 6
+            expected = user * (1 + 8 + 64)
+        self.assertEqual(mode, expected)
+
+    def test_noinherit(self) -> None:
+        # _mkstemp_inner file handles are not inherited by child processes
+        if not has_spawnl:
+            return            # ugh, can't use SkipTest.
+
+        if support.verbose:
+            v="v"
+        else:
+            v="q"
+
+        file = self.do_create()
+        fd = "%d" % file.fd
+
+        try:
+            me = __file__ # type: str
+        except NameError:
+            me = sys.argv[0]
+
+        # We have to exec something, so that FD_CLOEXEC will take
+        # effect.  The core of this test is therefore in
+        # tf_inherit_check.py, which see.
+        tester = os.path.join(os.path.dirname(os.path.abspath(me)),
+                              "tf_inherit_check.py")
+
+        # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted,
+        # but an arg with embedded spaces should be decorated with double
+        # quotes on each end
+        if sys.platform in ('win32',):
+            decorated = '"%s"' % sys.executable
+            tester = '"%s"' % tester
+        else:
+            decorated = sys.executable
+
+        retval = os.spawnl(os.P_WAIT, sys.executable, decorated, tester, v, fd)
+        self.assertFalse(retval < 0,
+                    "child process caught fatal signal %d" % -retval)
+        self.assertFalse(retval > 0, "child process reports failure %d"%retval)
+
+    def test_textmode(self) -> None:
+        # _mkstemp_inner can create files in text mode
+        if not has_textmode:
+            return            # ugh, can't use SkipTest.
+
+        # A text file is truncated at the first Ctrl+Z byte
+        f = self.do_create(bin=0)
+        f.write(b"blat\x1a")
+        f.write(b"extra\n")
+        os.lseek(f.fd, 0, os.SEEK_SET)
+        self.assertEqual(os.read(f.fd, 20), b"blat")
+
+test_classes.append(test__mkstemp_inner)
+
+
+class test_gettempprefix(TC):
+    """Test gettempprefix()."""
+
+    def test_sane_template(self) -> None:
+        # gettempprefix returns a nonempty prefix string
+        p = tempfile.gettempprefix()
+
+        self.assertIsInstance(p, str)
+        self.assertTrue(len(p) > 0)
+
+    def test_usable_template(self) -> None:
+        # gettempprefix returns a usable prefix string
+
+        # Create a temp directory, avoiding use of the prefix.
+        # Then attempt to create a file whose name is
+        # prefix + 'xxxxxx.xxx' in that directory.
+        p = tempfile.gettempprefix() + "xxxxxx.xxx"
+        d = tempfile.mkdtemp(prefix="")
+        try:
+            p = os.path.join(d, p)
+            try:
+                fd = os.open(p, os.O_RDWR | os.O_CREAT)
+            except:
+                self.failOnException("os.open")
+            os.close(fd)
+            os.unlink(p)
+        finally:
+            os.rmdir(d)
+
+test_classes.append(test_gettempprefix)
+
+
+class test_gettempdir(TC):
+    """Test gettempdir()."""
+
+    def test_directory_exists(self) -> None:
+        # gettempdir returns a directory which exists
+
+        dir = tempfile.gettempdir()
+        self.assertTrue(os.path.isabs(dir) or dir == os.curdir,
+                     "%s is not an absolute path" % dir)
+        self.assertTrue(os.path.isdir(dir),
+                     "%s is not a directory" % dir)
+
+    def test_directory_writable(self) -> None:
+        # gettempdir returns a directory writable by the user
+
+        # sneaky: just instantiate a NamedTemporaryFile, which
+        # defaults to writing into the directory returned by
+        # gettempdir.
+        try:
+            file = tempfile.NamedTemporaryFile()
+            file.write(b"blat")
+            file.close()
+        except:
+            self.failOnException("create file in %s" % tempfile.gettempdir())
+
+    def test_same_thing(self) -> None:
+        # gettempdir always returns the same object
+        a = tempfile.gettempdir()
+        b = tempfile.gettempdir()
+
+        self.assertTrue(a is b)
+
+test_classes.append(test_gettempdir)
+
+
+class test_mkstemp(TC):
+    """Test mkstemp()."""
+
+    def do_create(self, dir: str = None, pre: str = "", suf: str = "") -> None:
+        if dir is None:
+            dir = tempfile.gettempdir()
+        try:
+            (fd, name) = tempfile.mkstemp(dir=dir, prefix=pre, suffix=suf)
+            (ndir, nbase) = os.path.split(name)
+            adir = os.path.abspath(dir)
+            self.assertEqual(adir, ndir,
+                "Directory '%s' incorrectly returned as '%s'" % (adir, ndir))
+        except:
+            self.failOnException("mkstemp")
+
+        try:
+            self.nameCheck(name, dir, pre, suf)
+        finally:
+            os.close(fd)
+            os.unlink(name)
+
+    def test_basic(self) -> None:
+        # mkstemp can create files
+        self.do_create()
+        self.do_create(pre="a")
+        self.do_create(suf="b")
+        self.do_create(pre="a", suf="b")
+        self.do_create(pre="aa", suf=".txt")
+        self.do_create(dir=".")
+
+    def test_choose_directory(self) -> None:
+        # mkstemp can create files in a user-selected directory
+        dir = tempfile.mkdtemp()
+        try:
+            self.do_create(dir=dir)
+        finally:
+            os.rmdir(dir)
+
+test_classes.append(test_mkstemp)
+
+
+class test_mkdtemp(TC):
+    """Test mkdtemp()."""
+
+    def do_create(self, dir: str = None, pre: str = "", suf: str = "") -> str:
+        if dir is None:
+            dir = tempfile.gettempdir()
+        try:
+            name = tempfile.mkdtemp(dir=dir, prefix=pre, suffix=suf)
+        except:
+            self.failOnException("mkdtemp")
+
+        try:
+            self.nameCheck(name, dir, pre, suf)
+            return name
+        except:
+            os.rmdir(name)
+            raise
+
+    def test_basic(self) -> None:
+        # mkdtemp can create directories
+        os.rmdir(self.do_create())
+        os.rmdir(self.do_create(pre="a"))
+        os.rmdir(self.do_create(suf="b"))
+        os.rmdir(self.do_create(pre="a", suf="b"))
+        os.rmdir(self.do_create(pre="aa", suf=".txt"))
+
+    def test_basic_many(self) -> None:
+        # mkdtemp can create many directories (stochastic)
+        extant = list(range(TEST_FILES))  # type: List[Any]
+        try:
+            for i in extant:
+                extant[i] = self.do_create(pre="aa")
+        finally:
+            for i in extant:
+                if isinstance(i, str):
+                    os.rmdir(i)
+
+    def test_choose_directory(self) -> None:
+        # mkdtemp can create directories in a user-selected directory
+        dir = tempfile.mkdtemp()
+        try:
+            os.rmdir(self.do_create(dir=dir))
+        finally:
+            os.rmdir(dir)
+
+    def test_mode(self) -> None:
+        # mkdtemp creates directories with the proper mode
+        if not has_stat:
+            return            # ugh, can't use SkipTest.
+
+        dir = self.do_create()
+        try:
+            mode = stat.S_IMODE(os.stat(dir).st_mode)
+            mode &= 0o777 # Mask off sticky bits inherited from /tmp
+            expected = 0o700
+            if sys.platform in ('win32', 'os2emx'):
+                # There's no distinction among 'user', 'group' and 'world';
+                # replicate the 'user' bits.
+                user = expected >> 6
+                expected = user * (1 + 8 + 64)
+            self.assertEqual(mode, expected)
+        finally:
+            os.rmdir(dir)
+
+test_classes.append(test_mkdtemp)
+
+
+class test_mktemp(TC):
+    """Test mktemp()."""
+
+    # For safety, all use of mktemp must occur in a private directory.
+    # We must also suppress the RuntimeWarning it generates.
+    def setUp(self) -> None:
+        self.dir = tempfile.mkdtemp()
+        super().setUp()
+
+    def tearDown(self) -> None:
+        if self.dir:
+            os.rmdir(self.dir)
+            self.dir = None
+        super().tearDown()
+
+    class mktemped:
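+        # Helper that reserves a name via mktemp(), creates the file so the
+        # name stays taken, and unlinks it again in __del__.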
+        def _unlink(self, path: str) -> None:
+            os.unlink(path)
+
+        _bflags = tempfile._bin_openflags
+
+        def __init__(self, dir: str, pre: str, suf: str) -> None:
+            self.name = tempfile.mktemp(dir=dir, prefix=pre, suffix=suf)
+            # Create the file.  This will raise an exception if it has
+            # mysteriously appeared in the meantime.
+            os.close(os.open(self.name, self._bflags, 0o600))
+
+        def __del__(self) -> None:
+            self._unlink(self.name)
+
+    def do_create(self, pre: str = "", suf: str = "") -> mktemped:
+        try:
+            file = test_mktemp.mktemped(self.dir, pre, suf) # see #259
+        except:
+            self.failOnException("mktemp")
+
+        self.nameCheck(file.name, self.dir, pre, suf)
+        return file
+
+    def test_basic(self) -> None:
+        # mktemp can choose usable file names
+        self.do_create()
+        self.do_create(pre="a")
+        self.do_create(suf="b")
+        self.do_create(pre="a", suf="b")
+        self.do_create(pre="aa", suf=".txt")
+
+    def test_many(self) -> None:
+        # mktemp can choose many usable file names (stochastic)
+        extant = list(range(TEST_FILES))  # type: List[Any]
+        for i in extant:
+            extant[i] = self.do_create(pre="aa")
+
+##     def test_warning(self):
+##         # mktemp issues a warning when used
+##         warnings.filterwarnings("error",
+##                                 category=RuntimeWarning,
+##                                 message="mktemp")
+##         self.assertRaises(RuntimeWarning,
+##                           tempfile.mktemp, dir=self.dir)
+
+test_classes.append(test_mktemp)
+
+
+# We test _TemporaryFileWrapper by testing NamedTemporaryFile.
+
+
+class test_NamedTemporaryFile(TC):
+    """Test NamedTemporaryFile()."""
+
+    def do_create(self, dir: str = None, pre: str = "", suf: str = "",
+                  delete: bool = True) -> IO[Any]:
+        if dir is None:
+            dir = tempfile.gettempdir()
+        try:
+            file = tempfile.NamedTemporaryFile(dir=dir, prefix=pre, suffix=suf,
+                                               delete=delete)
+        except:
+            self.failOnException("NamedTemporaryFile")
+
+        self.nameCheck(file.name, dir, pre, suf)
+        return file
+
+
+    def test_basic(self) -> None:
+        # NamedTemporaryFile can create files
+        self.do_create()
+        self.do_create(pre="a")
+        self.do_create(suf="b")
+        self.do_create(pre="a", suf="b")
+        self.do_create(pre="aa", suf=".txt")
+
+    def test_creates_named(self) -> None:
+        # NamedTemporaryFile creates files with names
+        f = tempfile.NamedTemporaryFile()
+        self.assertTrue(os.path.exists(f.name),
+                        "NamedTemporaryFile %s does not exist" % f.name)
+
+    def test_del_on_close(self) -> None:
+        # A NamedTemporaryFile is deleted when closed
+        dir = tempfile.mkdtemp()
+        try:
+            f = tempfile.NamedTemporaryFile(dir=dir)
+            f.write(b'blat')
+            f.close()
+            self.assertFalse(os.path.exists(f.name),
+                        "NamedTemporaryFile %s exists after close" % f.name)
+        finally:
+            os.rmdir(dir)
+
+    def test_dis_del_on_close(self) -> None:
+        # Tests that delete-on-close can be disabled
+        dir = tempfile.mkdtemp()
+        tmp = None # type: str
+        try:
+            f = tempfile.NamedTemporaryFile(dir=dir, delete=False)
+            tmp = f.name
+            f.write(b'blat')
+            f.close()
+            self.assertTrue(os.path.exists(f.name),
+                        "NamedTemporaryFile %s missing after close" % f.name)
+        finally:
+            if tmp is not None:
+                os.unlink(tmp)
+            os.rmdir(dir)
+
+    def test_multiple_close(self) -> None:
+        # A NamedTemporaryFile can be closed many times without error
+        f = tempfile.NamedTemporaryFile()
+        f.write(b'abc\n')
+        f.close()
+        try:
+            f.close()
+            f.close()
+        except:
+            self.failOnException("close")
+
+    def test_context_manager(self) -> None:
+        # A NamedTemporaryFile can be used as a context manager
+        with tempfile.NamedTemporaryFile() as f:
+            self.assertTrue(os.path.exists(f.name))
+        self.assertFalse(os.path.exists(f.name))
+        def use_closed():
+            with f:
+                pass
+        self.assertRaises(ValueError, use_closed)
+
+    # How to test the mode and bufsize parameters?
+
+test_classes.append(test_NamedTemporaryFile)
+
+class test_SpooledTemporaryFile(TC):
+    """Test SpooledTemporaryFile()."""
+
+    def do_create(self, max_size: int = 0, dir: str = None, pre: str = "",
+                  suf: str = "") -> tempfile.SpooledTemporaryFile:
+        if dir is None:
+            dir = tempfile.gettempdir()
+        try:
+            file = tempfile.SpooledTemporaryFile(max_size=max_size, dir=dir, prefix=pre, suffix=suf)
+        except:
+            self.failOnException("SpooledTemporaryFile")
+
+        return file
+
+
+    def test_basic(self) -> None:
+        # SpooledTemporaryFile can create files
+        f = self.do_create()
+        self.assertFalse(f._rolled)
+        f = self.do_create(max_size=100, pre="a", suf=".txt")
+        self.assertFalse(f._rolled)
+
+    def test_del_on_close(self) -> None:
+        # A SpooledTemporaryFile is deleted when closed
+        dir = tempfile.mkdtemp()
+        try:
+            f = tempfile.SpooledTemporaryFile(max_size=10, dir=dir)
+            self.assertFalse(f._rolled)
+            f.write(b'blat ' * 5)
+            self.assertTrue(f._rolled)
+            filename = f.name
+            f.close()
+            self.assertFalse(isinstance(filename, str) and os.path.exists(filename),
+                        "SpooledTemporaryFile %s exists after close" % filename)
+        finally:
+            os.rmdir(dir)
+
+    def test_rewrite_small(self) -> None:
+        # A SpooledTemporaryFile can be written to multiple times within max_size
+        f = self.do_create(max_size=30)
+        self.assertFalse(f._rolled)
+        for i in range(5):
+            f.seek(0, 0)
+            f.write(b'x' * 20)
+        self.assertFalse(f._rolled)
+
+    def test_write_sequential(self) -> None:
+        # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
+        # over afterward
+        f = self.do_create(max_size=30)
+        self.assertFalse(f._rolled)
+        f.write(b'x' * 20)
+        self.assertFalse(f._rolled)
+        f.write(b'x' * 10)
+        self.assertFalse(f._rolled)
+        f.write(b'x')
+        self.assertTrue(f._rolled)
+
+    def test_writelines(self) -> None:
+        # Verify writelines with a SpooledTemporaryFile
+        f = self.do_create()
+        f.writelines([b'x', b'y', b'z'])
+        f.seek(0)
+        buf = f.read()
+        self.assertEqual(buf, b'xyz')
+
+    def test_writelines_sequential(self) -> None:
+        # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
+        # over afterward
+        f = self.do_create(max_size=35)
+        f.writelines([b'x' * 20, b'x' * 10, b'x' * 5])
+        self.assertFalse(f._rolled)
+        f.write(b'x')
+        self.assertTrue(f._rolled)
+
+    def test_sparse(self) -> None:
+        # Writing to a SpooledTemporaryFile at an offset beyond max_size only
+        # rolls it over when the write itself occurs, not on the seek
+        f = self.do_create(max_size=30)
+        self.assertFalse(f._rolled)
+        f.seek(100, 0)
+        self.assertFalse(f._rolled)
+        f.write(b'x')
+        self.assertTrue(f._rolled)
+
+    def test_fileno(self) -> None:
+        # A SpooledTemporaryFile should roll over to a real file on fileno()
+        f = self.do_create(max_size=30)
+        self.assertFalse(f._rolled)
+        self.assertTrue(f.fileno() > 0)
+        self.assertTrue(f._rolled)
+
+    def test_multiple_close_before_rollover(self) -> None:
+        # A SpooledTemporaryFile can be closed many times without error
+        f = tempfile.SpooledTemporaryFile()
+        f.write(b'abc\n')
+        self.assertFalse(f._rolled)
+        f.close()
+        try:
+            f.close()
+            f.close()
+        except:
+            self.failOnException("close")
+
+    def test_multiple_close_after_rollover(self) -> None:
+        # A SpooledTemporaryFile can be closed many times without error
+        f = tempfile.SpooledTemporaryFile(max_size=1)
+        f.write(b'abc\n')
+        self.assertTrue(f._rolled)
+        f.close()
+        try:
+            f.close()
+            f.close()
+        except:
+            self.failOnException("close")
+
+    def test_bound_methods(self) -> None:
+        # It should be OK to steal a bound method from a SpooledTemporaryFile
+        # and use it independently; when the file rolls over, those bound
+        # methods should continue to function
+        f = self.do_create(max_size=30)
+        read = f.read
+        write = f.write
+        seek = f.seek
+
+        write(b"a" * 35)
+        write(b"b" * 35)
+        seek(0, 0)
+        self.assertEqual(read(70), b'a'*35 + b'b'*35)
+
+    def test_text_mode(self) -> None:
+        # Creating a SpooledTemporaryFile with a text mode should produce
+        # a file object reading and writing (Unicode) text strings.
+        f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10)
+        f.write("abc\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "abc\n")
+        f.write("def\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "abc\ndef\n")
+        f.write("xyzzy\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "abc\ndef\nxyzzy\n")
+        # Check that Ctrl+Z doesn't truncate the file
+        f.write("foo\x1abar\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "abc\ndef\nxyzzy\nfoo\x1abar\n")
+
+    def test_text_newline_and_encoding(self) -> None:
+        f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10,
+                                          newline='', encoding='utf-8')
+        f.write("\u039B\r\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "\u039B\r\n")
+        self.assertFalse(f._rolled)
+
+        f.write("\u039B" * 20 + "\r\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "\u039B\r\n" + ("\u039B" * 20) + "\r\n")
+        self.assertTrue(f._rolled)
+
+    def test_context_manager_before_rollover(self) -> None:
+        # A SpooledTemporaryFile can be used as a context manager
+        with tempfile.SpooledTemporaryFile(max_size=1) as f:
+            self.assertFalse(f._rolled)
+            self.assertFalse(f.closed)
+        self.assertTrue(f.closed)
+        def use_closed():
+            with f:
+                pass
+        self.assertRaises(ValueError, use_closed)
+
+    def test_context_manager_during_rollover(self) -> None:
+        # A SpooledTemporaryFile can be used as a context manager
+        with tempfile.SpooledTemporaryFile(max_size=1) as f:
+            self.assertFalse(f._rolled)
+            f.write(b'abc\n')
+            f.flush()
+            self.assertTrue(f._rolled)
+            self.assertFalse(f.closed)
+        self.assertTrue(f.closed)
+        def use_closed():
+            with f:
+                pass
+        self.assertRaises(ValueError, use_closed)
+
+    def test_context_manager_after_rollover(self) -> None:
+        # A SpooledTemporaryFile can be used as a context manager
+        f = tempfile.SpooledTemporaryFile(max_size=1)
+        f.write(b'abc\n')
+        f.flush()
+        self.assertTrue(f._rolled)
+        with f:
+            self.assertFalse(f.closed)
+        self.assertTrue(f.closed)
+        def use_closed():
+            with f:
+                pass
+        self.assertRaises(ValueError, use_closed)
+
+
+test_classes.append(test_SpooledTemporaryFile)
+
+
+class test_TemporaryFile(TC):
+    """Test TemporaryFile()."""
+
+    def test_basic(self) -> None:
+        # TemporaryFile can create files
+        # No point in testing the name params - the file has no name.
+        try:
+            tempfile.TemporaryFile()
+        except:
+            self.failOnException("TemporaryFile")
+
+    def test_has_no_name(self) -> None:
+        # TemporaryFile creates files with no names (on this system)
+        dir = tempfile.mkdtemp()
+        f = tempfile.TemporaryFile(dir=dir)
+        f.write(b'blat')
+
+        # Sneaky: because this file has no name, it should not prevent
+        # us from removing the directory it was created in.
+        try:
+            os.rmdir(dir)
+        except:
+            ei = sys.exc_info()
+            # cleanup
+            f.close()
+            os.rmdir(dir)
+            self.failOnException("rmdir", ei)
+
+    def test_multiple_close(self) -> None:
+        # A TemporaryFile can be closed many times without error
+        f = tempfile.TemporaryFile()
+        f.write(b'abc\n')
+        f.close()
+        try:
+            f.close()
+            f.close()
+        except:
+            self.failOnException("close")
+
+    # How to test the mode and bufsize parameters?
+    def test_mode_and_encoding(self) -> None:
+
+        def roundtrip(input: AnyStr, *args: Any, **kwargs: Any) -> None:
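+            # Write 'input' to a TemporaryFile opened with the given mode and
+            # encoding arguments and check that reading it back is lossless.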
+            with tempfile.TemporaryFile(*args, **kwargs) as fileobj:
+                fileobj.write(input)
+                fileobj.seek(0)
+                self.assertEqual(input, fileobj.read())
+
+        roundtrip(b"1234", "w+b")
+        roundtrip("abdc\n", "w+")
+        roundtrip("\u039B", "w+", encoding="utf-16")
+        roundtrip("foo\r\n", "w+", newline="")
+
+
+if tempfile.NamedTemporaryFile is not tempfile.TemporaryFile:
+    test_classes.append(test_TemporaryFile)
+
+
+# Helper for test_del_on_shutdown
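+# Context manager that temporarily replaces every attribute of the given
+# modules with None and restores the originals on exit, mimicking the state
+# of modules during interpreter shutdown.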
+class NulledModules:
+    def __init__(self, *modules: Any) -> None:
+        self.refs = [mod.__dict__ for mod in modules]
+        self.contents = [ref.copy() for ref in self.refs]
+
+    def __enter__(self) -> None:
+        for d in self.refs:
+            for key in d:
+                d[key] = None
+
+    def __exit__(self, *exc_info: Any) -> None:
+        for d, c in zip(self.refs, self.contents):
+            d.clear()
+            d.update(c)
+
+class test_TemporaryDirectory(TC):
+    """Test TemporaryDirectory()."""
+
+    def do_create(self, dir: str = None, pre: str = "", suf: str = "",
+                  recurse: int = 1) -> tempfile.TemporaryDirectory:
+        if dir is None:
+            dir = tempfile.gettempdir()
+        try:
+            tmp = tempfile.TemporaryDirectory(dir=dir, prefix=pre, suffix=suf)
+        except:
+            self.failOnException("TemporaryDirectory")
+        self.nameCheck(tmp.name, dir, pre, suf)
+        # Create a subdirectory and some files
+        if recurse:
+            self.do_create(tmp.name, pre, suf, recurse-1)
+        with open(os.path.join(tmp.name, "test.txt"), "wb") as f:
+            f.write(b"Hello world!")
+        return tmp
+
+    def test_mkdtemp_failure(self) -> None:
+        # Check no additional exception if mkdtemp fails
+        # Previously would raise AttributeError instead
+        # (noted as part of Issue #10188)
+        with tempfile.TemporaryDirectory() as nonexistent:
+            pass
+        with self.assertRaises(os.error):
+            tempfile.TemporaryDirectory(dir=nonexistent)
+
+    def test_explicit_cleanup(self) -> None:
+        # A TemporaryDirectory is deleted when cleaned up
+        dir = tempfile.mkdtemp()
+        try:
+            d = self.do_create(dir=dir)
+            self.assertTrue(os.path.exists(d.name),
+                            "TemporaryDirectory %s does not exist" % d.name)
+            d.cleanup()
+            self.assertFalse(os.path.exists(d.name),
+                        "TemporaryDirectory %s exists after cleanup" % d.name)
+        finally:
+            os.rmdir(dir)
+
+    @support.skip_unless_symlink
+    def test_cleanup_with_symlink_to_a_directory(self) -> None:
+        # cleanup() should not follow symlinks to directories (issue #12464)
+        d1 = self.do_create()
+        d2 = self.do_create()
+
+        # Symlink d1/foo -> d2
+        os.symlink(d2.name, os.path.join(d1.name, "foo"))
+
+        # This call to cleanup() should not follow the "foo" symlink
+        d1.cleanup()
+
+        self.assertFalse(os.path.exists(d1.name),
+                         "TemporaryDirectory %s exists after cleanup" % d1.name)
+        self.assertTrue(os.path.exists(d2.name),
+                        "Directory pointed to by a symlink was deleted")
+        self.assertEqual(os.listdir(d2.name), ['test.txt'],
+                         "Contents of the directory pointed to by a symlink "
+                         "were deleted")
+        d2.cleanup()
+
+    @support.cpython_only
+    def test_del_on_collection(self) -> None:
+        # A TemporaryDirectory is deleted when garbage collected
+        dir = tempfile.mkdtemp()
+        try:
+            d = self.do_create(dir=dir)
+            name = d.name
+            del d # Rely on refcounting to invoke __del__
+            self.assertFalse(os.path.exists(name),
+                        "TemporaryDirectory %s exists after __del__" % name)
+        finally:
+            os.rmdir(dir)
+
+    @unittest.expectedFailure # See issue #10188
+    def test_del_on_shutdown(self) -> None:
+        # A TemporaryDirectory may be cleaned up during shutdown
+        # Make sure it works with the relevant modules nulled out
+        with self.do_create() as dir:
+            d = self.do_create(dir=dir)
+            # Mimic the nulling out of modules that
+            # occurs during system shutdown
+            modules = [os, os.path]
+            if has_stat:
+                modules.append(stat)
+            # Currently broken, so suppress the warning
+            # that is otherwise emitted on stdout
+            with support.captured_stderr() as err:
+                with NulledModules(*modules):
+                    d.cleanup()
+            # Currently broken, so stop spurious exception by
+            # indicating the object has already been closed
+            d._closed = True
+            # And this assert will fail, as expected by the
+            # unittest decorator...
+            self.assertFalse(os.path.exists(d.name),
+                        "TemporaryDirectory %s exists after cleanup" % d.name)
+
+    def test_warnings_on_cleanup(self) -> None:
+        # Two kinds of warning on shutdown
+        #   Issue 10888: may write to stderr if modules are nulled out
+        #   ResourceWarning will be triggered by __del__
+        with self.do_create() as dir:
+            if os.sep != '\\':
+                # Embed a backslash in order to make sure string escaping
+                # in the displayed error message is dealt with correctly
+                suffix = '\\check_backslash_handling'
+            else:
+                suffix = ''
+            d = self.do_create(dir=dir, suf=suffix)
+
+            # Check for the Issue 10888 message
+            modules = [os, os.path]
+            if has_stat:
+                modules.append(stat)
+            with support.captured_stderr() as err:
+                with NulledModules(*modules):
+                    d.cleanup()
+            message = err.getvalue().replace('\\\\', '\\')
+            self.assertIn("while cleaning up",  message)
+            self.assertIn(d.name,  message)
+
+            # Check for the resource warning
+            with support.check_warnings(('Implicitly', ResourceWarning), quiet=False):
+                warnings.filterwarnings("always", category=ResourceWarning)
+                d.__del__()
+            self.assertFalse(os.path.exists(d.name),
+                        "TemporaryDirectory %s exists after __del__" % d.name)
+
+    def test_multiple_close(self) -> None:
+        # Can be cleaned up many times without error
+        d = self.do_create()
+        d.cleanup()
+        try:
+            d.cleanup()
+            d.cleanup()
+        except:
+            self.failOnException("cleanup")
+
+    def test_context_manager(self) -> None:
+        # Can be used as a context manager
+        d = self.do_create()
+        with d as name:
+            self.assertTrue(os.path.exists(name))
+            self.assertEqual(name, d.name)
+        self.assertFalse(os.path.exists(name))
+
+
+test_classes.append(test_TemporaryDirectory)
+
+def test_main() -> None:
+    support.run_unittest(*test_classes)
+
+if __name__ == "__main__":
+    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_textwrap.py b/test-data/stdlib-samples/3.2/test/test_textwrap.py
new file mode 100644
index 0000000..79d921a
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_textwrap.py
@@ -0,0 +1,601 @@
+#
+# Test suite for the textwrap module.
+#
+# Original tests written by Greg Ward <gward at python.net>.
+# Converted to PyUnit by Peter Hansen <peter at engcorp.com>.
+# Currently maintained by Greg Ward.
+#
+# $Id$
+#
+
+import unittest
+from test import support
+
+from typing import Any, List, Sequence
+
+from textwrap import TextWrapper, wrap, fill, dedent
+
+
+class BaseTestCase(unittest.TestCase):
+    '''Parent class with utility methods for textwrap tests.'''
+
+    wrapper = None  # type: TextWrapper
+
+    def show(self, textin: Sequence[str]) -> str:
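+        # Render either a list of wrapped lines (one per row, with indices) or
+        # a plain string so it can be embedded in assertion messages.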
+        if isinstance(textin, list):
+            results = []  # type: List[str]
+            for i in range(len(textin)):
+                results.append("  %d: %r" % (i, textin[i]))
+            result = '\n'.join(results)
+        elif isinstance(textin, str):
+            result = "  %s\n" % repr(textin)
+        return result
+
+
+    def check(self, result: Sequence[str], expect: Sequence[str]) -> None:
+        self.assertEqual(result, expect,
+            'expected:\n%s\nbut got:\n%s' % (
+                self.show(expect), self.show(result)))
+
+    def check_wrap(self, text: str, width: int, expect: Sequence[str],
+                   **kwargs: Any) -> None:
+        result = wrap(text, width, **kwargs)
+        self.check(result, expect)
+
+    def check_split(self, text: str, expect: Sequence[str]) -> None:
+        result = self.wrapper._split(text)
+        self.assertEqual(result, expect,
+                         "\nexpected %r\n"
+                         "but got  %r" % (expect, result))
+
+
+class WrapTestCase(BaseTestCase):
+
+    def setUp(self) -> None:
+        self.wrapper = TextWrapper(width=45)
+
+    def test_simple(self) -> None:
+        # Simple case: just words, spaces, and a bit of punctuation
+
+        text = "Hello there, how are you this fine day?  I'm glad to hear it!"
+
+        self.check_wrap(text, 12,
+                        ["Hello there,",
+                         "how are you",
+                         "this fine",
+                         "day?  I'm",
+                         "glad to hear",
+                         "it!"])
+        self.check_wrap(text, 42,
+                        ["Hello there, how are you this fine day?",
+                         "I'm glad to hear it!"])
+        self.check_wrap(text, 80, [text])
+
+
+    def test_whitespace(self) -> None:
+        # Whitespace munging and end-of-sentence detection
+
+        text = """\
+This is a paragraph that already has
+line breaks.  But some of its lines are much longer than the others,
+so it needs to be wrapped.
+Some lines are \ttabbed too.
+What a mess!
+"""
+
+        expect = ["This is a paragraph that already has line",
+                  "breaks.  But some of its lines are much",
+                  "longer than the others, so it needs to be",
+                  "wrapped.  Some lines are  tabbed too.  What a",
+                  "mess!"]
+
+        wrapper = TextWrapper(45, fix_sentence_endings=True)
+        result = wrapper.wrap(text)
+        self.check(result, expect)
+
+        results = wrapper.fill(text)
+        self.check(results, '\n'.join(expect))
+
+    def test_fix_sentence_endings(self) -> None:
+        wrapper = TextWrapper(60, fix_sentence_endings=True)
+
+        # SF #847346: ensure that fix_sentence_endings=True does the
+        # right thing even on input short enough that it doesn't need to
+        # be wrapped.
+        text = "A short line. Note the single space."
+        expect = ["A short line.  Note the single space."]
+        self.check(wrapper.wrap(text), expect)
+
+        # Test some of the hairy end cases that _fix_sentence_endings()
+        # is supposed to handle (the easy stuff is tested in
+        # test_whitespace() above).
+        text = "Well, Doctor? What do you think?"
+        expect = ["Well, Doctor?  What do you think?"]
+        self.check(wrapper.wrap(text), expect)
+
+        text = "Well, Doctor?\nWhat do you think?"
+        self.check(wrapper.wrap(text), expect)
+
+        text = 'I say, chaps! Anyone for "tennis?"\nHmmph!'
+        expect = ['I say, chaps!  Anyone for "tennis?"  Hmmph!']
+        self.check(wrapper.wrap(text), expect)
+
+        wrapper.width = 20
+        expect = ['I say, chaps!', 'Anyone for "tennis?"', 'Hmmph!']
+        self.check(wrapper.wrap(text), expect)
+
+        text = 'And she said, "Go to hell!"\nCan you believe that?'
+        expect = ['And she said, "Go to',
+                  'hell!"  Can you',
+                  'believe that?']
+        self.check(wrapper.wrap(text), expect)
+
+        wrapper.width = 60
+        expect = ['And she said, "Go to hell!"  Can you believe that?']
+        self.check(wrapper.wrap(text), expect)
+
+        text = 'File stdio.h is nice.'
+        expect = ['File stdio.h is nice.']
+        self.check(wrapper.wrap(text), expect)
+
+    def test_wrap_short(self) -> None:
+        # Wrapping to make short lines longer
+
+        text = "This is a\nshort paragraph."
+
+        self.check_wrap(text, 20, ["This is a short",
+                                   "paragraph."])
+        self.check_wrap(text, 40, ["This is a short paragraph."])
+
+
+    def test_wrap_short_1line(self) -> None:
+        # Test endcases
+
+        text = "This is a short line."
+
+        self.check_wrap(text, 30, ["This is a short line."])
+        self.check_wrap(text, 30, ["(1) This is a short line."],
+                        initial_indent="(1) ")
+
+
+    def test_hyphenated(self) -> None:
+        # Test breaking hyphenated words
+
+        text = ("this-is-a-useful-feature-for-"
+                "reformatting-posts-from-tim-peters'ly")
+
+        self.check_wrap(text, 40,
+                        ["this-is-a-useful-feature-for-",
+                         "reformatting-posts-from-tim-peters'ly"])
+        self.check_wrap(text, 41,
+                        ["this-is-a-useful-feature-for-",
+                         "reformatting-posts-from-tim-peters'ly"])
+        self.check_wrap(text, 42,
+                        ["this-is-a-useful-feature-for-reformatting-",
+                         "posts-from-tim-peters'ly"])
+
+    def test_hyphenated_numbers(self) -> None:
+        # Test that hyphenated numbers (e.g. dates) are not broken like words.
+        text = ("Python 1.0.0 was released on 1994-01-26.  Python 1.0.1 was\n"
+                "released on 1994-02-15.")
+
+        self.check_wrap(text, 30, ['Python 1.0.0 was released on',
+                                   '1994-01-26.  Python 1.0.1 was',
+                                   'released on 1994-02-15.'])
+        self.check_wrap(text, 40, ['Python 1.0.0 was released on 1994-01-26.',
+                                   'Python 1.0.1 was released on 1994-02-15.'])
+
+        text = "I do all my shopping at 7-11."
+        self.check_wrap(text, 25, ["I do all my shopping at",
+                                   "7-11."])
+        self.check_wrap(text, 27, ["I do all my shopping at",
+                                   "7-11."])
+        self.check_wrap(text, 29, ["I do all my shopping at 7-11."])
+
+    def test_em_dash(self) -> None:
+        # Test text with em-dashes
+        text = "Em-dashes should be written -- thus."
+        self.check_wrap(text, 25,
+                        ["Em-dashes should be",
+                         "written -- thus."])
+
+        # Probe the boundaries of the properly written em-dash,
+        # i.e. " -- ".
+        self.check_wrap(text, 29,
+                        ["Em-dashes should be written",
+                         "-- thus."])
+        expect = ["Em-dashes should be written --",
+                  "thus."]
+        self.check_wrap(text, 30, expect)
+        self.check_wrap(text, 35, expect)
+        self.check_wrap(text, 36,
+                        ["Em-dashes should be written -- thus."])
+
+        # The improperly written em-dash is handled too, because
+        # it's adjacent to non-whitespace on both sides.
+        text = "You can also do--this or even---this."
+        expect = ["You can also do",
+                  "--this or even",
+                  "---this."]
+        self.check_wrap(text, 15, expect)
+        self.check_wrap(text, 16, expect)
+        expect = ["You can also do--",
+                  "this or even---",
+                  "this."]
+        self.check_wrap(text, 17, expect)
+        self.check_wrap(text, 19, expect)
+        expect = ["You can also do--this or even",
+                  "---this."]
+        self.check_wrap(text, 29, expect)
+        self.check_wrap(text, 31, expect)
+        expect = ["You can also do--this or even---",
+                  "this."]
+        self.check_wrap(text, 32, expect)
+        self.check_wrap(text, 35, expect)
+
+        # All of the above behaviour could be deduced by probing the
+        # _split() method.
+        text = "Here's an -- em-dash and--here's another---and another!"
+        expect = ["Here's", " ", "an", " ", "--", " ", "em-", "dash", " ",
+                  "and", "--", "here's", " ", "another", "---",
+                  "and", " ", "another!"]
+        self.check_split(text, expect)
+
+        text = "and then--bam!--he was gone"
+        expect = ["and", " ", "then", "--", "bam!", "--",
+                  "he", " ", "was", " ", "gone"]
+        self.check_split(text, expect)
+
+
+    def test_unix_options (self) -> None:
+        # Test that Unix-style command-line options are wrapped correctly.
+        # Both Optik (OptionParser) and Docutils rely on this behaviour!
+
+        text = "You should use the -n option, or --dry-run in its long form."
+        self.check_wrap(text, 20,
+                        ["You should use the",
+                         "-n option, or --dry-",
+                         "run in its long",
+                         "form."])
+        self.check_wrap(text, 21,
+                        ["You should use the -n",
+                         "option, or --dry-run",
+                         "in its long form."])
+        expect = ["You should use the -n option, or",
+                  "--dry-run in its long form."]
+        self.check_wrap(text, 32, expect)
+        self.check_wrap(text, 34, expect)
+        self.check_wrap(text, 35, expect)
+        self.check_wrap(text, 38, expect)
+        expect = ["You should use the -n option, or --dry-",
+                  "run in its long form."]
+        self.check_wrap(text, 39, expect)
+        self.check_wrap(text, 41, expect)
+        expect = ["You should use the -n option, or --dry-run",
+                  "in its long form."]
+        self.check_wrap(text, 42, expect)
+
+        # Again, all of the above can be deduced from _split().
+        text = "the -n option, or --dry-run or --dryrun"
+        expect = ["the", " ", "-n", " ", "option,", " ", "or", " ",
+                  "--dry-", "run", " ", "or", " ", "--dryrun"]
+        self.check_split(text, expect)
+
+    def test_funky_hyphens (self) -> None:
+        # Screwy edge cases cooked up by David Goodger.  All reported
+        # in SF bug #596434.
+        self.check_split("what the--hey!", ["what", " ", "the", "--", "hey!"])
+        self.check_split("what the--", ["what", " ", "the--"])
+        self.check_split("what the--.", ["what", " ", "the--."])
+        self.check_split("--text--.", ["--text--."])
+
+        # When I first read bug #596434, this is what I thought David
+        # was talking about.  I was wrong; these have always worked
+        # fine.  The real problem is tested in test_funky_parens()
+        # below...
+        self.check_split("--option", ["--option"])
+        self.check_split("--option-opt", ["--option-", "opt"])
+        self.check_split("foo --option-opt bar",
+                         ["foo", " ", "--option-", "opt", " ", "bar"])
+
+    def test_punct_hyphens(self) -> None:
+        # Oh bother, SF #965425 found another problem with hyphens --
+        # hyphenated words in single quotes weren't handled correctly.
+        # In fact, the bug is that *any* punctuation around a hyphenated
+        # word was handled incorrectly, except for a leading "--", which
+        # was special-cased for Optik and Docutils.  So test a variety
+        # of styles of punctuation around a hyphenated word.
+        # (Actually this is based on an Optik bug report, #813077).
+        self.check_split("the 'wibble-wobble' widget",
+                         ['the', ' ', "'wibble-", "wobble'", ' ', 'widget'])
+        self.check_split('the "wibble-wobble" widget',
+                         ['the', ' ', '"wibble-', 'wobble"', ' ', 'widget'])
+        self.check_split("the (wibble-wobble) widget",
+                         ['the', ' ', "(wibble-", "wobble)", ' ', 'widget'])
+        self.check_split("the ['wibble-wobble'] widget",
+                         ['the', ' ', "['wibble-", "wobble']", ' ', 'widget'])
+
+    def test_funky_parens (self) -> None:
+        # Second part of SF bug #596434: long option strings inside
+        # parentheses.
+        self.check_split("foo (--option) bar",
+                         ["foo", " ", "(--option)", " ", "bar"])
+
+        # Related stuff -- make sure parens work in simpler contexts.
+        self.check_split("foo (bar) baz",
+                         ["foo", " ", "(bar)", " ", "baz"])
+        self.check_split("blah (ding dong), wubba",
+                         ["blah", " ", "(ding", " ", "dong),",
+                          " ", "wubba"])
+
+    def test_initial_whitespace(self) -> None:
+        # SF bug #622849 reported inconsistent handling of leading
+        # whitespace; let's test that a bit, shall we?
+        text = " This is a sentence with leading whitespace."
+        self.check_wrap(text, 50,
+                        [" This is a sentence with leading whitespace."])
+        self.check_wrap(text, 30,
+                        [" This is a sentence with", "leading whitespace."])
+
+    def test_no_drop_whitespace(self) -> None:
+        # SF patch #1581073
+        text = " This is a    sentence with     much whitespace."
+        self.check_wrap(text, 10,
+                        [" This is a", "    ", "sentence ",
+                         "with     ", "much white", "space."],
+                        drop_whitespace=False)
+
+    def test_split(self) -> None:
+        # Ensure that the standard _split() method works as advertised
+        # in the comments
+
+        text = "Hello there -- you goof-ball, use the -b option!"
+
+        result = self.wrapper._split(text)
+        self.check(result,
+             ["Hello", " ", "there", " ", "--", " ", "you", " ", "goof-",
+              "ball,", " ", "use", " ", "the", " ", "-b", " ",  "option!"])
+
+    def test_break_on_hyphens(self) -> None:
+        # Ensure that the break_on_hyphens attributes work
+        text = "yaba daba-doo"
+        self.check_wrap(text, 10, ["yaba daba-", "doo"],
+                        break_on_hyphens=True)
+        self.check_wrap(text, 10, ["yaba", "daba-doo"],
+                        break_on_hyphens=False)
+
+    def test_bad_width(self) -> None:
+        # Ensure that width <= 0 is caught.
+        text = "Whatever, it doesn't matter."
+        self.assertRaises(ValueError, wrap, text, 0)
+        self.assertRaises(ValueError, wrap, text, -1)
+
+    def test_no_split_at_umlaut(self) -> None:
+        text = "Die Empf\xe4nger-Auswahl"
+        self.check_wrap(text, 13, ["Die", "Empf\xe4nger-", "Auswahl"])
+
+    def test_umlaut_followed_by_dash(self) -> None:
+        text = "aa \xe4\xe4-\xe4\xe4"
+        self.check_wrap(text, 7, ["aa \xe4\xe4-", "\xe4\xe4"])
+
+
+class LongWordTestCase (BaseTestCase):
+    def setUp(self) -> None:
+        self.wrapper = TextWrapper()
+        self.text = '''\
+Did you say "supercalifragilisticexpialidocious?"
+How *do* you spell that odd word, anyways?
+'''
+
+    def test_break_long(self) -> None:
+        # Wrap text with long words and lots of punctuation
+
+        self.check_wrap(self.text, 30,
+                        ['Did you say "supercalifragilis',
+                         'ticexpialidocious?" How *do*',
+                         'you spell that odd word,',
+                         'anyways?'])
+        self.check_wrap(self.text, 50,
+                        ['Did you say "supercalifragilisticexpialidocious?"',
+                         'How *do* you spell that odd word, anyways?'])
+
+        # SF bug 797650.  Prevent an infinite loop by making sure that at
+        # least one character gets split off on every pass.
+        self.check_wrap('-'*10+'hello', 10,
+                        ['----------',
+                         '               h',
+                         '               e',
+                         '               l',
+                         '               l',
+                         '               o'],
+                        subsequent_indent = ' '*15)
+
+        # bug 1146.  Prevent a long word from being wrongly wrapped when the
+        # preceding word is exactly one character shorter than the width
+        self.check_wrap(self.text, 12,
+                        ['Did you say ',
+                         '"supercalifr',
+                         'agilisticexp',
+                         'ialidocious?',
+                         '" How *do*',
+                         'you spell',
+                         'that odd',
+                         'word,',
+                         'anyways?'])
+
+    def test_nobreak_long(self) -> None:
+        # Test with break_long_words disabled
+        self.wrapper.break_long_words = False
+        self.wrapper.width = 30
+        expect = ['Did you say',
+                  '"supercalifragilisticexpialidocious?"',
+                  'How *do* you spell that odd',
+                  'word, anyways?'
+                  ]
+        result = self.wrapper.wrap(self.text)
+        self.check(result, expect)
+
+        # Same thing with kwargs passed to standalone wrap() function.
+        result = wrap(self.text, width=30, break_long_words=0)
+        self.check(result, expect)
+
+
+class IndentTestCases(BaseTestCase):
+
+    # called before each test method
+    def setUp(self) -> None:
+        self.text = '''\
+This paragraph will be filled, first without any indentation,
+and then with some (including a hanging indent).'''
+
+
+    def test_fill(self) -> None:
+        # Test the fill() method
+
+        expect = '''\
+This paragraph will be filled, first
+without any indentation, and then with
+some (including a hanging indent).'''
+
+        result = fill(self.text, 40)
+        self.check(result, expect)
+
+
+    def test_initial_indent(self) -> None:
+        # Test initial_indent parameter
+
+        expect = ["     This paragraph will be filled,",
+                  "first without any indentation, and then",
+                  "with some (including a hanging indent)."]
+        result = wrap(self.text, 40, initial_indent="     ")
+        self.check(result, expect)
+
+        expects = "\n".join(expect)
+        results = fill(self.text, 40, initial_indent="     ")
+        self.check(results, expects)
+
+
+    def test_subsequent_indent(self) -> None:
+        # Test subsequent_indent parameter
+
+        expect = '''\
+  * This paragraph will be filled, first
+    without any indentation, and then
+    with some (including a hanging
+    indent).'''
+
+        result = fill(self.text, 40,
+                      initial_indent="  * ", subsequent_indent="    ")
+        self.check(result, expect)
+
+
+# Despite the similar names, DedentTestCase is *not* the inverse
+# of IndentTestCase!
+class DedentTestCase(unittest.TestCase):
+
+    def assertUnchanged(self, text: str) -> None:
+        """assert that dedent() has no effect on 'text'"""
+        self.assertEqual(text, dedent(text))
+
+    def test_dedent_nomargin(self) -> None:
+        # No lines indented.
+        text = "Hello there.\nHow are you?\nOh good, I'm glad."
+        self.assertUnchanged(text)
+
+        # Similar, with a blank line.
+        text = "Hello there.\n\nBoo!"
+        self.assertUnchanged(text)
+
+        # Some lines indented, but overall margin is still zero.
+        text = "Hello there.\n  This is indented."
+        self.assertUnchanged(text)
+
+        # Again, add a blank line.
+        text = "Hello there.\n\n  Boo!\n"
+        self.assertUnchanged(text)
+
+    def test_dedent_even(self) -> None:
+        # All lines indented by two spaces.
+        text = "  Hello there.\n  How are ya?\n  Oh good."
+        expect = "Hello there.\nHow are ya?\nOh good."
+        self.assertEqual(expect, dedent(text))
+
+        # Same, with blank lines.
+        text = "  Hello there.\n\n  How are ya?\n  Oh good.\n"
+        expect = "Hello there.\n\nHow are ya?\nOh good.\n"
+        self.assertEqual(expect, dedent(text))
+
+        # Now indent one of the blank lines.
+        text = "  Hello there.\n  \n  How are ya?\n  Oh good.\n"
+        expect = "Hello there.\n\nHow are ya?\nOh good.\n"
+        self.assertEqual(expect, dedent(text))
+
+    def test_dedent_uneven(self) -> None:
+        # Lines indented unevenly.
+        text = '''\
+        def foo():
+            while 1:
+                return foo
+        '''
+        expect = '''\
+def foo():
+    while 1:
+        return foo
+'''
+        self.assertEqual(expect, dedent(text))
+
+        # Uneven indentation with a blank line.
+        text = "  Foo\n    Bar\n\n   Baz\n"
+        expect = "Foo\n  Bar\n\n Baz\n"
+        self.assertEqual(expect, dedent(text))
+
+        # Uneven indentation with a whitespace-only line.
+        text = "  Foo\n    Bar\n \n   Baz\n"
+        expect = "Foo\n  Bar\n\n Baz\n"
+        self.assertEqual(expect, dedent(text))
+
+    # dedent() should not mangle internal tabs
+    def test_dedent_preserve_internal_tabs(self) -> None:
+        text = "  hello\tthere\n  how are\tyou?"
+        expect = "hello\tthere\nhow are\tyou?"
+        self.assertEqual(expect, dedent(text))
+
+        # make sure that it preserves tabs when it's not making any
+        # changes at all
+        self.assertEqual(expect, dedent(expect))
+
+    # dedent() should not mangle tabs in the margin (i.e.
+    # tabs and spaces both count as margin, but are *not*
+    # considered equivalent)
+    def test_dedent_preserve_margin_tabs(self) -> None:
+        text = "  hello there\n\thow are you?"
+        self.assertUnchanged(text)
+
+        # same effect even if we have 8 spaces
+        text = "        hello there\n\thow are you?"
+        self.assertUnchanged(text)
+
+        # dedent() only removes whitespace that can be uniformly removed!
+        text = "\thello there\n\thow are you?"
+        expect = "hello there\nhow are you?"
+        self.assertEqual(expect, dedent(text))
+
+        text = "  \thello there\n  \thow are you?"
+        self.assertEqual(expect, dedent(text))
+
+        text = "  \t  hello there\n  \t  how are you?"
+        self.assertEqual(expect, dedent(text))
+
+        text = "  \thello there\n  \t  how are you?"
+        expect = "hello there\n  how are you?"
+        self.assertEqual(expect, dedent(text))
+
+
+def test_main() -> None:
+    support.run_unittest(WrapTestCase,
+                              LongWordTestCase,
+                              IndentTestCases,
+                              DedentTestCase)
+
+if __name__ == '__main__':
+    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/tf_inherit_check.py b/test-data/stdlib-samples/3.2/test/tf_inherit_check.py
new file mode 100644
index 0000000..92ebd95
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/tf_inherit_check.py
@@ -0,0 +1,25 @@
+# Helper script for test_tempfile.py.  argv[2] is the number of a file
+# descriptor which should _not_ be open.  Check this by attempting to
+# write to it -- if we succeed, something is wrong.
+
+import sys
+import os
+
+verbose = (sys.argv[1] == 'v')
+try:
+    fd = int(sys.argv[2])
+
+    try:
+        os.write(fd, b"blat")
+    except os.error:
+        # Success -- could not write to fd.
+        sys.exit(0)
+    else:
+        if verbose:
+            sys.stderr.write("fd %d is open in child" % fd)
+        sys.exit(1)
+
+except Exception:
+    if verbose:
+        raise
+    sys.exit(1)
diff --git a/test-data/stdlib-samples/3.2/textwrap.py b/test-data/stdlib-samples/3.2/textwrap.py
new file mode 100644
index 0000000..a6d0266
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/textwrap.py
@@ -0,0 +1,391 @@
+"""Text wrapping and filling.
+"""
+
+# Copyright (C) 1999-2001 Gregory P. Ward.
+# Copyright (C) 2002, 2003 Python Software Foundation.
+# Written by Greg Ward <gward at python.net>
+
+import string, re
+
+from typing import Dict, List, Any
+
+__all__ = ['TextWrapper', 'wrap', 'fill', 'dedent']
+
+# Hardcode the recognized whitespace characters to the US-ASCII
+# whitespace characters.  The main reason for doing this is that in
+# ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales
+# that character winds up in string.whitespace.  Respecting
+# string.whitespace in those cases would 1) make textwrap treat 0xa0 the
+# same as any other whitespace char, which is clearly wrong (it's a
+# *non-breaking* space), 2) possibly cause problems with Unicode,
+# since 0xa0 is not in range(128).
+_whitespace = '\t\n\x0b\x0c\r '
+
+class TextWrapper:
+    """
+    Object for wrapping/filling text.  The public interface consists of
+    the wrap() and fill() methods; the other methods are just there for
+    subclasses to override in order to tweak the default behaviour.
+    If you want to completely replace the main wrapping algorithm,
+    you'll probably have to override _wrap_chunks().
+
+    Several instance attributes control various aspects of wrapping:
+      width (default: 70)
+        the maximum width of wrapped lines (unless break_long_words
+        is false)
+      initial_indent (default: "")
+        string that will be prepended to the first line of wrapped
+        output.  Counts towards the line's width.
+      subsequent_indent (default: "")
+        string that will be prepended to all lines save the first
+        of wrapped output; also counts towards each line's width.
+      expand_tabs (default: true)
+        Expand tabs in input text to spaces before further processing.
+        Each tab will become 1 .. 8 spaces, depending on its position in
+        its line.  If false, each tab is treated as a single character.
+      replace_whitespace (default: true)
+        Replace all whitespace characters in the input text by spaces
+        after tab expansion.  Note that if expand_tabs is false and
+        replace_whitespace is true, every tab will be converted to a
+        single space!
+      fix_sentence_endings (default: false)
+        Ensure that sentence-ending punctuation is always followed
+        by two spaces.  Off by default because the algorithm is
+        (unavoidably) imperfect.
+      break_long_words (default: true)
+        Break words longer than 'width'.  If false, those words will not
+        be broken, and some lines might be longer than 'width'.
+      break_on_hyphens (default: true)
+        Allow breaking hyphenated words. If true, wrapping will occur
+        preferably on whitespace and right after hyphens in
+        compound words.
+      drop_whitespace (default: true)
+        Drop leading and trailing whitespace from lines.
+    """
+
+    unicode_whitespace_trans = {}  # type: Dict[int, int]
+    uspace = ord(' ')
+    for x in _whitespace:
+        unicode_whitespace_trans[ord(x)] = uspace
+
+    # This funky little regex is just the trick for splitting
+    # text up into word-wrappable chunks.  E.g.
+    #   "Hello there -- you goof-ball, use the -b option!"
+    # splits into
+    #   Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option!
+    # (after stripping out empty strings).
+    wordsep_re = re.compile(
+        r'(\s+|'                                  # any whitespace
+        r'[^\s\w]*\w+[^0-9\W]-(?=\w+[^0-9\W])|'   # hyphenated words
+        r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))')   # em-dash
+
+    # This less funky little regex just splits on recognized spaces. E.g.
+    #   "Hello there -- you goof-ball, use the -b option!"
+    # splits into
+    #   Hello/ /there/ /--/ /you/ /goof-ball,/ /use/ /the/ /-b/ /option!/
+    wordsep_simple_re = re.compile(r'(\s+)')
+
+    # XXX this is not locale- or charset-aware -- string.lowercase
+    # is US-ASCII only (and therefore English-only)
+    sentence_end_re = re.compile(r'[a-z]'             # lowercase letter
+                                 r'[\.\!\?]'          # sentence-ending punct.
+                                 r'[\"\']?'           # optional end-of-quote
+                                 r'\Z')               # end of chunk
+
+
+    def __init__(self,
+                 width: int = 70,
+                 initial_indent: str = "",
+                 subsequent_indent: str = "",
+                 expand_tabs: bool = True,
+                 replace_whitespace: bool = True,
+                 fix_sentence_endings: bool = False,
+                 break_long_words: bool = True,
+                 drop_whitespace: bool = True,
+                 break_on_hyphens: bool = True) -> None:
+        self.width = width
+        self.initial_indent = initial_indent
+        self.subsequent_indent = subsequent_indent
+        self.expand_tabs = expand_tabs
+        self.replace_whitespace = replace_whitespace
+        self.fix_sentence_endings = fix_sentence_endings
+        self.break_long_words = break_long_words
+        self.drop_whitespace = drop_whitespace
+        self.break_on_hyphens = break_on_hyphens
+
+
+    # -- Private methods -----------------------------------------------
+    # (possibly useful for subclasses to override)
+
+    def _munge_whitespace(self, text: str) -> str:
+        """_munge_whitespace(text : string) -> string
+
+        Munge whitespace in text: expand tabs and convert all other
+        whitespace characters to spaces.  Eg. " foo\tbar\n\nbaz"
+        becomes " foo    bar  baz".
+        """
+        if self.expand_tabs:
+            text = text.expandtabs()
+        if self.replace_whitespace:
+            text = text.translate(self.unicode_whitespace_trans)
+        return text
+
+
+    def _split(self, text: str) -> List[str]:
+        """_split(text : string) -> [string]
+
+        Split the text to wrap into indivisible chunks.  Chunks are
+        not quite the same as words; see _wrap_chunks() for full
+        details.  As an example, the text
+          Look, goof-ball -- use the -b option!
+        breaks into the following chunks:
+          'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ',
+          'use', ' ', 'the', ' ', '-b', ' ', 'option!'
+        if break_on_hyphens is True, or in:
+          'Look,', ' ', 'goof-ball', ' ', '--', ' ',
+          'use', ' ', 'the', ' ', '-b', ' ', 'option!'
+        otherwise.
+        """
+        if self.break_on_hyphens is True:
+            chunks = self.wordsep_re.split(text)
+        else:
+            chunks = self.wordsep_simple_re.split(text)
+        chunks = [c for c in chunks if c]
+        return chunks
+
+    def _fix_sentence_endings(self, chunks: List[str]) -> None:
+        """_fix_sentence_endings(chunks : [string])
+
+        Correct for sentence endings buried in 'chunks'.  Eg. when the
+        original text contains "... foo.\nBar ...", munge_whitespace()
+        and split() will convert that to [..., "foo.", " ", "Bar", ...]
+        which has one too few spaces; this method simply changes the one
+        space to two.
+        """
+        i = 0
+        patsearch = self.sentence_end_re.search
+        while i < len(chunks)-1:
+            if chunks[i+1] == " " and patsearch(chunks[i]):
+                chunks[i+1] = "  "
+                i += 2
+            else:
+                i += 1
+
+    def _handle_long_word(self, reversed_chunks: List[str],
+                          cur_line: List[str], cur_len: int,
+                          width: int) -> None:
+        """_handle_long_word(chunks : [string],
+                             cur_line : [string],
+                             cur_len : int, width : int)
+
+        Handle a chunk of text (most likely a word, not whitespace) that
+        is too long to fit in any line.
+        """
+        # Figure out when indent is larger than the specified width, and make
+        # sure at least one character is stripped off on every pass
+        if width < 1:
+            space_left = 1
+        else:
+            space_left = width - cur_len
+
+        # If we're allowed to break long words, then do so: put as much
+        # of the next chunk onto the current line as will fit.
+        if self.break_long_words:
+            cur_line.append(reversed_chunks[-1][:space_left])
+            reversed_chunks[-1] = reversed_chunks[-1][space_left:]
+
+        # Otherwise, we have to preserve the long word intact.  Only add
+        # it to the current line if there's nothing already there --
+        # that minimizes how much we violate the width constraint.
+        elif not cur_line:
+            cur_line.append(reversed_chunks.pop())
+
+        # If we're not allowed to break long words, and there's already
+        # text on the current line, do nothing.  Next time through the
+        # main loop of _wrap_chunks(), we'll wind up here again, but
+        # cur_len will be zero, so the next line will be entirely
+        # devoted to the long word that we can't handle right now.
+
+    def _wrap_chunks(self, chunks: List[str]) -> List[str]:
+        """_wrap_chunks(chunks : [string]) -> [string]
+
+        Wrap a sequence of text chunks and return a list of lines of
+        length 'self.width' or less.  (If 'break_long_words' is false,
+        some lines may be longer than this.)  Chunks correspond roughly
+        to words and the whitespace between them: each chunk is
+        indivisible (modulo 'break_long_words'), but a line break can
+        come between any two chunks.  Chunks should not have internal
+        whitespace; ie. a chunk is either all whitespace or a "word".
+        Whitespace chunks will be removed from the beginning and end of
+        lines, but apart from that whitespace is preserved.
+        """
+        lines = []  # type: List[str]
+        if self.width <= 0:
+            raise ValueError("invalid width %r (must be > 0)" % self.width)
+
+        # Arrange in reverse order so items can be efficiently popped
+        # from a stack of chunks.
+        chunks.reverse()
+
+        while chunks:
+
+            # Start the list of chunks that will make up the current line.
+            # cur_len is just the length of all the chunks in cur_line.
+            cur_line = []  # type: List[str]
+            cur_len = 0
+
+            # Figure out which static string will prefix this line.
+            if lines:
+                indent = self.subsequent_indent
+            else:
+                indent = self.initial_indent
+
+            # Maximum width for this line.
+            width = self.width - len(indent)
+
+            # First chunk on line is whitespace -- drop it, unless this
+            # is the very beginning of the text (ie. no lines started yet).
+            if self.drop_whitespace and chunks[-1].strip() == '' and lines:
+                del chunks[-1]
+
+            while chunks:
+                l = len(chunks[-1])
+
+                # Can at least squeeze this chunk onto the current line.
+                if cur_len + l <= width:
+                    cur_line.append(chunks.pop())
+                    cur_len += l
+
+                # Nope, this line is full.
+                else:
+                    break
+
+            # The current line is full, and the next chunk is too big to
+            # fit on *any* line (not just this one).
+            if chunks and len(chunks[-1]) > width:
+                self._handle_long_word(chunks, cur_line, cur_len, width)
+
+            # If the last chunk on this line is all whitespace, drop it.
+            if self.drop_whitespace and cur_line and cur_line[-1].strip() == '':
+                del cur_line[-1]
+
+            # Convert current line back to a string and store it in list
+            # of all lines (return value).
+            if cur_line:
+                lines.append(indent + ''.join(cur_line))
+
+        return lines
+
+
+    # -- Public interface ----------------------------------------------
+
+    def wrap(self, text: str) -> List[str]:
+        """wrap(text : string) -> [string]
+
+        Reformat the single paragraph in 'text' so it fits in lines of
+        no more than 'self.width' columns, and return a list of wrapped
+        lines.  Tabs in 'text' are expanded with string.expandtabs(),
+        and all other whitespace characters (including newline) are
+        converted to space.
+        """
+        text = self._munge_whitespace(text)
+        chunks = self._split(text)
+        if self.fix_sentence_endings:
+            self._fix_sentence_endings(chunks)
+        return self._wrap_chunks(chunks)
+
+    def fill(self, text: str) -> str:
+        """fill(text : string) -> string
+
+        Reformat the single paragraph in 'text' to fit in lines of no
+        more than 'self.width' columns, and return a new string
+        containing the entire wrapped paragraph.
+        """
+        return "\n".join(self.wrap(text))
+
+
+# -- Convenience interface ---------------------------------------------
+
+def wrap(text: str, width: int = 70, **kwargs: Any) -> List[str]:
+    """Wrap a single paragraph of text, returning a list of wrapped lines.
+
+    Reformat the single paragraph in 'text' so it fits in lines of no
+    more than 'width' columns, and return a list of wrapped lines.  By
+    default, tabs in 'text' are expanded with string.expandtabs(), and
+    all other whitespace characters (including newline) are converted to
+    space.  See TextWrapper class for available keyword args to customize
+    wrapping behaviour.
+    """
+    w = TextWrapper(width=width, **kwargs)
+    return w.wrap(text)
+
+def fill(text: str, width: int = 70, **kwargs: Any) -> str:
+    """Fill a single paragraph of text, returning a new string.
+
+    Reformat the single paragraph in 'text' to fit in lines of no more
+    than 'width' columns, and return a new string containing the entire
+    wrapped paragraph.  As with wrap(), tabs are expanded and other
+    whitespace characters converted to space.  See TextWrapper class for
+    available keyword args to customize wrapping behaviour.
+    """
+    w = TextWrapper(width=width, **kwargs)
+    return w.fill(text)
+
+
+# -- Loosely related functionality -------------------------------------
+
+_whitespace_only_re = re.compile('^[ \t]+$', re.MULTILINE)
+_leading_whitespace_re = re.compile('(^[ \t]*)(?:[^ \t\n])', re.MULTILINE)
+
+def dedent(text: str) -> str:
+    """Remove any common leading whitespace from every line in `text`.
+
+    This can be used to make triple-quoted strings line up with the left
+    edge of the display, while still presenting them in the source code
+    in indented form.
+
+    Note that tabs and spaces are both treated as whitespace, but they
+    are not equal: the lines "  hello" and "\thello" are
+    considered to have no common leading whitespace.  (This behaviour is
+    new in Python 2.5; older versions of this module incorrectly
+    expanded tabs before searching for common leading whitespace.)
+    """
+    # Look for the longest leading string of spaces and tabs common to
+    # all lines.
+    margin = None # type: str
+    text = _whitespace_only_re.sub('', text)
+    indents = _leading_whitespace_re.findall(text)
+    for indent in indents:
+        if margin is None:
+            margin = indent
+
+        # Current line more deeply indented than previous winner:
+        # no change (previous winner is still on top).
+        elif indent.startswith(margin):
+            pass
+
+        # Current line consistent with and no deeper than previous winner:
+        # it's the new winner.
+        elif margin.startswith(indent):
+            margin = indent
+
+        # Current line and previous winner have no common whitespace:
+        # there is no margin.
+        else:
+            margin = ""
+            break
+
+    # sanity check (testing/debugging only)
+    if 0 and margin:
+        for line in text.split("\n"):
+            assert not line or line.startswith(margin), \
+                   "line = %r, margin = %r" % (line, margin)
+
+    if margin:
+        text = re.sub(r'(?m)^' + margin, '', text)
+    return text
+
+if __name__ == "__main__":
+    #print dedent("\tfoo\n\tbar")
+    #print dedent("  \thello there\n  \t  how are you?")
+    print(dedent("Hello there.\n  This is indented."))
diff --git a/test-data/unit/README.md b/test-data/unit/README.md
new file mode 100644
index 0000000..693e7f4
--- /dev/null
+++ b/test-data/unit/README.md
@@ -0,0 +1,180 @@
+Tests
+=====
+
+
+Quick Start
+-----------
+
+To add a simple unit test for a new feature you developed, open or create a
+`test-data/unit/check-*.test` file with a name that roughly relates to the
+feature you added.
+
+Add the test in this format anywhere in the file:
+
+    [case testNewSyntaxBasics]
+    # flags: --python-version 3.6
+    x: int
+    x = 5
+    y: int = 5
+
+    a: str
+    a = 5  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    b: str = 5  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+    zzz: int
+    zzz: str  # E: Name 'zzz' already defined
+
+- no code here is executed, just type checked
+- optional `# flags: ` indicates which flags to use for this unit test
+- `# E: abc...` indicates that this line should result in a type check error
+with the text "abc..."
+- note the space after `E:` and `flags:`
+- `# E:12` adds a column number to the expected error
+- repeating `# E: ` several times on one line indicates multiple expected errors on that line
+- `W: ...` and `N: ...` work exactly like `E:`, but report a warning and a note, respectively
+- lines that don't contain the above should cause no type check errors
+- optional `[builtins fixtures/...]` tells the type checker to use
+stubs from the indicated file (see Fixtures section below)
+- optional `[out]` is an alternative to the `# E:` notation: it indicates that
+any text after it contains the expected type checking error messages.
+Usually, `# E:` is preferred because it makes it easier to associate the
+errors with the code generating them at a glance, and to change the code of
+the test without having to change line numbers in `[out]`
+(an example using `[out]` follows this list)
+- an empty `[out]` section has no effect
+- to run just this test, use `pytest -k testNewSyntaxBasics -n0`
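+
+As a hypothetical example (the test name is illustrative), the same kind of
+check can also be written with an `[out]` section instead of inline `# E:`
+comments:
+
+    [case testIncompatibleArgumentViaOut]
+    def f(x: int) -> None: pass
+    f('no')
+    [out]
+    main:2: error: Argument 1 to "f" has incompatible type "str"; expected "int"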
+
+
+Fixtures
+--------
+
+The unit tests use minimal stubs for builtins, so a lot of operations are not
+possible. You should generally define any needed classes within the test case
+instead of relying on builtins, though clearly this is not always an option
+(see below for more about stubs in test cases). This way tests run much
+faster and don't break if the stubs change. If your test crashes mysteriously
+even though the code works when run manually, you should make sure you have
+all the stubs you need for your test case, including built-in classes such as
+`list` or `dict`, as these are not included by default.
+
+Where the stubs for builtins come from for a given test:
+
+- The builtins used by default in unit tests live in
+  `test-data/unit/lib-stub`.
+
+- Individual test cases can override the builtins stubs by using
+  `[builtins fixtures/foo.pyi]`; this targets files in `test-data/unit/fixtures`.
+  Feel free to modify existing files there or create new ones as you deem fit
+  (a minimal sketch of such a stub follows this list).
+
+- Test cases can also use `[typing fixtures/typing-full.pyi]` to use a more
+  complete stub for `typing` that contains the async types, among other things.
+
+- Feel free to add additional stubs to that `fixtures` directory, but
+  generally don't expand files in `lib-stub` without first discussing the
+  addition with other mypy developers, as additions could slow down the test
+  suite.
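+
+As an illustrative sketch (the file name is hypothetical), a new fixture stub
+such as `fixtures/minimal_example.pyi` usually has to define at least the core
+builtins that mypy itself depends on, plus whatever the test needs:
+
+    class object:
+        def __init__(self) -> None: pass
+    class type: pass
+    class function: pass
+    class int: pass
+    class str: pass
+    class ellipsis: pass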
+
+
+Running tests and linting
+-------------------------
+
+First install any additional dependencies needed for testing:
+
+    $ python3 -m pip install -U -r test-requirements.txt
+
+You must also have a Python 2.7 binary installed that can import the `typing`
+module:
+
+    $ python2 -m pip install -U typing
+
+To run all tests, run the script `runtests.py` in the mypy repository:
+
+    $ ./runtests.py
+
+Note that some tests will be disabled for older Python versions.
+
+This will run all tests, including integration and regression tests,
+and will type check mypy and verify that all stubs are valid.
+
+You can run a subset of test suites by passing positive or negative
+filters:
+
+    $ ./runtests.py lex parse -x lint -x stub
+
+For example, to run unit tests only, which run pretty quickly:
+
+    $ ./runtests.py unit-test pytest
+
+The unit test suites are driven by a mixture of test frameworks: mypy's own
+`myunit` framework, and `pytest`, which we're in the process of migrating to.
+Test suites for individual components are in the files `mypy/test/test*.py`.
+You can run many of these individually by doing `runtests.py testfoobar`. For
+finer control over which unit tests are run and how, you can run `py.test` or
+`scripts/myunit` directly, or pass arguments through to the underlying test
+runner via `-a`:
+
+    $ py.test mypy/test/testcheck.py -v -k MethodCall
+    $ ./runtests.py -v 'pytest mypy/test/testcheck' -a -v -a -k -a MethodCall
+
+    $ PYTHONPATH=$PWD scripts/myunit -m mypy.test.testlex -v '*backslash*'
+    $ ./runtests.py mypy.test.testlex -a -v -a '*backslash*'
+
+You can also run the type checker for manual testing without
+installing it by setting up the Python module search path suitably:
+
+    $ export PYTHONPATH=$PWD
+    $ python<version> -m mypy PROGRAM.py
+
+You will have to manually install the `typing` module if you're running Python
+3.4 or earlier.
+
+You can add the entry scripts to PATH for a single python3 version:
+
+    $ export PATH=$PWD/scripts
+    $ mypy PROGRAM.py
+
+You can check a module or string instead of a file:
+
+    $ mypy PROGRAM.py
+    $ mypy -m MODULE
+    $ mypy -c 'import MODULE'
+
+To run the linter:
+
+    $ ./runtests.py lint
+
+Many test suites store test case descriptions in text files
+(`test-data/unit/*.test`). The module `mypy.test.data` parses these
+descriptions. The package `mypy.myunit` contains the test framework used for
+the non-checker test cases.
+
+Python evaluation test cases are a little different from unit tests
+(`mypy/test/testpythoneval.py`, `test-data/unit/pythoneval.test`). These
+type check programs and run them. Unlike the unit tests, these use the
+full builtins and library stubs instead of minimal ones. Run them using
+`runtests.py testpythoneval`.
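+
+As a hypothetical illustration of the format, for these test cases the `[out]`
+section holds the runtime output of the program (assuming it type checks
+cleanly) rather than type checker errors:
+
+    [case testHelloWorldExample]
+    print('hello, world')
+    [out]
+    hello, world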
+
+`runtests.py` by default runs tests in parallel using as many processes as
+there are logical cores the `runtests.py` process is allowed to use (on
+some platforms this information isn't available, so 2 processes are used by
+default). You can change the number of workers using the `-j` option.
+
+All pytest tests run as a single test from the perspective of `runtests.py`,
+so the `-j` option has no effect on them. Instead, `pytest` itself determines
+the number of processes to use. The default (set in `./pytest.ini`) is the
+number of logical cores; this can be overridden using the `-n` option.
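+
+For example (illustrative invocations only), to cap `runtests.py` at four
+workers, or to override the pytest worker count directly:
+
+    $ ./runtests.py -j 4
+    $ py.test -n 8 mypy/test/testcheck.py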
+
+Note that running more processes than logical cores is likely to
+significantly decrease performance; the relevant count is the number of
+processes used by `runtests.py` plus those used by `pytest`.
+
+
+Coverage reports
+----------------
+
+There is an experimental feature to generate coverage reports.  To use
+this feature, you need to `pip install -U lxml`.  This is an extension
+module and requires various library headers to install.  On a
+Debian-derived system, the following command may provide the necessary
+dependencies:
+
+    apt-get install python3-dev libxml2-dev libxslt1-dev
+
+To use the feature, pass e.g. `--txt-report "$(mktemp -d)"`.
diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test
new file mode 100644
index 0000000..9a0d4af
--- /dev/null
+++ b/test-data/unit/check-abstract.test
@@ -0,0 +1,833 @@
+-- Type checker test cases for abstract classes.
+
+
+-- Subtyping with abstract classes
+-- -------------------------------
+
+
+[case testAbstractClassSubclasses]
+
+from abc import abstractmethod, ABCMeta
+
+i = None # type: I
+j = None # type: J
+a = None # type: A
+b = None # type: B
+c = None # type: C
+
+j = c  # E: Incompatible types in assignment (expression has type "C", variable has type "J")
+a = i  # E: Incompatible types in assignment (expression has type "I", variable has type "A")
+a = j  # E: Incompatible types in assignment (expression has type "J", variable has type "A")
+b = i  # E: Incompatible types in assignment (expression has type "I", variable has type "B")
+
+i = a
+i = b
+i = c
+j = a
+j = b
+a = b
+
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self): pass
+class J(metaclass=ABCMeta):
+    @abstractmethod
+    def g(self): pass
+class A(I, J): pass
+class B(A): pass
+class C(I): pass
+
+[case testAbstractClassSubtypingViaExtension]
+
+from abc import abstractmethod, ABCMeta
+
+i = None # type: I
+j = None # type: J
+a = None # type: A
+o = None # type: object
+
+j = i # E: Incompatible types in assignment (expression has type "I", variable has type "J")
+a = i # E: Incompatible types in assignment (expression has type "I", variable has type "A")
+a = j # E: Incompatible types in assignment (expression has type "J", variable has type "A")
+i = o # E: Incompatible types in assignment (expression has type "object", variable has type "I")
+j = o # E: Incompatible types in assignment (expression has type "object", variable has type "J")
+
+i = a
+j = a
+i = j
+o = i
+o = j
+
+class I(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+class J(I): pass
+class A(J): pass
+
+[case testInheritingAbstractClassInSubclass]
+
+from abc import abstractmethod, ABCMeta
+
+i = None # type: I
+a = None # type: A
+b = None # type: B
+
+i = a # E: Incompatible types in assignment (expression has type "A", variable has type "I")
+b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a = b
+i = b
+
+class I(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+class A: pass
+class B(A, I): pass
+
+
+-- Abstract class objects
+-- ----------------------
+
+
+[case testAbstractClassAsTypeObject]
+
+from abc import abstractmethod, ABCMeta
+
+o = None # type: object
+t = None # type: type
+
+o = I
+t = I
+
+class I(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+
+[case testAbstractClassInCasts]
+from typing import cast
+from abc import abstractmethod, ABCMeta
+
+class I(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+class A(I): pass
+class B: pass
+
+i, a, b = None, None, None # type: (I, A, B)
+o = None # type: object
+
+a = cast(I, o) # E: Incompatible types in assignment (expression has type "I", variable has type "A")
+b = cast(B, i) # Ok; a subclass of B might inherit I
+i = cast(I, b) # Ok; a subclass of B might inherit I
+
+i = cast(I, o)
+i = cast(I, a)
+
+[case testInstantiatingClassThatImplementsAbstractMethod]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+class B(A):
+  def f(self): pass
+B()
+[out]
+
+[case testInstantiatingAbstractClass]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta): pass
+class B(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+A() # OK
+B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'f'
+[out]
+
+[case testInstantiatingClassWithInheritedAbstractMethod]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+  @abstractmethod
+  def g(self): pass
+class B(A): pass
+B() # E: Cannot instantiate abstract class 'B' with abstract attributes 'f' and 'g'
+[out]
+
+[case testInstantiationAbstractsInTypeForFunctions]
+from typing import Type
+from abc import abstractmethod
+
+class A:
+    @abstractmethod
+    def m(self) -> None: pass
+class B(A): pass
+class C(B):
+    def m(self) -> None:
+        pass
+
+def f(cls: Type[A]) -> A:
+    return cls()  # OK
+def g() -> A:
+    return A()  # E: Cannot instantiate abstract class 'A' with abstract attribute 'm'
+
+f(A)  # E: Only non-abstract class can be given where 'Type[__main__.A]' is expected
+f(B)  # E: Only non-abstract class can be given where 'Type[__main__.A]' is expected
+f(C)  # OK
+x: Type[B]
+f(x)  # OK
+[out]
+
+[case testInstantiationAbstractsInTypeForAliases]
+from typing import Type
+from abc import abstractmethod
+
+class A:
+    @abstractmethod
+    def m(self) -> None: pass
+class B(A): pass
+class C(B):
+    def m(self) -> None:
+        pass
+
+def f(cls: Type[A]) -> A:
+    return cls()  # OK
+
+Alias = A
+GoodAlias = C
+Alias()  # E: Cannot instantiate abstract class 'A' with abstract attribute 'm'
+GoodAlias()
+f(Alias)  # E: Only non-abstract class can be given where 'Type[__main__.A]' is expected
+f(GoodAlias)
+[out]
+
+[case testInstantiationAbstractsInTypeForVariables]
+from typing import Type
+from abc import abstractmethod
+
+class A:
+    @abstractmethod
+    def m(self) -> None: pass
+class B(A): pass
+class C(B):
+    def m(self) -> None:
+        pass
+
+var: Type[A]
+var()
+var = A # E: Can only assign non-abstract classes to a variable of type 'Type[__main__.A]'
+var = B # E: Can only assign non-abstract classes to a variable of type 'Type[__main__.A]'
+var = C # OK
+
+var_old = None # type: Type[A] # Old syntax for variable annotations
+var_old()
+var_old = A # E: Can only assign non-abstract classes to a variable of type 'Type[__main__.A]'
+var_old = B # E: Can only assign non-abstract classes to a variable of type 'Type[__main__.A]'
+var_old = C # OK
+[out]
+
+[case testInstantiationAbstractsInTypeForClassMethods]
+from typing import Type
+from abc import abstractmethod
+
+class Logger:
+    @staticmethod
+    def log(a: Type[C]):
+        pass
+class C:
+    @classmethod
+    def action(cls) -> None:
+        cls() #OK for classmethods
+        Logger.log(cls)  #OK for classmethods
+    @abstractmethod
+    def m(self) -> None:
+        pass
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testInstantiatingClassWithInheritedAbstractMethodAndSuppression]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def a(self): pass
+    @abstractmethod
+    def b(self): pass
+    @abstractmethod
+    def c(self): pass
+    @abstractmethod
+    def d(self): pass
+    @abstractmethod
+    def e(self): pass
+    @abstractmethod
+    def f(self): pass
+    @abstractmethod
+    def g(self): pass
+    @abstractmethod
+    def h(self): pass
+    @abstractmethod
+    def i(self): pass
+    @abstractmethod
+    def j(self): pass
+a = A() # E: Cannot instantiate abstract class 'A' with abstract attributes 'a', 'b', ... and 'j' (7 methods suppressed)
+[out]
+
+
+-- Implementing abstract methods
+-- -----------------------------
+
+
+[case testImplementingAbstractMethod]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x: int) -> int: pass
+    @abstractmethod
+    def g(self, x: int) -> int: pass
+class B(A):
+    def f(self, x: str) -> int: \
+            # E: Argument 1 of "f" incompatible with supertype "A"
+        pass
+    def g(self, x: int) -> int: pass
+[out]
+
+[case testImplementingAbstractMethodWithMultipleBaseClasses]
+from abc import abstractmethod, ABCMeta
+import typing
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x: int) -> int: pass
+class J(metaclass=ABCMeta):
+    @abstractmethod
+    def g(self, x: str) -> str: pass
+class A(I, J):
+    def f(self, x: str) -> int: pass \
+        # E: Argument 1 of "f" incompatible with supertype "I"
+    def g(self, x: str) -> int: pass \
+        # E: Return type of "g" incompatible with supertype "J"
+    def h(self) -> int: pass # Not related to any base class
+[out]
+
+[case testImplementingAbstractMethodWithExtension]
+from abc import abstractmethod, ABCMeta
+import typing
+class J(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x: int) -> int: pass
+class I(J): pass
+class A(I):
+    def f(self, x: str) -> int: pass \
+        # E: Argument 1 of "f" incompatible with supertype "J"
+[out]
+
+[case testInvalidOverridingAbstractMethod]
+from abc import abstractmethod, ABCMeta
+import typing
+class J(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x: 'J') -> None: pass
+class I(J):
+    @abstractmethod
+    def f(self, x: 'I') -> None: pass # E: Argument 1 of "f" incompatible with supertype "J"
+[out]
+
+[case testAbstractClassCoAndContraVariance]
+from abc import abstractmethod, ABCMeta
+import typing
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, a: A) -> 'I': pass
+    @abstractmethod
+    def g(self, a: A) -> 'I': pass
+    @abstractmethod
+    def h(self, a: 'I') -> A: pass
+class A(I):
+    def h(self, a: 'A') -> 'I': # Fail
+        pass
+    def f(self, a: 'I') -> 'I':
+        pass
+    def g(self, a: 'A') -> 'A':
+        pass
+[out]
+main:11: error: Argument 1 of "h" incompatible with supertype "I"
+main:11: error: Return type of "h" incompatible with supertype "I"
+
+
+-- Accessing abstract members
+-- --------------------------
+
+
+[case testAccessingAbstractMethod]
+
+from abc import abstractmethod, ABCMeta
+
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, a: int) -> str: pass
+
+i, a, b = None, None, None # type: (I, int, str)
+
+a = i.f(a) # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+b = i.f(b) # E: Argument 1 to "f" of "I" has incompatible type "str"; expected "int"
+i.g()      # E: "I" has no attribute "g"
+
+b = i.f(a)
+
+[case testAccessingInheritedAbstractMethod]
+
+from abc import abstractmethod, ABCMeta
+
+class J(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, a: int) -> str: pass
+class I(J): pass
+
+i, a, b = None, None, None # type: (I, int, str)
+
+a = i.f(1) # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+b = i.f(1)
+
+
+-- Any (dynamic) types
+-- -------------------
+
+
+[case testAbstractClassWithAllDynamicTypes]
+from abc import abstractmethod, ABCMeta
+import typing
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x): pass
+    @abstractmethod
+    def g(self, x): pass
+class A(I):
+    def f(self, x): pass
+    def g(self, x, y) -> None: pass \
+        # E: Signature of "g" incompatible with supertype "I"
+[out]
+
+[case testAbstractClassWithAllDynamicTypes2]
+from abc import abstractmethod, ABCMeta
+import typing
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x): pass
+    @abstractmethod
+    def g(self, x): pass
+class A(I):
+    def f(self, x): pass
+    def g(self, x, y): pass
+[out]
+
+[case testAbstractClassWithImplementationUsingDynamicTypes]
+from abc import abstractmethod, ABCMeta
+import typing
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x: int) -> None: pass
+    @abstractmethod
+    def g(self, x: int) -> None: pass
+class A(I):
+    def f(self, x): pass
+    def g(self, x, y): pass
+[out]
+
+
+-- Special cases
+-- -------------
+
+
+[case testMultipleAbstractBases]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self) -> None: pass
+class B(metaclass=ABCMeta):
+  @abstractmethod
+  def g(self) -> None: pass
+class C(A, B):
+  @abstractmethod
+  def h(self) -> None: pass
+
+[case testMemberAccessWithMultipleAbstractBaseClasses]
+
+from abc import abstractmethod, ABCMeta
+
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self) -> None: pass
+class B(metaclass=ABCMeta):
+    @abstractmethod
+    def g(self) -> None: pass
+class C(A, B): pass
+x = None # type: C
+x.f()
+x.g()
+x.f(x) # E: Too many arguments for "f" of "A"
+x.g(x) # E: Too many arguments for "g" of "B"
+
+[case testInstantiatingAbstractClassWithMultipleBaseClasses]
+
+from abc import abstractmethod, ABCMeta
+
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self) -> None: pass
+class B(metaclass=ABCMeta):
+  @abstractmethod
+  def g(self) -> None: pass
+class C(A, B):
+  def f(self) -> None: pass
+class D(A, B):
+  def g(self) -> None: pass
+class E(A, B):
+  def f(self) -> None: pass
+  def g(self) -> None: pass
+C() # E: Cannot instantiate abstract class 'C' with abstract attribute 'g'
+D() # E: Cannot instantiate abstract class 'D' with abstract attribute 'f'
+E()
+
+[case testInconsistentMro]
+from abc import abstractmethod, ABCMeta
+import typing
+
+class A(metaclass=ABCMeta): pass
+class B(object, A): pass \
+      # E: Cannot determine consistent method resolution order (MRO) for "B"
+
+[case testOverloadedAbstractMethod]
+from foo import *
+[file foo.pyi]
+from abc import abstractmethod, ABCMeta
+from typing import overload
+
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  @overload
+  def f(self, x: int) -> int: pass
+  @abstractmethod
+  @overload
+  def f(self, x: str) -> str: pass
+
+class B(A):
+  @overload
+  def f(self, x: int) -> int: pass
+  @overload
+  def f(self, x: str) -> str: pass
+A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'f'
+B()
+B().f(1)
+a = B() # type: A
+a.f(1)
+a.f('')
+a.f(B()) # E: No overload variant of "f" of "A" matches argument types [foo.B]
+
+[case testOverloadedAbstractMethodWithAlternativeDecoratorOrder]
+from foo import *
+[file foo.pyi]
+from abc import abstractmethod, ABCMeta
+from typing import overload
+
+class A(metaclass=ABCMeta):
+  @overload
+  @abstractmethod
+  def f(self, x: int) -> int: pass
+  @overload
+  @abstractmethod
+  def f(self, x: str) -> str: pass
+
+class B(A):
+  @overload
+  def f(self, x: int) -> int: pass
+  @overload
+  def f(self, x: str) -> str: pass
+A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'f'
+B()
+B().f(1)
+a = B() # type: A
+a.f(1)
+a.f('')
+a.f(B()) # E: No overload variant of "f" of "A" matches argument types [foo.B]
+
+[case testOverloadedAbstractMethodVariantMissingDecorator1]
+from foo import *
+[file foo.pyi]
+from abc import abstractmethod, ABCMeta
+from typing import overload
+
+class A(metaclass=ABCMeta):
+  @abstractmethod \
+    # E: Overloaded method has both abstract and non-abstract variants
+  @overload
+  def f(self, x: int) -> int: pass
+  @overload
+  def f(self, x: str) -> str: pass
+[out]
+
+[case testOverloadedAbstractMethodVariantMissingDecorator2]
+from foo import *
+[file foo.pyi]
+from abc import abstractmethod, ABCMeta
+from typing import overload
+
+class A(metaclass=ABCMeta):
+  @overload \
+    # E: Overloaded method has both abstract and non-abstract variants
+  def f(self, x: int) -> int: pass
+  @abstractmethod
+  @overload
+  def f(self, x: str) -> str: pass
+[out]
+
+[case testMultipleInheritanceAndAbstractMethod]
+import typing
+from abc import abstractmethod, ABCMeta
+class A:
+  def f(self, x: str) -> None: pass
+class B(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self, x: str) -> None: pass
+class C(A, B): pass
+
+[case testMultipleInheritanceAndAbstractMethod2]
+import typing
+from abc import abstractmethod, ABCMeta
+class A:
+  def f(self, x: str) -> None: pass
+class B(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self, x: int) -> None: pass
+class C(A, B): pass
+[out]
+main:8: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
+
+[case testCallAbstractMethodBeforeDefinition]
+import typing
+from abc import abstractmethod, ABCMeta
+class A(metaclass=ABCMeta):
+    def f(self) -> None:
+        self.g(1) # E: Argument 1 to "g" of "A" has incompatible type "int"; expected "str"
+    @abstractmethod
+    def g(self, x: str) -> None: pass
+[out]
+
+[case testAbstractOperatorMethods1]
+import typing
+from abc import abstractmethod, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def __lt__(self, other: 'A') -> int: pass
+    @abstractmethod
+    def __gt__(self, other: 'A') -> int: pass
+
+[case testAbstractOperatorMethods2]
+import typing
+from abc import abstractmethod, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def __radd__(self, other: 'C') -> str: pass # Error
+class B:
+    @abstractmethod
+    def __add__(self, other: 'A') -> int: pass
+class C:
+    def __add__(self, other: int) -> B: pass
+[out]
+
+
+-- Abstract properties
+-- -------------------
+
+
+[case testReadOnlyAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+def f(a: A) -> None:
+    a.x() # E: "int" not callable
+    a.x = 1  # E: Property "x" defined in "A" is read-only
+[out]
+
+[case testReadOnlyAbstractPropertyForwardRef]
+from abc import abstractproperty, ABCMeta
+def f(a: A) -> None:
+    a.x() # E: "int" not callable
+    a.x = 1  # E: Property "x" defined in "A" is read-only
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+[out]
+
+[case testReadWriteAbstractProperty]
+from abc import abstractproperty, ABCMeta
+def f(a: A) -> None:
+    a.x.y # E: "int" has no attribute "y"
+    a.x = 1
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, x: int) -> None: pass
+[out]
+
+[case testInstantiateClassWithReadOnlyAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A): pass
+b = B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'x'
+
+[case testInstantiateClassWithReadWriteAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, x: int) -> None: pass
+class B(A): pass
+b = B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'x'
+
+[case testImplementAbstractPropertyViaProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A):
+    @property
+    def x(self) -> int: pass
+b = B()
+b.x() # E: "int" not callable
+[builtins fixtures/property.pyi]
+
+[case testImplementReadWriteAbstractPropertyViaProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, v: int) -> None: pass
+class B(A):
+    @property
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, v: int) -> None: pass
+b = B()
+b.x.y # E: "int" has no attribute "y"
+[builtins fixtures/property.pyi]
+
+[case testImplementAbstractPropertyViaPropertyInvalidType]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A):
+    @property
+    def x(self) -> str: pass # E
+b = B()
+b.x() # E
+[builtins fixtures/property.pyi]
+[out]
+main:7: error: Return type of "x" incompatible with supertype "A"
+main:9: error: "str" not callable
+
+[case testCantImplementAbstractPropertyViaInstanceVariable]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A):
+    def __init__(self) -> None:
+        self.x = 1 # E
+b = B() # E
+b.x.y # E
+[builtins fixtures/property.pyi]
+[out]
+main:7: error: Property "x" defined in "B" is read-only
+main:8: error: Cannot instantiate abstract class 'B' with abstract attribute 'x'
+main:9: error: "int" has no attribute "y"
+
+[case testSuperWithAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A):
+    @property
+    def x(self) -> int:
+        return super().x.y # E: "int" has no attribute "y"
+[builtins fixtures/property.pyi]
+[out]
+
+[case testSuperWithReadWriteAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, v: int) -> None: pass
+class B(A):
+    @property
+    def x(self) -> int:
+        return super().x.y # E
+    @x.setter
+    def x(self, v: int) -> None:
+        super().x = '' # E
+[builtins fixtures/property.pyi]
+[out]
+main:10: error: "int" has no attribute "y"
+main:13: error: Invalid assignment target
+
+[case testOnlyImplementGetterOfReadWriteAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, v: int) -> None: pass
+class B(A):
+    @property # E
+    def x(self) -> int: pass
+b = B()
+b.x.y # E
+[builtins fixtures/property.pyi]
+[out]
+main:8: error: Read-only property cannot override read-write property
+main:11: error: "int" has no attribute "y"
+
+[case testDynamicallyTypedReadOnlyAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self): pass
+def f(a: A) -> None:
+    a.x.y
+    a.x = 1  # E: Property "x" defined in "A" is read-only
+[out]
+
+[case testDynamicallyTypedReadOnlyAbstractPropertyForwardRef]
+from abc import abstractproperty, ABCMeta
+def f(a: A) -> None:
+    a.x.y
+    a.x = 1  # E: Property "x" defined in "A" is read-only
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self): pass
+[out]
+
+[case testDynamicallyTypedReadWriteAbstractProperty]
+from abc import abstractproperty, ABCMeta
+def f(a: A) -> None:
+    a.x.y
+    a.x = 1
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self): pass
+    @x.setter
+    def x(self, x): pass
+[out]
diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test
new file mode 100644
index 0000000..f8ac01d
--- /dev/null
+++ b/test-data/unit/check-async-await.test
@@ -0,0 +1,677 @@
+-- Tests for async def and await (PEP 492)
+-- ---------------------------------------
+
+[case testAsyncDefPass]
+
+async def f() -> int:
+    pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncDefReturn]
+
+async def f() -> int:
+    return 0
+reveal_type(f())  # E: Revealed type is 'typing.Awaitable[builtins.int]'
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncDefMissingReturn]
+# flags: --warn-no-return
+async def f() -> int:
+    make_this_not_trivial = 1
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+main:2: error: Missing return statement
+
+[case testAsyncDefReturnWithoutValue]
+
+async def f() -> int:
+    make_this_not_trivial = 1
+    return
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+main:4: error: Return value expected
+
+[case testAwaitCoroutine]
+
+async def f() -> int:
+    x = await f()
+    reveal_type(x)  # E: Revealed type is 'builtins.int*'
+    return x
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+
+[case testAwaitDefaultContext]
+
+from typing import TypeVar
+T = TypeVar('T')
+async def f(x: T) -> T:
+    y = await f(x)
+    reveal_type(y)
+    return y
+[typing fixtures/typing-full.pyi]
+[out]
+main:6: error: Revealed type is 'T`-1'
+
+[case testAwaitAnyContext]
+
+from typing import Any, TypeVar
+T = TypeVar('T')
+async def f(x: T) -> T:
+    y = await f(x)  # type: Any
+    reveal_type(y)
+    return y
+[typing fixtures/typing-full.pyi]
+[out]
+main:6: error: Revealed type is 'Any'
+
+[case testAwaitExplicitContext]
+
+from typing import TypeVar
+T = TypeVar('T')
+async def f(x: T) -> T:
+    y = await f(x)  # type: int
+    reveal_type(y)
+    return x
+[typing fixtures/typing-full.pyi]
+[out]
+main:5: error: Argument 1 to "f" has incompatible type "T"; expected "int"
+main:6: error: Revealed type is 'builtins.int'
+
+[case testAwaitGeneratorError]
+
+from typing import Any, Generator
+def g() -> Generator[int, None, str]:
+    yield 0
+    return ''
+async def f() -> int:
+    x = await g()
+    return x
+[typing fixtures/typing-full.pyi]
+[out]
+main:7: error: Incompatible types in await (actual type Generator[int, None, str], expected type Awaitable[Any])
+
+[case testAwaitIteratorError]
+
+from typing import Any, Iterator
+def g() -> Iterator[Any]:
+    yield
+async def f() -> int:
+    x = await g()
+    return x
+[typing fixtures/typing-full.pyi]
+[out]
+main:6: error: Incompatible types in await (actual type Iterator[Any], expected type Awaitable[Any])
+
+[case testAwaitArgumentError]
+
+def g() -> int:
+    return 0
+async def f() -> int:
+    x = await g()
+    return x
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+main:5: error: Incompatible types in await (actual type "int", expected type Awaitable[Any])
+
+[case testAwaitResultError]
+
+async def g() -> int:
+    return 0
+async def f() -> str:
+    x = await g()  # type: str
+    return x
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testAwaitReturnError]
+
+async def g() -> int:
+    return 0
+async def f() -> str:
+    x = await g()
+    return x
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+main:6: error: Incompatible return value type (got "int", expected "str")
+
+[case testAsyncFor]
+
+from typing import AsyncIterator
+class C(AsyncIterator[int]):
+    async def __anext__(self) -> int: return 0
+async def f() -> None:
+    async for x in C():
+        reveal_type(x)  # E: Revealed type is 'builtins.int*'
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncForError]
+
+from typing import AsyncIterator
+async def f() -> None:
+    async for x in [1]:
+        pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+main:4: error: AsyncIterable expected
+main:4: error: List[int] has no attribute "__aiter__"
+
+[case testAsyncForTypeComments]
+
+from typing import AsyncIterator, Union
+class C(AsyncIterator[int]):
+    async def __anext__(self) -> int: return 0
+async def f() -> None:
+    async for x in C():  # type: str  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+        pass
+
+    async for y in C():  # type: int
+        pass
+
+    async for z in C():  # type: Union[int, str]
+        reveal_type(z)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncForComprehension]
+# flags: --fast-parser --python-version 3.6
+from typing import Generic, Iterable, TypeVar, AsyncIterator, Tuple
+
+T = TypeVar('T')
+
+class asyncify(Generic[T], AsyncIterator[T]):
+    def __init__(self, iterable: Iterable[T]) -> None:
+        self.iterable = iter(iterable)
+    def __aiter__(self) -> AsyncIterator[T]:
+        return self
+    async def __anext__(self) -> T:
+        try:
+            return next(self.iterable)
+        except StopIteration:
+            raise StopAsyncIteration
+
+async def listcomp(obj: Iterable[int]):
+    lst = [i async for i in asyncify(obj)]
+    reveal_type(lst)  # E: Revealed type is 'builtins.list[builtins.int*]'
+    lst2 = [i async for i in asyncify(obj) for j in obj]
+    reveal_type(lst2)  # E: Revealed type is 'builtins.list[builtins.int*]'
+
+async def setcomp(obj: Iterable[int]):
+    lst = {i async for i in asyncify(obj)}
+    reveal_type(lst)  # E: Revealed type is 'builtins.set[builtins.int*]'
+
+async def dictcomp(obj: Iterable[Tuple[int, str]]):
+    lst = {i: j async for i, j in asyncify(obj)}
+    reveal_type(lst)  # E: Revealed type is 'builtins.dict[builtins.int*, builtins.str*]'
+
+async def generatorexp(obj: Iterable[int]):
+    lst = (i async for i in asyncify(obj))
+    reveal_type(lst)  # E: Revealed type is 'typing.AsyncIterator[builtins.int*]'
+    lst2 = (i async for i in asyncify(obj) for i in obj)
+    reveal_type(lst2)  # E: Revealed type is 'typing.AsyncIterator[builtins.int*]'
+
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncForComprehensionErrors]
+# flags: --fast-parser --python-version 3.6
+from typing import Generic, Iterable, TypeVar, AsyncIterator, Tuple
+
+T = TypeVar('T')
+
+class asyncify(Generic[T], AsyncIterator[T]):
+    def __init__(self, iterable: Iterable[T]) -> None:
+        self.iterable = iter(iterable)
+    def __aiter__(self) -> AsyncIterator[T]:
+        return self
+    async def __anext__(self) -> T:
+        try:
+            return next(self.iterable)
+        except StopIteration:
+            raise StopAsyncIteration
+
+async def wrong_iterable(obj: Iterable[int]):
+    [i async for i in obj]
+    [i for i in asyncify(obj)]
+    {i: i async for i in obj}
+    {i: i for i in asyncify(obj)}
+
+[out]
+main:18: error: AsyncIterable expected
+main:18: error: Iterable[int] has no attribute "__aiter__"; maybe "__iter__"?
+main:19: error: Iterable expected
+main:19: error: asyncify[int] has no attribute "__iter__"; maybe "__aiter__"?
+main:20: error: AsyncIterable expected
+main:20: error: Iterable[int] has no attribute "__aiter__"; maybe "__iter__"?
+main:21: error: Iterable expected
+main:21: error: asyncify[int] has no attribute "__iter__"; maybe "__aiter__"?
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncWith]
+
+class C:
+    async def __aenter__(self) -> int: pass
+    async def __aexit__(self, x, y, z) -> None: pass
+async def f() -> None:
+    async with C() as x:
+        reveal_type(x)  # E: Revealed type is 'builtins.int*'
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+
+[case testAsyncWithError]
+
+class C:
+    def __enter__(self) -> int: pass
+    def __exit__(self, x, y, z) -> None: pass
+async def f() -> None:
+    async with C() as x:
+        pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+main:6: error: "C" has no attribute "__aenter__"; maybe "__enter__"?
+main:6: error: "C" has no attribute "__aexit__"; maybe "__exit__"?
+
+[case testAsyncWithErrorBadAenter]
+
+class C:
+    def __aenter__(self) -> int: pass
+    async def __aexit__(self, x, y, z) -> None: pass
+async def f() -> None:
+    async with C() as x:  # E: Incompatible types in "async with" for __aenter__ (actual type "int", expected type Awaitable[Any])
+        pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncWithErrorBadAenter2]
+
+class C:
+    def __aenter__(self) -> None: pass
+    async def __aexit__(self, x, y, z) -> None: pass
+async def f() -> None:
+    async with C() as x:  # E: None has no attribute "__await__"
+        pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncWithErrorBadAexit]
+
+class C:
+    async def __aenter__(self) -> int: pass
+    def __aexit__(self, x, y, z) -> int: pass
+async def f() -> None:
+    async with C() as x: # E: Incompatible types in "async with" for __aexit__ (actual type "int", expected type Awaitable[Any])
+        pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncWithErrorBadAexit2]
+
+class C:
+    async def __aenter__(self) -> int: pass
+    def __aexit__(self, x, y, z) -> None: pass
+async def f() -> None:
+    async with C() as x: # E: None has no attribute "__await__"
+        pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncWithTypeComments]
+
+class C:
+    async def __aenter__(self) -> int: pass
+    async def __aexit__(self, x, y, z) -> None: pass
+async def f() -> None:
+    async with C() as x:  # type: int
+        pass
+
+    async with C() as y, C() as z:  # type: str, int  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+        pass
+
+    async with C() as a:  # type: int, int  # E: Invalid tuple literal type
+        pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testNoYieldInAsyncDef]
+# flags: --python-version 3.5
+
+async def f():
+    yield None  # E: 'yield' in async function
+async def g():
+    yield  # E: 'yield' in async function
+async def h():
+    x = yield  # E: 'yield' in async function
+[builtins fixtures/async_await.pyi]
+
+[case testNoYieldFromInAsyncDef]
+
+async def f():
+    yield from []
+async def g():
+    x = yield from []
+[builtins fixtures/async_await.pyi]
+[out]
+main:3: error: 'yield from' in async function
+main:5: error: 'yield from' in async function
+
+[case testNoAsyncDefInPY2_python2]
+
+async def f():  # E: invalid syntax
+    pass
+
+[case testYieldFromNoAwaitable]
+
+from typing import Any, Generator
+async def f() -> str:
+    return ''
+def g() -> Generator[Any, None, str]:
+    x = yield from f()
+    return x
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
+main:6: error: "yield from" can't be applied to Awaitable[str]
+
+[case testAwaitableSubclass]
+
+from typing import Any, AsyncIterator, Awaitable, Generator
+class A(Awaitable[int]):
+    def __await__(self) -> Generator[Any, None, int]:
+        yield
+        return 0
+class C:
+    def __aenter__(self) -> A:
+        return A()
+    def __aexit__(self, *a) -> A:
+        return A()
+class I(AsyncIterator[int]):
+    def __aiter__(self) -> 'I':
+        return self
+    def __anext__(self) -> A:
+        return A()
+async def main() -> None:
+    x = await A()
+    reveal_type(x)  # E: Revealed type is 'builtins.int'
+    async with C() as y:
+        reveal_type(y)  # E: Revealed type is 'builtins.int'
+    async for z in I():
+        reveal_type(z)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testYieldTypeCheckInDecoratedCoroutine]
+
+from typing import Generator
+from types import coroutine
+@coroutine
+def f() -> Generator[int, str, int]:
+    x = yield 0
+    x = yield ''  # E: Incompatible types in yield (actual type "str", expected type "int")
+    reveal_type(x)  # E: Revealed type is 'builtins.str'
+    if x:
+        return 0
+    else:
+        return ''  # E: Incompatible return value type (got "str", expected "int")
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+
+-- Async generators (PEP 525), some test cases adapted from the PEP text
+-- ---------------------------------------------------------------------
+
+[case testAsyncGenerator]
+# flags: --python-version 3.6
+from typing import AsyncGenerator, Generator
+
+async def f() -> int:
+    return 42
+
+async def g() -> AsyncGenerator[int, None]:
+    value = await f()
+    reveal_type(value)  # E: Revealed type is 'builtins.int*'
+    yield value
+
+    yield 'not an int'  # E: Incompatible types in yield (actual type "str", expected type "int")
+    # return without a value is fine
+    return
+reveal_type(g)  # E: Revealed type is 'def () -> typing.AsyncGenerator[builtins.int, builtins.None]'
+reveal_type(g())  # E: Revealed type is 'typing.AsyncGenerator[builtins.int, builtins.None]'
+
+async def h() -> None:
+    async for item in g():
+        reveal_type(item)  # E: Revealed type is 'builtins.int*'
+
+async def wrong_return() -> Generator[int, None, None]:  # E: The return type of an async generator function should be "AsyncGenerator" or one of its supertypes
+    yield 3
+
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncGeneratorReturnIterator]
+# flags: --python-version 3.6
+from typing import AsyncIterator
+
+async def gen() -> AsyncIterator[int]:
+    yield 3
+
+    yield 'not an int'  # E: Incompatible types in yield (actual type "str", expected type "int")
+
+async def use_gen() -> None:
+    async for item in gen():
+        reveal_type(item)  # E: Revealed type is 'builtins.int*'
+
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncGeneratorManualIter]
+# flags: --python-version 3.6
+from typing import AsyncGenerator
+
+async def genfunc() -> AsyncGenerator[int, None]:
+    yield 1
+    yield 2
+
+async def user() -> None:
+    gen = genfunc()
+
+    reveal_type(gen.__aiter__())  # E: Revealed type is 'typing.AsyncGenerator[builtins.int*, builtins.None]'
+
+    reveal_type(await gen.__anext__())  # E: Revealed type is 'builtins.int*'
+
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncGeneratorAsend]
+# flags: --fast-parser --python-version 3.6
+from typing import AsyncGenerator
+
+async def f() -> None:
+    pass
+
+async def gen() -> AsyncGenerator[int, str]:
+    await f()
+    v = yield 42
+    reveal_type(v)  # E: Revealed type is 'builtins.str'
+    await f()
+
+async def h() -> None:
+    g = gen()
+    await g.asend(())  # E: Argument 1 to "asend" of "AsyncGenerator" has incompatible type "Tuple[]"; expected "str"
+    reveal_type(await g.asend('hello'))  # E: Revealed type is 'builtins.int*'
+
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncGeneratorAthrow]
+# flags: --fast-parser --python-version 3.6
+from typing import AsyncGenerator
+
+async def gen() -> AsyncGenerator[str, int]:
+    try:
+        yield 'hello'
+    except BaseException:
+        yield 'world'
+
+async def h() -> None:
+    g = gen()
+    v = await g.asend(1)
+    reveal_type(v)  # E: Revealed type is 'builtins.str*'
+    reveal_type(await g.athrow(BaseException))  # E: Revealed type is 'builtins.str*'
+
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncGeneratorNoSyncIteration]
+# flags: --fast-parser --python-version 3.6
+from typing import AsyncGenerator
+
+async def gen() -> AsyncGenerator[int, None]:
+    for i in (1, 2, 3):
+        yield i
+
+def h() -> None:
+    for i in gen():
+        pass
+
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[out]
+main:9: error: Iterable expected
+main:9: error: AsyncGenerator[int, None] has no attribute "__iter__"; maybe "__aiter__"?
+
+[case testAsyncGeneratorNoYieldFrom]
+# flags: --fast-parser --python-version 3.6
+from typing import AsyncGenerator
+
+async def f() -> AsyncGenerator[int, None]:
+    pass
+
+async def gen() -> AsyncGenerator[int, None]:
+    yield from f()  # E: 'yield from' in async function
+
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testAsyncGeneratorNoReturnWithValue]
+# flags: --fast-parser --python-version 3.6
+from typing import AsyncGenerator
+
+async def return_int() -> AsyncGenerator[int, None]:
+    yield 1
+    return 42  # E: 'return' with value in async generator is not allowed
+
+async def return_none() -> AsyncGenerator[int, None]:
+    yield 1
+    return None  # E: 'return' with value in async generator is not allowed
+
+def f() -> None:
+    return
+
+async def return_f() -> AsyncGenerator[int, None]:
+    yield 1
+    return f()  # E: 'return' with value in async generator is not allowed
+
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+-- The full matrix of coroutine compatibility
+-- ------------------------------------------
+
+[case testFullCoroutineMatrix]
+
+from typing import Any, AsyncIterator, Awaitable, Generator, Iterator
+from types import coroutine
+
+# The various things you might try to use in `await` or `yield from`.
+
+def plain_generator() -> Generator[str, None, int]:
+    yield 'a'
+    return 1
+
+async def plain_coroutine() -> int:
+    return 1
+
+@coroutine
+def decorated_generator() -> Generator[str, None, int]:
+    yield 'a'
+    return 1
+
+@coroutine
+async def decorated_coroutine() -> int:
+    return 1
+
+class It(Iterator[str]):
+    def __iter__(self) -> 'It':
+        return self
+    def __next__(self) -> str:
+        return 'a'
+
+def other_iterator() -> It:
+    return It()
+
+class Aw(Awaitable[int]):
+    def __await__(self) -> Generator[str, Any, int]:
+        yield 'a'
+        return 1
+
+def other_coroutine() -> Aw:
+    return Aw()
+
+# The various contexts in which `await` or `yield from` might occur.
+
+def plain_host_generator() -> Generator[str, None, None]:
+    yield 'a'
+    x = 0
+    x = yield from plain_generator()
+    x = yield from plain_coroutine()  # E: "yield from" can't be applied to Awaitable[int]
+    x = yield from decorated_generator()
+    x = yield from decorated_coroutine()  # E: "yield from" can't be applied to AwaitableGenerator[Any, Any, int, Awaitable[int]]
+    x = yield from other_iterator()
+    x = yield from other_coroutine()  # E: "yield from" can't be applied to "Aw"
+
+async def plain_host_coroutine() -> None:
+    x = 0
+    x = await plain_generator()  # E: Incompatible types in await (actual type Generator[str, None, int], expected type Awaitable[Any])
+    x = await plain_coroutine()
+    x = await decorated_generator()
+    x = await decorated_coroutine()
+    x = await other_iterator()  # E: Incompatible types in await (actual type "It", expected type Awaitable[Any])
+    x = await other_coroutine()
+
+@coroutine
+def decorated_host_generator() -> Generator[str, None, None]:
+    yield 'a'
+    x = 0
+    x = yield from plain_generator()
+    x = yield from plain_coroutine()
+    x = yield from decorated_generator()
+    x = yield from decorated_coroutine()
+    x = yield from other_iterator()
+    x = yield from other_coroutine()  # E: "yield from" can't be applied to "Aw"
+
+@coroutine
+async def decorated_host_coroutine() -> None:
+    x = 0
+    x = await plain_generator()  # E: Incompatible types in await (actual type Generator[str, None, int], expected type Awaitable[Any])
+    x = await plain_coroutine()
+    x = await decorated_generator()
+    x = await decorated_coroutine()
+    x = await other_iterator()  # E: Incompatible types in await (actual type "It", expected type Awaitable[Any])
+    x = await other_coroutine()
+
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-full.pyi]
+[out]
diff --git a/test-data/unit/check-basic.test b/test-data/unit/check-basic.test
new file mode 100644
index 0000000..05fa1a9
--- /dev/null
+++ b/test-data/unit/check-basic.test
@@ -0,0 +1,310 @@
+[case testEmptyFile]
+[out]
+
+[case testAssignmentAndVarDef]
+
+a = None # type: A
+b = None # type: B
+a = a
+a = b # Fail
+class A: pass
+class B: pass
+[out]
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testConstructionAndAssignment]
+
+x = None # type: A
+x = A()
+x = B()
+class A:
+    def __init__(self): pass
+class B:
+    def __init__(self): pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testInheritInitFromObject]
+
+x = None # type: A
+x = A()
+x = B()
+class A(object): pass
+class B(object): pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testImplicitInheritInitFromObject]
+
+x = None # type: A
+o = None # type: object
+x = o # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+x = A()
+o = x
+class A: pass
+class B: pass
+[out]
+
+[case testTooManyConstructorArgs]
+import typing
+object(object())
+[out]
+main:2: error: Too many arguments for "object"
+
+[case testVarDefWithInit]
+import typing
+a = A() # type: A
+b = object() # type: A
+class A: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+[case testInheritanceBasedSubtyping]
+import typing
+x = B() # type: A
+y = A() # type: B # Fail
+class A: pass
+class B(A): pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testDeclaredVariableInParentheses]
+
+(x) = None # type: int
+x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+x = 1
+
+
+-- Simple functions and calling
+-- ----------------------------
+
+
+[case testFunction]
+import typing
+def f(x: 'A') -> None: pass
+f(A())
+f(B()) # Fail
+class A: pass
+class B: pass
+[out]
+main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+
+[case testNotCallable]
+import typing
+A()()
+class A: pass
+[out]
+main:2: error: "A" not callable
+
+[case testSubtypeArgument]
+import typing
+def f(x: 'A', y: 'B') -> None: pass
+f(B(), A()) # Fail
+f(B(), B())
+
+class A: pass
+class B(A): pass
+[out]
+main:3: error: Argument 2 to "f" has incompatible type "A"; expected "B"
+
+[case testInvalidArgumentCount]
+import typing
+def f(x, y) -> None: pass
+f(object())
+f(object(), object(), object())
+[out]
+main:3: error: Too few arguments for "f"
+main:4: error: Too many arguments for "f"
+
+
+-- Locals
+-- ------
+
+
+[case testLocalVariables]
+
+def f() -> None:
+  x = None # type: A
+  y = None # type: B
+  x = x
+  x = y # Fail
+class A: pass
+class B: pass
+[out]
+main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testLocalVariableScope]
+
+def f() -> None:
+  x = None # type: A
+  x = A()
+def g() -> None:
+  x = None # type: B
+  x = A() # Fail
+class A: pass
+class B: pass
+[out]
+main:7: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testFunctionArguments]
+import typing
+def f(x: 'A', y: 'B') -> None:
+  x = y # Fail
+  x = x
+  y = B()
+class A: pass
+class B: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testLocalVariableInitialization]
+import typing
+def f() -> None:
+  a = A() # type: A
+  b = B() # type: A # Fail
+class A: pass
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testVariableInitializationWithSubtype]
+import typing
+x = B() # type: A
+y = A() # type: B # Fail
+class A: pass
+class B(A): pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+
+-- Misc
+-- ----
+
+
+[case testInvalidReturn]
+import typing
+def f() -> 'A':
+  return B()
+class A: pass
+class B: pass
+[out]
+main:3: error: Incompatible return value type (got "B", expected "A")
+
+[case testTopLevelContextAndInvalidReturn]
+import typing
+def f() -> 'A':
+  return B()
+a = B() # type: A
+class A: pass
+class B: pass
+[out]
+main:3: error: Incompatible return value type (got "B", expected "A")
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testEmptyReturnInAnyTypedFunction]
+from typing import Any
+def f() -> Any:
+  return
+
+[case testEmptyYieldInAnyTypedFunction]
+from typing import Any
+def f() -> Any:
+  yield
+
+[case testModule__name__]
+import typing
+x = __name__ # type: str
+a = __name__ # type: A  # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+class A: pass
+[builtins fixtures/primitives.pyi]
+
+[case testModule__doc__]
+import typing
+x = __doc__ # type: str
+a = __doc__ # type: A  # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+class A: pass
+[builtins fixtures/primitives.pyi]
+
+[case testModule__file__]
+import typing
+x = __file__ # type: str
+a = __file__ # type: A  # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+class A: pass
+[builtins fixtures/primitives.pyi]
+
+[case test__package__]
+import typing
+x = __package__ # type: str
+a = __file__ # type: int  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+
+-- Scoping and shadowing
+-- ---------------------
+
+
+[case testLocalVariableShadowing]
+
+a = None # type: A
+a = B()       # Fail
+a = A()
+def f() -> None:
+  a = None # type: B
+  a = A()     # Fail
+  a = B()
+a = B()       # Fail
+a = A()
+
+class A: pass
+class B: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:7: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testGlobalDefinedInBlockWithType]
+
+class A: pass
+while A:
+    a = None # type: A
+    a = A()
+    a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+
+-- # type: signatures
+-- ------------------
+
+
+[case testFunctionSignatureAsComment]
+def f(x): # type: (int) -> str
+    return 1
+f('')
+[out]
+main:2: error: Incompatible return value type (got "int", expected "str")
+main:3: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testMethodSignatureAsComment]
+class A:
+    def f(self, x):
+        # type: (int) -> str
+        self.f('') # Fail
+        return 1
+A().f('') # Fail
+[out]
+main:4: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+main:5: error: Incompatible return value type (got "int", expected "str")
+main:6: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testTrailingCommaParsing-skip]
+x = 1
+x in 1,
+if x in 1, :
+    pass
+[out]
+
+[case testInitReturnTypeError]
+class C:
+    def __init__(self):
+        # type: () -> int
+        pass
+[out]
+main:2: error: The return type of "__init__" must be None
diff --git a/test-data/unit/check-bound.test b/test-data/unit/check-bound.test
new file mode 100644
index 0000000..4d6ede2
--- /dev/null
+++ b/test-data/unit/check-bound.test
@@ -0,0 +1,203 @@
+-- Enforcement of upper bounds
+-- ---------------------------
+
+
+[case testBoundOnGenericFunction]
+from typing import TypeVar
+
+class A: pass
+class B(A): pass
+class C(A): pass
+class D: pass
+
+T = TypeVar('T', bound=A)
+U = TypeVar('U')
+def f(x: T) -> T: pass
+def g(x: U) -> U:
+    return f(x) # Fail
+
+f(A())
+f(B())
+f(D()) # Fail
+
+b = B()
+b = f(b)
+b = f(C()) # Fail
+[out]
+main:12: error: Type argument 1 of "f" has incompatible value "U"
+main:16: error: Type argument 1 of "f" has incompatible value "D"
+main:20: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+
+[case testBoundOnGenericClass]
+from typing import TypeVar, Generic
+
+class A: pass
+class B(A): pass
+T = TypeVar('T', bound=A)
+
+class G(Generic[T]):
+    def __init__(self, x: T) -> None: pass
+
+v = None # type: G[A]
+w = None # type: G[B]
+x = None # type: G[str] # E: Type argument "builtins.str" of "G" must be a subtype of "__main__.A"
+y = G('a') # E: Type argument 1 of "G" has incompatible value "str"
+z = G(A())
+z = G(B())
+
+
+[case testBoundVoid]
+from typing import TypeVar, Generic
+T = TypeVar('T', bound=int)
+class C(Generic[T]):
+    t = None # type: T
+    def get(self) -> T:
+        return self.t
+c1 = None # type: C[None]
+c1.get()
+d = c1.get() # E: "get" of "C" does not return a value
+
+
+[case testBoundAny]
+from typing import TypeVar, Generic
+T = TypeVar('T', bound=int)
+class C(Generic[T]):
+    def __init__(self, x: T) -> None: pass
+def f(x: T) -> T:
+    return x
+
+def g(): pass
+
+f(g())
+C(g())
+z = None # type: C
+
+
+[case testBoundHigherOrderWithVoid]
+from typing import TypeVar, Callable
+class A: pass
+T = TypeVar('T', bound=A)
+def f(g: Callable[[], T]) -> T:
+    return g()
+def h() -> None: pass
+f(h)
+a = f(h) # E: "f" does not return a value
+
+
+[case testBoundInheritance]
+from typing import TypeVar, Generic
+class A: pass
+T = TypeVar('T')
+TA = TypeVar('TA', bound=A)
+
+class C(Generic[TA]): pass
+class D0(C[TA], Generic[TA]): pass
+class D1(C[T], Generic[T]): pass # E: Type argument "T`1" of "C" must be a subtype of "__main__.A"
+class D2(C[A]): pass
+class D3(C[str]): pass # E: Type argument "builtins.str" of "C" must be a subtype of "__main__.A"
+
+
+-- Using information from upper bounds
+-- -----------------------------------
+
+
+[case testBoundGenericFunctions]
+from typing import TypeVar
+class A: pass
+class B(A): pass
+
+T = TypeVar('T')
+TA = TypeVar('TA', bound=A)
+TB = TypeVar('TB', bound=B)
+
+def f(x: T) -> T:
+    return x
+def g(x: TA) -> TA:
+    return f(x)
+def h(x: TB) -> TB:
+    return g(x)
+def g2(x: TA) -> TA:
+    return h(x) # Fail
+
+def j(x: TA) -> A:
+    return x
+def k(x: TA) -> B:
+    return x # Fail
+[out]
+main:16: error: Type argument 1 of "h" has incompatible value "TA"
+main:21: error: Incompatible return value type (got "TA", expected "B")
+
+
+[case testBoundMethodUsage]
+from typing import TypeVar
+class A0:
+    def foo(self) -> None: pass
+class A(A0):
+    def bar(self) -> None: pass
+    a = 1
+    @property
+    def b(self) -> int:
+        return self.a
+class B(A):
+    def baz(self) -> None: pass
+
+T = TypeVar('T', bound=A)
+
+def f(x: T) -> T:
+    x.foo()
+    x.bar()
+    x.baz()  # E: "T" has no attribute "baz"
+    x.a
+    x.b
+    return x
+
+b = f(B())
+[builtins fixtures/property.pyi]
+[out]
+
+[case testBoundClassMethod]
+from typing import TypeVar
+class A0:
+    @classmethod
+    def foo(cls, x: int) -> int: pass
+class A(A0): pass
+
+T = TypeVar('T', bound=A)
+def f(x: T) -> int:
+    return x.foo(22)
+[builtins fixtures/classmethod.pyi]
+
+
+[case testBoundStaticMethod]
+from typing import TypeVar
+class A0:
+    @staticmethod
+    def foo(x: int) -> int: pass
+class A(A0): pass
+
+T = TypeVar('T', bound=A)
+def f(x: T) -> int:
+    return x.foo(22)
+[builtins fixtures/staticmethod.pyi]
+
+
+[case testBoundOnDecorator]
+from typing import TypeVar, Callable, Any, cast
+T = TypeVar('T', bound=Callable[..., Any])
+
+def twice(f: T) -> T:
+    def result(*args, **kwargs) -> Any:
+        f(*args, **kwargs)
+        return f(*args, **kwargs)
+    return cast(T, result)
+
+@twice
+def foo(x: int) -> int:
+    return x
+
+a = 1
+b = foo(a)
+b = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+twice(a) # E: Type argument 1 of "twice" has incompatible value "int"
+[builtins fixtures/args.pyi]
diff --git a/test-data/unit/check-callable.test b/test-data/unit/check-callable.test
new file mode 100644
index 0000000..429ad44
--- /dev/null
+++ b/test-data/unit/check-callable.test
@@ -0,0 +1,345 @@
+[case testCallableDef]
+def f() -> None: pass
+
+if callable(f):
+    f()
+else:
+    f += 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableLambda]
+f = lambda: None
+
+if callable(f):
+    f()
+else:
+    f += 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableNotCallable]
+x = 5
+
+if callable(x):
+    x()
+else:
+    x += 5
+
+[builtins fixtures/callable.pyi]
+
+[case testUnion]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, Callable[[], str]]
+
+if callable(x):
+    y = x() + 'test'
+else:
+    z = x + 6
+
+[builtins fixtures/callable.pyi]
+
+[case testUnionMultipleReturnTypes]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, Callable[[], str], Callable[[], int]]
+
+if callable(x):
+    y = x() + 2 # E: Unsupported operand types for + (likely involving Union)
+else:
+    z = x + 6
+
+[builtins fixtures/callable.pyi]
+
+[case testUnionMultipleNonCallableTypes]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, str, Callable[[], str]]
+
+if callable(x):
+    y = x() + 'test'
+else:
+    z = x + 6  # E: Unsupported operand types for + (likely involving Union)
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableThenIsinstance]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, str, Callable[[], str], Callable[[], int]]
+
+if callable(x):
+    y = x()
+    if isinstance(y, int):
+        b1 = y + 2
+    else:
+        b2 = y + 'test'
+else:
+    if isinstance(x, int):
+        b3 = x + 3
+    else:
+        b4 = x + 'test2'
+
+[builtins fixtures/callable.pyi]
+
+[case testIsinstanceThenCallable]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, str, Callable[[], str], Callable[[], int]]
+
+if isinstance(x, int):
+    b1 = x + 1
+else:
+    if callable(x):
+        y = x()
+        if isinstance(y, int):
+            b2 = y + 1
+        else:
+            b3 = y + 'test'
+    else:
+        b4 = x + 'test2'
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableWithDifferentArgTypes]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, Callable[[], None], Callable[[int], None]]
+
+if callable(x):
+    x()  # E: Too few arguments
+
+[builtins fixtures/callable.pyi]
+
+[case testClassInitializer]
+from typing import Callable, Union
+
+class A:
+    x = 5
+
+a = A  # type: Union[A, Callable[[], A]]
+
+if callable(a):
+    a = a()
+
+a.x + 6
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableVariables]
+from typing import Union
+
+class A:
+    x = 5
+
+class B:
+    x = int
+
+x = A()  # type: Union[A, B]
+
+if callable(x.x):
+    y = x.x()
+else:
+    y = x.x + 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableAnd]
+from typing import Union, Callable
+
+x = 5  # type: Union[int, Callable[[], str]]
+
+if callable(x) and x() == 'test':
+    x()
+else:
+    x + 5  # E: Unsupported left operand type for + (some union)
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableOr]
+from typing import Union, Callable
+
+x = 5  # type: Union[int, Callable[[], str]]
+
+if callable(x) or x() == 'test':  # E: "int" not callable
+    x()  # E: "int" not callable
+else:
+    x + 5
+[builtins fixtures/callable.pyi]
+
+[case testCallableOrOtherType]
+from typing import Union, Callable
+
+x = 5  # type: Union[int, Callable[[], str]]
+
+if callable(x) or x == 2:
+    pass
+else:
+    pass
+[builtins fixtures/callable.pyi]
+
+[case testAnyCallable]
+from typing import Any
+
+x = 5  # type: Any
+
+if callable(x):
+    reveal_type(x)  # E: Revealed type is 'Any'
+else:
+    reveal_type(x)  # E: Revealed type is 'Any'
+[builtins fixtures/callable.pyi]
+
+[case testCallableCallableClasses]
+from typing import Union
+
+
+class A:
+    pass
+
+
+class B:
+    def __call__(self) -> None:
+        pass
+
+
+a = A()  # type: A
+b = B()  # type: B
+c = A()  # type: Union[A, B]
+
+if callable(a):
+    5 + 'test'
+
+if not callable(b):
+    5 + 'test'
+
+if callable(c):
+    reveal_type(c)  # E: Revealed type is '__main__.B'
+else:
+    reveal_type(c)  # E: Revealed type is '__main__.A'
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableNestedUnions]
+from typing import Callable, Union
+
+T = Union[Union[int, Callable[[], int]], Union[str, Callable[[], str]]]
+
+def f(t: T) -> None:
+    if callable(t):
+        reveal_type(t())  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+    else:
+        reveal_type(t)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableTypeVarEmpty]
+from typing import TypeVar
+
+T = TypeVar('T')
+
+def f(t: T) -> T:
+    if callable(t):
+        return 5
+    else:
+        return t
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableTypeVarUnion]
+from typing import Callable, TypeVar, Union
+
+T = TypeVar('T', int, Callable[[], int], Union[str, Callable[[], str]])
+
+def f(t: T) -> None:
+    if callable(t):
+        reveal_type(t())  # E: Revealed type is 'builtins.int'  # E: Revealed type is 'builtins.str'
+    else:
+        reveal_type(t)  # E: Revealed type is 'builtins.int*'  # E: Revealed type is 'builtins.str'
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableTypeVarBound]
+from typing import TypeVar
+
+
+class A:
+    def __call__(self) -> str:
+        return 'hi'
+
+
+T = TypeVar('T', bound=A)
+
+def f(t: T) -> str:
+    if callable(t):
+        return t()
+    else:
+        return 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableTypeType]
+from typing import Type
+
+
+class A:
+    pass
+
+
+T = Type[A]
+
+def f(t: T) -> A:
+    if callable(t):
+        return t()
+    else:
+        return 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableTypeUnion]
+from abc import ABCMeta, abstractmethod
+from typing import Type, Union
+
+
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self) -> None:
+        pass
+
+
+class B:
+    pass
+
+
+x = B  # type: Union[Type[A], Type[B]]
+if callable(x):
+    # Abstract classes raise an error when called, but are indeed `callable`
+    pass
+else:
+    'test' + 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableUnionOfTypes]
+from abc import ABCMeta, abstractmethod
+from typing import Type, Union
+
+
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self) -> None:
+        pass
+
+
+class B:
+    pass
+
+
+x = B  # type: Type[Union[A, B]]
+if callable(x):
+    # Abstract classes raise an error when called, but are indeed `callable`
+    pass
+else:
+    'test' + 5
+
+[builtins fixtures/callable.pyi]
diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test
new file mode 100644
index 0000000..710f750
--- /dev/null
+++ b/test-data/unit/check-class-namedtuple.test
@@ -0,0 +1,669 @@
+[case testNewNamedTupleOldPythonVersion]
+# flags: --python-version 3.5
+from typing import NamedTuple
+
+class E(NamedTuple):  # E: NamedTuple class syntax is only supported in Python 3.6
+    pass
+
+[case testNewNamedTupleNoUnderscoreFields]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    _y: int  # E: NamedTuple field name cannot start with an underscore: _y
+    _z: int  # E: NamedTuple field name cannot start with an underscore: _z
+
+[case testNewNamedTupleAccessingAttributes]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+x: X
+x.x
+x.y
+x.z # E: "X" has no attribute "z"
+
+[case testNewNamedTupleAttributesAreReadOnly]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+
+x: X
+x.x = 5 # E: Property "x" defined in "X" is read-only
+x.y = 5 # E: "X" has no attribute "y"
+
+class A(X): pass
+a: A
+a.x = 5 # E: Property "x" defined in "A" is read-only
+
+[case testNewNamedTupleCreateWithPositionalArguments]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+x = X(1, '2')
+x.x
+x.z      # E: "X" has no attribute "z"
+x = X(1) # E: Too few arguments for "X"
+x = X(1, '2', 3)  # E: Too many arguments for "X"
+
+[case testNewNamedTupleShouldBeSingleBase]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class A: ...
+class X(NamedTuple, A):  # E: NamedTuple should be a single base
+    pass
+
+[case testCreateNewNamedTupleWithKeywordArguments]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+x = X(x=1, y='x')
+x = X(1, y='x')
+x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X"
+x = X(y='x') # E: Missing positional argument "x" in call to "X"
+
+[case testNewNamedTupleCreateAndUseAsTuple]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+x = X(1, 'x')
+a, b = x
+a, b, c = x  # E: Need more than 2 values to unpack (3 expected)
+
+[case testNewNamedTupleWithItemTypes]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class N(NamedTuple):
+    a: int
+    b: str
+
+n = N(1, 'x')
+s: str = n.a  # E: Incompatible types in assignment (expression has type "int", \
+                          variable has type "str")
+i: int = n.b  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+x, y = n
+x = y  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testNewNamedTupleConstructorArgumentTypes]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class N(NamedTuple):
+    a: int
+    b: str
+
+n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "int"
+n = N(1, b=2)   # E: Argument 2 to "N" has incompatible type "int"; expected "str"
+N(1, 'x')
+N(b='x', a=1)
+
+[case testNewNamedTupleAsBaseClass]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class N(NamedTuple):
+    a: int
+    b: str
+
+class X(N):
+    pass
+x = X(1, 2)  # E: Argument 2 to "X" has incompatible type "int"; expected "str"
+s = ''
+i = 0
+s = x.a  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+i, s = x
+s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testNewNamedTupleSelfTypeWithNamedTupleAsBase]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class A(NamedTuple):
+    a: int
+    b: str
+
+class B(A):
+    def f(self, x: int) -> None:
+        self.f(self.a)
+        self.f(self.b)  # E: Argument 1 to "f" of "B" has incompatible type "str"; expected "int"
+        i = 0
+        s = ''
+        i, s = self
+        i, i = self  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+[out]
+
+[case testNewNamedTupleTypeReferenceToClassDerivedFrom]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class A(NamedTuple):
+    a: int
+    b: str
+
+class B(A):
+    def f(self, x: 'B') -> None:
+        i = 0
+        s = ''
+        self = x
+        i, s = x
+        i, s = x.a, x.b
+        i, s = x.a, x.a  # E: Incompatible types in assignment (expression has type "int", \
+                              variable has type "str")
+        i, i = self  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+
+[out]
+
+[case testNewNamedTupleSubtyping]
+# flags: --python-version 3.6
+from typing import NamedTuple, Tuple
+
+class A(NamedTuple):
+    a: int
+    b: str
+
+class B(A): pass
+a = A(1, '')
+b = B(1, '')
+t: Tuple[int, str]
+b = a  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = t  # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "A")
+b = t  # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "B")
+t = a
+t = (1, '')
+t = b
+a = b
+
+[case testNewNamedTupleSimpleTypeInference]
+# flags: --python-version 3.6
+from typing import NamedTuple, Tuple
+
+class A(NamedTuple):
+    a: int
+
+l = [A(1), A(2)]
+a = A(1)
+a = l[0]
+(i,) = l[0]
+i, i = l[0]  # E: Need more than 1 value to unpack (2 expected)
+l = [A(1)]
+a = (1,)  # E: Incompatible types in assignment (expression has type "Tuple[int]", \
+               variable has type "A")
+[builtins fixtures/list.pyi]
+
+[case testNewNamedTupleMissingClassAttribute]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class MyNamedTuple(NamedTuple):
+    a: int
+    b: str
+
+MyNamedTuple.x # E: Type[MyNamedTuple] has no attribute "x"
+
+[case testNewNamedTupleEmptyItems]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class A(NamedTuple):
+    ...
+
+[case testNewNamedTupleForwardRef]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class A(NamedTuple):
+    b: 'B'
+
+class B: ...
+
+a = A(B())
+a = A(1)  # E: Argument 1 to "A" has incompatible type "int"; expected "B"
+
+[case testNewNamedTupleProperty]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class A(NamedTuple):
+    a: int
+
+class B(A):
+    @property
+    def b(self) -> int:
+        return self.a
+class C(B): pass
+B(1).b
+C(2).b
+
+[builtins fixtures/property.pyi]
+
+[case testNewNamedTupleAsDict]
+# flags: --python-version 3.6
+from typing import NamedTuple, Any
+
+class X(NamedTuple):
+    x: Any
+    y: Any
+
+x: X
+reveal_type(x._asdict())  # E: Revealed type is 'builtins.dict[builtins.str, Any]'
+
+[builtins fixtures/dict.pyi]
+
+[case testNewNamedTupleReplaceTyped]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+x: X
+reveal_type(x._replace())  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
+x._replace(x=5)
+x._replace(y=5)  # E: Argument 1 to X._replace has incompatible type "int"; expected "str"
+
+[case testNewNamedTupleFields]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+reveal_type(X._fields)  # E: Revealed type is 'Tuple[builtins.str, builtins.str]'
+reveal_type(X._field_types)  # E: Revealed type is 'builtins.dict[builtins.str, Any]'
+reveal_type(X._field_defaults)  # E: Revealed type is 'builtins.dict[builtins.str, Any]'
+reveal_type(X.__annotations__)  # E: Revealed type is 'builtins.dict[builtins.str, Any]'
+
+[builtins fixtures/dict.pyi]
+
+[case testNewNamedTupleUnit]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    pass
+
+x: X = X()
+x._replace()
+x._fields[0]  # E: Tuple index out of range
+
+[case testNewNamedTupleJoinNamedTuple]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+class Y(NamedTuple):
+    x: int
+    y: str
+
+reveal_type([X(3, 'b'), Y(1, 'a')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+
+[builtins fixtures/list.pyi]
+
+[case testNewNamedTupleJoinTuple]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+reveal_type([(3, 'b'), X(1, 'a')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+reveal_type([X(1, 'a'), (3, 'b')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+
+[builtins fixtures/list.pyi]
+
+[case testNewNamedTupleWithTooManyArguments]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y = z = 2  # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"
+    def f(self): pass
+
+[case testNewNamedTupleWithInvalidItems2]
+# flags: --python-version 3.6
+import typing
+
+class X(typing.NamedTuple):
+    x: int
+    y = 1
+    x.x: int
+    z: str = 'z'
+    aa: int
+
+[out]
+main:6: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"
+main:7: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"
+main:7: error: Type cannot be declared in assignment to non-self attribute
+main:7: error: "int" has no attribute "x"
+main:9: error: Non-default NamedTuple fields cannot follow default fields
+
+[builtins fixtures/list.pyi]
+
+[case testNewNamedTupleWithoutTypesSpecified]
+# flags: --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y = 2  # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"
+
+[case testTypeUsingTypeCNamedTuple]
+# flags: --python-version 3.6
+from typing import NamedTuple, Type
+
+class N(NamedTuple):
+    x: int
+    y: str
+
+def f(a: Type[N]):
+    a()
+[builtins fixtures/list.pyi]
+[out]
+main:8: error: Unsupported type Type["N"]
+
+[case testNewNamedTupleWithDefaults]
+# flags: --fast-parser --python-version 3.6
+from typing import List, NamedTuple, Optional
+
+class X(NamedTuple):
+    x: int
+    y: int = 2
+
+reveal_type(X(1))  # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.X]'
+reveal_type(X(1, 2))  # E: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.X]'
+
+X(1, 'a')  # E: Argument 2 to "X" has incompatible type "str"; expected "int"
+X(1, z=3)  # E: Unexpected keyword argument "z" for "X"
+
+class HasNone(NamedTuple):
+    x: int
+    y: Optional[int] = None
+
+reveal_type(HasNone(1))  # E: Revealed type is 'Tuple[builtins.int, Union[builtins.int, builtins.None], fallback=__main__.HasNone]'
+
+class Parameterized(NamedTuple):
+    x: int
+    y: List[int] = [1] + [2]
+    z: List[int] = []
+
+reveal_type(Parameterized(1))  # E: Revealed type is 'Tuple[builtins.int, builtins.list[builtins.int], builtins.list[builtins.int], fallback=__main__.Parameterized]'
+Parameterized(1, ['not an int'])  # E: List item 0 has incompatible type "str"
+
+class Default:
+    pass
+
+class UserDefined(NamedTuple):
+    x: Default = Default()
+
+reveal_type(UserDefined())  # E: Revealed type is 'Tuple[__main__.Default, fallback=__main__.UserDefined]'
+reveal_type(UserDefined(Default()))  # E: Revealed type is 'Tuple[__main__.Default, fallback=__main__.UserDefined]'
+UserDefined(1)  # E: Argument 1 to "UserDefined" has incompatible type "int"; expected "Default"
+
+[builtins fixtures/list.pyi]
+
+[case testNewNamedTupleWithDefaultsStrictOptional]
+# flags: --fast-parser --strict-optional --python-version 3.6
+from typing import List, NamedTuple, Optional
+
+class HasNone(NamedTuple):
+    x: int
+    y: Optional[int] = None
+
+reveal_type(HasNone(1))  # E: Revealed type is 'Tuple[builtins.int, Union[builtins.int, builtins.None], fallback=__main__.HasNone]'
+HasNone(None)  # E: Argument 1 to "HasNone" has incompatible type None; expected "int"
+HasNone(1, y=None)
+HasNone(1, y=2)
+
+class CannotBeNone(NamedTuple):
+    x: int
+    y: int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+
+[builtins fixtures/list.pyi]
+
+[case testNewNamedTupleWrongType]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: int = 'not an int'  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testNewNamedTupleErrorInDefault]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int = 1 + '1'  # E: Unsupported operand types for + ("int" and "str")
+
+[case testNewNamedTupleInheritance]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: str
+    y: int = 3
+
+class Y(X):
+    def method(self) -> str:
+        self.y
+        return self.x
+
+reveal_type(Y('a'))  # E: Revealed type is 'Tuple[builtins.str, builtins.int, fallback=__main__.Y]'
+Y(y=1, x='1').method()
+
+class CallsBaseInit(X):
+    def __init__(self, x: str) -> None:
+        super().__init__(x)
+
+[case testNewNamedTupleWithMethods]
+from typing import NamedTuple
+
+class XMeth(NamedTuple):
+    x: int
+    def double(self) -> int:
+        return self.x
+    async def asyncdouble(self) -> int:
+        return self.x
+
+class XRepr(NamedTuple):
+    x: int
+    y: int = 1
+    def __str__(self) -> str:
+        return 'string'
+    def __add__(self, other: XRepr) -> int:
+        return 0
+
+reveal_type(XMeth(1).double()) # E: Revealed type is 'builtins.int'
+reveal_type(XMeth(1).asyncdouble())  # E: Revealed type is 'typing.Awaitable[builtins.int]'
+reveal_type(XMeth(42).x)  # E: Revealed type is 'builtins.int'
+reveal_type(XRepr(42).__str__())  # E: Revealed type is 'builtins.str'
+reveal_type(XRepr(1, 2).__add__(XRepr(3)))  # E: Revealed type is 'builtins.int'
+[typing fixtures/typing-full.pyi]
+
+[case testNewNamedTupleOverloading]
+from typing import NamedTuple, overload
+
+class Overloader(NamedTuple):
+    x: int
+    @overload
+    def method(self, y: str) -> str: pass
+    @overload
+    def method(self, y: int) -> int: pass
+    def method(self, y):
+        return y
+
+reveal_type(Overloader(1).method('string'))  # E: Revealed type is 'builtins.str'
+reveal_type(Overloader(1).method(1))  # E: Revealed type is 'builtins.int'
+Overloader(1).method(('tuple',))  # E: No overload variant of "method" of "Overloader" matches argument types [Tuple[builtins.str]]
+
+[case testNewNamedTupleMethodInheritance]
+from typing import NamedTuple, TypeVar
+
+T = TypeVar('T')
+
+class Base(NamedTuple):
+    x: int
+    def copy(self: T) -> T:
+        reveal_type(self)  # E: Revealed type is 'T`-1'
+        return self
+    def good_override(self) -> int:
+        reveal_type(self)  # E: Revealed type is 'Tuple[builtins.int, fallback=__main__.Base]'
+        reveal_type(self[0])  # E: Revealed type is 'builtins.int'
+        self[0] = 3  # E: Unsupported target for indexed assignment
+        reveal_type(self.x)  # E: Revealed type is 'builtins.int'
+        self.x = 3  # E: Property "x" defined in "Base" is read-only
+        self[1]  # E: Tuple index out of range
+        reveal_type(self[T])  # E: Revealed type is 'builtins.int'
+        return self.x
+    def bad_override(self) -> int:
+        return self.x
+
+class Child(Base):
+    def new_method(self) -> int:
+        reveal_type(self)  # E: Revealed type is 'Tuple[builtins.int, fallback=__main__.Child]'
+        reveal_type(self[0])  # E: Revealed type is 'builtins.int'
+        self[0] = 3  # E: Unsupported target for indexed assignment
+        reveal_type(self.x)  # E: Revealed type is 'builtins.int'
+        self.x = 3  # E: Property "x" defined in "Child" is read-only
+        self[1]  # E: Tuple index out of range
+        return self.x
+    def good_override(self) -> int:
+        return 0
+    def bad_override(self) -> str:  # E: Return type of "bad_override" incompatible with supertype "Base"
+        return 'incompatible'
+
+def takes_base(base: Base) -> int:
+    return base.x
+
+reveal_type(Base(1).copy())  # E: Revealed type is 'Tuple[builtins.int, fallback=__main__.Base]'
+reveal_type(Child(1).copy())  # E: Revealed type is 'Tuple[builtins.int, fallback=__main__.Child]'
+reveal_type(Base(1).good_override())  # E: Revealed type is 'builtins.int'
+reveal_type(Child(1).good_override())  # E: Revealed type is 'builtins.int'
+reveal_type(Base(1).bad_override())  # E: Revealed type is 'builtins.int'
+reveal_type(takes_base(Base(1)))  # E: Revealed type is 'builtins.int'
+reveal_type(takes_base(Child(1)))  # E: Revealed type is 'builtins.int'
+[builtins fixtures/tuple.pyi]
+
+[case testNewNamedTupleIllegalNames]
+from typing import Callable, NamedTuple
+
+class XMethBad(NamedTuple):
+    x: int
+    def _fields(self):  # E: Cannot overwrite NamedTuple attribute "_fields"
+        return 'no chance for this'
+
+class MagicalFields(NamedTuple):
+    x: int
+    def __slots__(self) -> None: pass  # E: Cannot overwrite NamedTuple attribute "__slots__"
+    def __new__(cls) -> None: pass  # E: Cannot overwrite NamedTuple attribute "__new__"
+    def _source(self) -> int: pass  # E: Cannot overwrite NamedTuple attribute "_source"
+    __annotations__ = {'x': float}  # E: NamedTuple field name cannot start with an underscore: __annotations__ \
+        # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" \
+        # E: Cannot overwrite NamedTuple attribute "__annotations__"
+
+class AnnotationsAsAMethod(NamedTuple):
+    x: int
+    # This fails at runtime because typing.py assumes that __annotations__ is a dictionary.
+    def __annotations__(self) -> float:  # E: Cannot overwrite NamedTuple attribute "__annotations__"
+        return 1.0
+
+class ReuseNames(NamedTuple):
+    x: int
+    def x(self) -> str:  # E: Name 'x' already defined
+        return ''
+
+    def y(self) -> int:
+        return 0
+    y: str  # E: Name 'y' already defined
+
+class ReuseCallableNamed(NamedTuple):
+    z: Callable[[ReuseNames], int]
+    def z(self) -> int:  # E: Name 'z' already defined
+        return 0
+
+[builtins fixtures/dict.pyi]
+
+[case testNewNamedTupleDocString]
+from typing import NamedTuple
+
+class Documented(NamedTuple):
+    """This is a docstring."""
+    x: int
+
+reveal_type(Documented.__doc__)  # E: Revealed type is 'builtins.str'
+reveal_type(Documented(1).x)  # E: Revealed type is 'builtins.int'
+
+class BadDoc(NamedTuple):
+    x: int
+    def __doc__(self) -> str:
+        return ''
+
+reveal_type(BadDoc(1).__doc__())  # E: Revealed type is 'builtins.str'
+
+[case testNewNamedTupleClassMethod]
+from typing import NamedTuple
+
+class HasClassMethod(NamedTuple):
+    x: str
+
+    @classmethod
+    def new(cls, f: str) -> 'HasClassMethod':
+        reveal_type(cls)  # E: Revealed type is 'def (x: builtins.str) -> Tuple[builtins.str, fallback=__main__.HasClassMethod]'
+        reveal_type(HasClassMethod)  # E: Revealed type is 'def (x: builtins.str) -> Tuple[builtins.str, fallback=__main__.HasClassMethod]'
+        return cls(x=f)
+
+[builtins fixtures/classmethod.pyi]
+
+[case testNewNamedTupleStaticMethod]
+from typing import NamedTuple
+
+class HasStaticMethod(NamedTuple):
+    x: str
+
+    @staticmethod
+    def new(f: str) -> 'HasStaticMethod':
+        return HasStaticMethod(x=f)
+
+[builtins fixtures/classmethod.pyi]
+
+[case testNewNamedTupleProperty]
+from typing import NamedTuple
+
+class HasStaticMethod(NamedTuple):
+    x: str
+
+    @property
+    def size(self) -> int:
+        reveal_type(self)  # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.HasStaticMethod]'
+        return 4
+
+[builtins fixtures/property.pyi]
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test
new file mode 100644
index 0000000..0b4b0f5
--- /dev/null
+++ b/test-data/unit/check-classes.test
@@ -0,0 +1,3551 @@
+-- Methods
+-- -------
+
+
+[case testMethodCall]
+
+a = None # type: A
+b = None # type: B
+
+a.foo(B())        # Fail
+a.bar(B(), A())   # Fail
+
+a.foo(A())
+b.bar(B(), A())
+
+class A:
+    def foo(self, x: 'A') -> None: pass
+class B:
+    def bar(self, x: 'B', y: A) -> None: pass
+[out]
+main:5: error: Argument 1 to "foo" of "A" has incompatible type "B"; expected "A"
+main:6: error: "A" has no attribute "bar"
+
+[case testMethodCallWithSubtype]
+
+a = None # type: A
+a.foo(A())
+a.foo(B())
+a.bar(A()) # Fail
+a.bar(B())
+
+class A:
+    def foo(self, x: 'A') -> None: pass
+    def bar(self, x: 'B') -> None: pass
+class B(A): pass
+[out]
+main:5: error: Argument 1 to "bar" of "A" has incompatible type "A"; expected "B"
+
+[case testInheritingMethod]
+
+a = None # type: B
+a.foo(A()) # Fail
+a.foo(B())
+
+class A:
+    def foo(self, x: 'B') -> None: pass
+class B(A): pass
+[out]
+main:3: error: Argument 1 to "foo" of "A" has incompatible type "A"; expected "B"
+
+[case testMethodCallWithInvalidNumberOfArguments]
+
+a = None # type: A
+a.foo()               # Fail
+a.foo(object(), A())  # Fail
+
+class A:
+    def foo(self, x: 'A') -> None: pass
+[out]
+main:3: error: Too few arguments for "foo" of "A"
+main:4: error: Too many arguments for "foo" of "A"
+main:4: error: Argument 1 to "foo" of "A" has incompatible type "object"; expected "A"
+
+[case testMethodBody]
+import typing
+class A:
+    def f(self) -> None:
+        a = object() # type: A    # Fail
+[out]
+main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+[case testMethodArguments]
+import typing
+class A:
+    def f(self, a: 'A', b: 'B') -> None:
+        a = B() # Fail
+        b = A() # Fail
+        a = A()
+        b = B()
+        a = a
+        a = b # Fail
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testReturnFromMethod]
+import typing
+class A:
+    def f(self) -> 'A':
+        return B() # Fail
+        return A()
+class B: pass
+[out]
+main:4: error: Incompatible return value type (got "B", expected "A")
+
+[case testSelfArgument]
+import typing
+class A:
+    def f(self) -> None:
+        o = self # type: B    # Fail
+        self.g()      # Fail
+        a = self # type: A
+        self.f()
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:5: error: "A" has no attribute "g"
+
+[case testAssignToMethodViaInstance]
+import typing
+class A:
+    def f(self): pass
+A().f = None # E: Cannot assign to a method
+
+
+-- Attributes
+-- ----------
+
+
+[case testReferToInvalidAttribute]
+
+class A:
+    def __init__(self):
+        self.x = object()
+a = None # type: A
+a.y
+a.y = object()
+a.x
+a.x = object()
+[out]
+main:6: error: "A" has no attribute "y"
+main:7: error: "A" has no attribute "y"
+
+[case testArgumentTypeInference]
+
+class A:
+    def __init__(self, aa: 'A', bb: 'B') -> None:
+        self.a = aa
+        self.b = bb
+class B: pass
+a = None # type: A
+b = None # type: B
+a.a = b # Fail
+a.b = a # Fail
+b.a     # Fail
+a.a = a
+a.b = b
+[out]
+main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:10: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:11: error: "B" has no attribute "a"
+
+[case testExplicitAttributeInBody]
+
+a = None # type: A
+a.x = object() # Fail
+a.x = A()
+class A:
+  x = None # type: A
+[out]
+main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+[case testAttributeDefinedInNonInitMethod]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 1
+        self.y = ''
+        self.x = 1
+a = A()
+a.x = 1
+a.y = ''
+a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+a.z = 0  # E: "A" has no attribute "z"
+
+[case testInheritanceAndAttributeAssignment]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 0
+class B(A):
+    def f(self) -> None:
+        self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[out]
+
+[case testAssignmentToAttributeInMultipleMethods]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 0
+    def g(self) -> None:
+        self.x = '' # Fail
+    def __init__(self) -> None:
+        self.x = '' # Fail
+[out]
+main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:8: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testClassNamesDefinedOnSelUsedInClassBody]
+class A(object):
+    def f(self):
+        self.attr = 1
+    attr = 0
+
+class B(object):
+    attr = 0
+    def f(self):
+        self.attr = 1
+
+class C(object):
+    attr = 0
+    def f(self):
+        self.attr = 1
+    attr = 0
+
+class D(object):
+    def g(self):
+        self.attr = 1
+    attr = 0
+    def f(self):
+        self.attr = 1
+[out]
+
+[case testClassNamesDefinedOnSelUsedInClassBodyReveal]
+class A(object):
+    def f(self) -> None:
+        self.attr = 1
+    reveal_type(attr)  # E: Revealed type is 'builtins.int'
+
+class B(object):
+    attr = 0
+    def f(self) -> None:
+        reveal_type(self.attr)  # E: Revealed type is 'builtins.int'
+[out]
+
+
+-- Method overriding
+-- -----------------
+
+
+[case testMethodOverridingWithIdenticalSignature]
+import typing
+class A:
+    def f(self, x: 'A') -> None: pass
+    def g(self, x: 'B' , y: object) -> 'A': pass
+    def h(self) -> None: pass
+class B(A):
+    def f(self, x: A) -> None: pass
+    def g(self, x: 'B' , y: object) -> A: pass
+    def h(self) -> None: pass
+[out]
+
+[case testMethodOverridingWithCovariantType]
+import typing
+class A:
+  def f(self, x: 'A', y: 'B') -> 'A': pass
+  def g(self, x: 'A', y: 'B') -> 'A': pass
+class B(A):
+  def f(self, x: A, y: 'B') -> 'B': pass
+  def g(self, x: A, y: A) -> 'A': pass
+[out]
+
+[case testMethodOverridingWithIncompatibleTypes]
+import typing
+class A:
+  def f(self, x: 'A', y: 'B') -> 'A': pass
+  def g(self, x: 'A', y: 'B') -> 'A': pass
+  def h(self, x: 'A', y: 'B') -> 'A': pass
+class B(A):
+  def f(self, x: 'B', y: 'B') -> A: pass  # Fail
+  def g(self, x: A, y: A) -> A: pass
+  def h(self, x: A, y: 'B') -> object: pass  # Fail
+[out]
+main:7: error: Argument 1 of "f" incompatible with supertype "A"
+main:9: error: Return type of "h" incompatible with supertype "A"
+
+[case testMethodOverridingWithIncompatibleArgumentCount]
+import typing
+class A:
+    def f(self, x: 'A') -> None: pass
+    def g(self, x: 'A', y: 'B') -> 'A': pass
+class B(A):
+    def f(self, x: A, y: A) -> None: pass # Fail
+    def g(self, x: A) -> A: pass # Fail
+[out]
+main:6: error: Signature of "f" incompatible with supertype "A"
+main:7: error: Signature of "g" incompatible with supertype "A"
+
+[case testMethodOverridingAcrossDeepInheritanceHierarchy1]
+import typing
+class A:
+    def f(self, x: 'B') -> None: pass
+class B(A): pass
+class C(B): # with gap in implementations
+    def f(self, x: 'C') -> None:  # Fail
+        pass
+[out]
+main:6: error: Argument 1 of "f" incompatible with supertype "A"
+
+[case testMethodOverridingAcrossDeepInheritanceHierarchy2]
+import typing
+class A:
+    def f(self) -> 'B': pass
+class B(A):
+    def f(self) -> 'C': pass
+class C(B): # with multiple implementations
+    def f(self) -> B:  # Fail
+        pass
+[out]
+main:7: error: Return type of "f" incompatible with supertype "B"
+
+[case testMethodOverridingWithVoidReturnValue]
+import typing
+class A:
+    def f(self) -> None: pass
+    def g(self) -> 'A': pass
+class B(A):
+    def f(self) -> A: pass  # Fail
+    def g(self) -> None: pass
+[out]
+main:6: error: Return type of "f" incompatible with supertype "A"
+
+[case testOverride__new__WithDifferentSignature]
+class A:
+    def __new__(cls, x: int) -> str:
+        return ''
+
+class B(A):
+    def __new__(cls) -> int:
+        return 1
+
+[case testInnerFunctionNotOverriding]
+class A:
+    def f(self) -> int: pass
+
+class B(A):
+    def g(self) -> None:
+        def f(self) -> str: pass
+
+[case testOverride__init_subclass__WithDifferentSignature]
+class A:
+    def __init_subclass__(cls, x: int) -> None: pass
+class B(A):
+    def __init_subclass__(cls) -> None: pass
+
+
+-- Constructors
+-- ------------
+
+
+[case testTrivialConstructor]
+import typing
+a = A() # type: A
+b = A() # type: B # Fail
+class A:
+    def __init__(self) -> None: pass
+class B: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testConstructor]
+import typing
+a = A(B()) # type: A
+aa = A(object()) # type: A  # Fail
+b = A(B()) # type: B       # Fail
+class A:
+    def __init__(self, x: 'B') -> None: pass
+class B: pass
+[out]
+main:3: error: Argument 1 to "A" has incompatible type "object"; expected "B"
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testConstructorWithTwoArguments]
+import typing
+a = A(C(), B()) # type: A  # Fail
+
+class A:
+    def __init__(self, x: 'B', y: 'C') -> None: pass
+class B: pass
+class C(B): pass
+[out]
+main:2: error: Argument 2 to "A" has incompatible type "B"; expected "C"
+
+[case testInheritedConstructor]
+import typing
+b = B(C()) # type: B
+a = B(D()) # type: A # Fail
+class A:
+    def __init__(self, x: 'C') -> None: pass
+class B(A): pass
+class C: pass
+class D: pass
+[out]
+main:3: error: Argument 1 to "B" has incompatible type "D"; expected "C"
+
+[case testOverridingWithIncompatibleConstructor]
+import typing
+A()    # Fail
+B(C()) # Fail
+A(C())
+B()
+class A:
+    def __init__(self, x: 'C') -> None: pass
+class B(A):
+    def __init__(self) -> None: pass
+class C: pass
+[out]
+main:2: error: Too few arguments for "A"
+main:3: error: Too many arguments for "B"
+
+[case testConstructorWithReturnValueType]
+import typing
+class A:
+    def __init__(self) -> 'A': pass
+[out]
+main:3: error: The return type of "__init__" must be None
+
+[case testConstructorWithImplicitReturnValueType]
+import typing
+class A:
+    def __init__(self, x: int): pass
+[out]
+main:3: error: The return type of "__init__" must be None
+
+[case testInitSubclassWithReturnValueType]
+import typing
+class A:
+    def __init_subclass__(cls) -> 'A': pass
+[out]
+main:3: error: The return type of "__init_subclass__" must be None
+
+[case testInitSubclassWithImplicitReturnValueType]
+import typing
+class A:
+    def __init_subclass__(cls, x: int=1): pass
+[out]
+main:3: error: The return type of "__init_subclass__" must be None
+
+[case testGlobalFunctionInitWithReturnType]
+import typing
+a = __init__() # type: A
+b = __init__() # type: B # Fail
+def __init__() -> 'A': pass
+class A: pass
+class B: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testAccessingInit]
+from typing import Any, cast
+class A:
+    def __init__(self, a: 'A') -> None: pass
+a = None # type: A
+a.__init__(a)  # E: Cannot access "__init__" directly
+(cast(Any, a)).__init__(a)
+
+[case testDeepInheritanceHierarchy]
+import typing
+d = C() # type: D  # Fail
+d = B()      # Fail
+d = A()      # Fail
+d = D2()     # Fail
+a = D() # type: A
+a = D2()
+b = D() # type: B
+b = D2()
+
+class A: pass
+class B(A): pass
+class C(B): pass
+class D(C): pass
+class D2(C): pass
+[out]
+main:2: error: Incompatible types in assignment (expression has type "C", variable has type "D")
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "D")
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "D")
+main:5: error: Incompatible types in assignment (expression has type "D2", variable has type "D")
+
+
+-- Attribute access in class body
+-- ------------------------------
+
+
+[case testDataAttributeRefInClassBody]
+import typing
+class B: pass
+class A:
+    x = B()
+    y = x
+    b = x # type: B
+    b = x
+    c = x # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    c = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[out]
+
+[case testMethodRefInClassBody]
+from typing import Callable
+class B: pass
+class A:
+    def f(self) -> None: pass
+    g = f
+    h = f # type: Callable[[A], None]
+    h = f
+    g = h
+    ff = f # type: Callable[[B], None]  # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[B], None])
+    g = ff                # E: Incompatible types in assignment (expression has type Callable[[B], None], variable has type Callable[[A], None])
+[out]
+
+
+-- Arbitrary statements in class body
+-- ----------------------------------
+
+
+[case testStatementsInClassBody]
+import typing
+class B: pass
+class A:
+    for x in [A()]:
+        y = x
+        y = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    x = A()
+    y = A()
+    x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Class attributes
+-- ----------------
+
+
+[case testAccessMethodViaClass]
+import typing
+class A:
+    def f(self) -> None: pass
+A.f(A())
+A.f(object())     # E: Argument 1 to "f" of "A" has incompatible type "object"; expected "A"
+A.f()             # E: Too few arguments for "f" of "A"
+A.f(None, None)   # E: Too many arguments for "f" of "A"
+
+[case testAccessAttributeViaClass]
+import typing
+class B: pass
+class A:
+    x = None # type: A
+a = A.x # type: A
+b = A.x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testAccessingUndefinedAttributeViaClass]
+import typing
+class A: pass
+A.x # E: Type[A] has no attribute "x"
+
+[case testAccessingUndefinedAttributeViaClassWithOverloadedInit]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A:
+    @overload
+    def __init__(self): pass
+    @overload
+    def __init__(self, x): pass
+A.x # E: Type[A] has no attribute "x"
+
+[case testAccessMethodOfClassWithOverloadedInit]
+from foo import *
+[file foo.pyi]
+from typing import overload, Any
+class A:
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, x: Any) -> None: pass
+    def f(self) -> None: pass
+A.f(A())
+A.f()    # E: Too few arguments for "f" of "A"
+
+[case testAssignmentToClassDataAttribute]
+import typing
+class B: pass
+class A:
+    x = None # type: B
+A.x = B()
+A.x = object()  # E: Incompatible types in assignment (expression has type "object", variable has type "B")
+
+[case testAssignmentToInferredClassDataAttribute]
+import typing
+class B: pass
+class A:
+     x = B()
+A.x = B()
+A.x = A()   # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testInitMethodUnbound]
+
+class B: pass
+class A:
+    def __init__(self, b: B) -> None: pass
+a = None # type: A
+b = None # type: B
+A.__init__(a, b)
+A.__init__(b, b) # E: Argument 1 to "__init__" of "A" has incompatible type "B"; expected "A"
+A.__init__(a, a) # E: Argument 2 to "__init__" of "A" has incompatible type "A"; expected "B"
+
+[case testAssignToMethodViaClass]
+import typing
+class A:
+    def f(self): pass
+A.f = None # E: Cannot assign to a method
+
+[case testAssignToNestedClassViaClass]
+import typing
+class A:
+    class B: pass
+A.B = None # E: Cannot assign to a type
+
+[case testAccessingClassAttributeWithTypeInferenceIssue]
+x = C.x # E: Cannot determine type of 'x'
+def f() -> int: return 1
+class C:
+    x = f()
+[builtins fixtures/list.pyi]
+
+[case testAccessingClassAttributeWithTypeInferenceIssue2]
+class C:
+    x = []
+x = C.x
+[builtins fixtures/list.pyi]
+[out]
+main:2: error: Need type annotation for variable
+
+[case testAccessingGenericClassAttribute]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+class A(Generic[T]):
+    x = None  # type: T
+A.x  # E: Access to generic instance variables via class is ambiguous
+A[int].x  # E: Access to generic instance variables via class is ambiguous
+
+[case testAccessingNestedGenericClassAttribute]
+from typing import Generic, List, TypeVar, Union
+T = TypeVar('T')
+U = TypeVar('U')
+class A(Generic[T, U]):
+    x = None  # type: Union[T, List[U]]
+A.x  # E: Access to generic instance variables via class is ambiguous
+A[int, int].x  # E: Access to generic instance variables via class is ambiguous
+[builtins fixtures/list.pyi]
+
+
+-- Nested classes
+-- --------------
+
+
+[case testClassWithinFunction]
+
+def f() -> None:
+    class A:
+        def g(self) -> None: pass
+    a = None # type: A
+    a.g()
+    a.g(a) # E: Too many arguments for "g" of "A"
+[out]
+
+[case testConstructNestedClass]
+import typing
+class A:
+    class B: pass
+    b = B()
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    b = B(b) # E: Too many arguments for "B"
+[out]
+
+[case testConstructNestedClassWithCustomInit]
+import typing
+class A:
+    def f(self) -> None:
+        class B:
+            def __init__(self, a: 'A') -> None: pass
+        b = B(A())
+        b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+        b = B() # E: Too few arguments for "B"
+[out]
+
+[case testDeclareVariableWithNestedClassType]
+
+def f() -> None:
+    class A: pass
+    a = None # type: A
+    a = A()
+    a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+[out]
+
+[case testExternalReferenceToClassWithinClass]
+
+class A:
+    class B: pass
+b = None # type: A.B
+b = A.B()
+b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = A.B(b) # E: Too many arguments for "B"
+
+[case testAliasNestedClass]
+class Outer:
+    class Inner:
+        def make_int(self) -> int: return 1
+    reveal_type(Inner().make_int)  # E: Revealed type is 'def () -> builtins.int'
+    some_int = Inner().make_int()
+
+reveal_type(Outer.Inner.make_int)  # E: Revealed type is 'def (self: __main__.Outer.Inner) -> builtins.int'
+reveal_type(Outer().some_int) # E: Revealed type is 'builtins.int'
+Bar = Outer.Inner
+reveal_type(Bar.make_int)  # E: Revealed type is 'def (self: __main__.Outer.Inner) -> builtins.int'
+x = Bar()  # type: Bar
+def produce() -> Bar:
+    reveal_type(Bar().make_int)  # E: Revealed type is 'def () -> builtins.int'
+    return Bar()
+
+[case testInnerClassPropertyAccess]
+class Foo:
+    class Meta:
+        name = 'Bar'
+    meta = Meta
+
+reveal_type(Foo.Meta)  # E: Revealed type is 'def () -> __main__.Foo.Meta'
+reveal_type(Foo.meta)  # E: Revealed type is 'def () -> __main__.Foo.Meta'
+reveal_type(Foo.Meta.name)  # E: Revealed type is 'builtins.str'
+reveal_type(Foo.meta.name)  # E: Revealed type is 'builtins.str'
+reveal_type(Foo().Meta)  # E: Revealed type is 'def () -> __main__.Foo.Meta'
+reveal_type(Foo().meta)  # E: Revealed type is 'def () -> __main__.Foo.Meta'
+reveal_type(Foo().meta.name)  # E: Revealed type is 'builtins.str'
+reveal_type(Foo().Meta.name)  # E: Revealed type is 'builtins.str'
+
+-- Declaring attribute type in method
+-- ----------------------------------
+
+
+[case testDeclareAttributeTypeInInit]
+
+class A:
+    def __init__(self):
+        self.x = None # type: int
+a = None # type: A
+a.x = 1
+a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testAccessAttributeDeclaredInInitBeforeDeclaration]
+
+a = None # type: A
+a.x = 1
+a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+class A:
+    def __init__(self):
+        self.x = None # type: int
+
+
+-- Special cases
+-- -------------
+
+
+[case testMultipleClassDefinition]
+import typing
+A()
+class A: pass
+class A: pass
+[out]
+main:4: error: Name 'A' already defined on line 3
+
+[case testDocstringInClass]
+import typing
+class A:
+    """Foo"""
+class B:
+    'x'
+    y = B()
+[builtins fixtures/primitives.pyi]
+
+[case testErrorMessageInFunctionNestedWithinMethod]
+import typing
+class A:
+    def f(self) -> None:
+        def g() -> None:
+            "" + 1  # E: Unsupported operand types for + ("str" and "int")
+        "" + 1  # E: Unsupported operand types for + ("str" and "int")
+[out]
+
+-- Static methods
+-- --------------
+
+
+[case testSimpleStaticMethod]
+import typing
+class A:
+  @staticmethod
+  def f(x: int) -> None: pass
+A.f(1)
+A().f(1)
+A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+[builtins fixtures/staticmethod.pyi]
+
+[case testBuiltinStaticMethod]
+import typing
+int.from_bytes(b'', '')
+int.from_bytes('', '') # E: Argument 1 to "from_bytes" of "int" has incompatible type "str"; expected "bytes"
+[builtins fixtures/staticmethod.pyi]
+
+[case testAssignStaticMethodOnInstance]
+import typing
+class A:
+  @staticmethod
+  def f(x: int) -> None: pass
+A().f = A.f # E: Cannot assign to a method
+[builtins fixtures/staticmethod.pyi]
+
+
+-- Class methods
+-- -------------
+
+
+[case testSimpleClassMethod]
+import typing
+class A:
+  @classmethod
+  def f(cls, x: int) -> None: pass
+A.f(1)
+A().f(1)
+A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+[builtins fixtures/classmethod.pyi]
+
+[case testBuiltinClassMethod]
+import typing
+int.from_bytes(b'', '')
+int.from_bytes('', '') # E: Argument 1 to "from_bytes" of "int" has incompatible type "str"; expected "bytes"
+[builtins fixtures/classmethod.pyi]
+
+[case testAssignClassMethodOnClass]
+import typing
+class A:
+  @classmethod
+  def f(cls, x: int) -> None: pass
+A.f = A.f # E: Cannot assign to a method
+[builtins fixtures/classmethod.pyi]
+
+[case testAssignClassMethodOnInstance]
+import typing
+class A:
+  @classmethod
+  def f(cls, x: int) -> None: pass
+A().f = A.f # E: Cannot assign to a method
+[builtins fixtures/classmethod.pyi]
+
+[case testClassMethodCalledInClassMethod]
+import typing
+class C:
+  @classmethod
+  def foo(cls) -> None: pass
+  @classmethod
+  def bar(cls) -> None:
+    cls()
+    cls(1)      # E: Too many arguments for "C"
+    cls.bar()
+    cls.bar(1)  # E: Too many arguments for "bar" of "C"
+    cls.bozo()  # E: Type[C] has no attribute "bozo"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testClassMethodCalledOnClass]
+import typing
+class C:
+  @classmethod
+  def foo(cls) -> None: pass
+C.foo()
+C.foo(1)  # E: Too many arguments for "foo" of "C"
+C.bozo()  # E: Type[C] has no attribute "bozo"
+[builtins fixtures/classmethod.pyi]
+
+[case testClassMethodCalledOnInstance]
+import typing
+class C:
+  @classmethod
+  def foo(cls) -> None: pass
+C().foo()
+C().foo(1)  # E: Too many arguments for "foo" of "C"
+C.bozo()    # E: Type[C] has no attribute "bozo"
+[builtins fixtures/classmethod.pyi]
+
+[case testClassMethodMayCallAbstractMethod]
+from abc import abstractmethod
+import typing
+class C:
+  @classmethod
+  def foo(cls) -> None:
+      cls().bar()
+  @abstractmethod
+  def bar(self) -> None:
+      pass
+[builtins fixtures/classmethod.pyi]
+
+[case testClassMethodSubclassing]
+class A:
+    @classmethod
+    def f(cls) -> None: pass
+
+    def g(self) -> None: pass
+
+class B(A):
+    def f(self) -> None: pass  # E: Signature of "f" incompatible with supertype "A"
+
+    @classmethod
+    def g(cls) -> None: pass
+
+class C(A):
+    @staticmethod
+    def f() -> None: pass
+[builtins fixtures/classmethod.pyi]
+
+-- Properties
+-- ----------
+
+
+[case testAccessingReadOnlyProperty]
+import typing
+class A:
+    @property
+    def f(self) -> str: pass
+a = A()
+reveal_type(a.f)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/property.pyi]
+
+[case testAssigningToReadOnlyProperty]
+import typing
+class A:
+    @property
+    def f(self) -> str: pass
+A().f = '' # E: Property "f" defined in "A" is read-only
+[builtins fixtures/property.pyi]
+
+[case testPropertyGetterBody]
+import typing
+class A:
+    @property
+    def f(self) -> str:
+        self.x = 1
+        self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+        return ''
+[builtins fixtures/property.pyi]
+[out]
+
+[case testDynamicallyTypedProperty]
+import typing
+class A:
+    @property
+    def f(self): pass
+a = A()
+a.f.xx
+a.f = '' # E: Property "f" defined in "A" is read-only
+[builtins fixtures/property.pyi]
+
+[case testPropertyWithSetter]
+import typing
+class A:
+    @property
+    def f(self) -> int:
+        return 1
+    @f.setter
+    def f(self, x: int) -> None:
+        pass
+a = A()
+a.f = a.f
+a.f.x # E: "int" has no attribute "x"
+a.f = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+a.f = 1
+reveal_type(a.f)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/property.pyi]
+
+[case testPropertyWithDeleterButNoSetter]
+import typing
+class A:
+    @property
+    def f(self) -> int:
+        return 1
+    @f.deleter
+    def f(self, x) -> None:
+        pass
+a = A()
+a.f = a.f # E: Property "f" defined in "A" is read-only
+a.f.x # E: "int" has no attribute "x"
+[builtins fixtures/property.pyi]
+
+-- Descriptors
+-- -----------
+
+
+[case testAccessingNonDataDescriptor]
+from typing import Any
+class D:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+class A:
+    f = D()
+a = A()
+reveal_type(a.f)  # E: Revealed type is 'builtins.str'
+
+[case testSettingNonDataDescriptor]
+from typing import Any
+class D:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+class A:
+    f = D()
+a = A()
+a.f = 'foo'
+a.f = D()  # E: Incompatible types in assignment (expression has type "D", variable has type "str")
+
+[case testSettingDataDescriptor]
+from typing import Any
+class D:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+    def __set__(self, inst: Any, value: str) -> None: pass
+class A:
+    f = D()
+a = A()
+a.f = ''
+a.f = 1 # E: Argument 2 to "__set__" of "D" has incompatible type "int"; expected "str"
+
+[case testReadingDescriptorWithoutDunderGet]
+from typing import Union, Any
+class D:
+    def __set__(self, inst: Any, value: str) -> None: pass
+class A:
+    f = D()
+    def __init__(self): self.f = 's'
+a = A()
+reveal_type(a.f)  # E: Revealed type is '__main__.D'
+
+[case testAccessingDescriptorFromClass]
+# flags: --strict-optional
+from d import D, Base
+class A(Base):
+    f = D()
+reveal_type(A.f)  # E: Revealed type is 'd.D'
+reveal_type(A().f)  # E: Revealed type is 'builtins.str'
+[file d.pyi]
+from typing import TypeVar, Type, Generic, overload
+class Base: pass
+class D:
+    def __init__(self) -> None: pass
+    @overload
+    def __get__(self, inst: None, own: Type[Base]) -> D: pass
+    @overload
+    def __get__(self, inst: Base, own: Type[Base]) -> str: pass
+[builtins fixtures/bool.pyi]
+
+[case testAccessingDescriptorFromClassWrongBase]
+# flags: --strict-optional
+from d import D, Base
+class A:
+    f = D()
+reveal_type(A.f)
+reveal_type(A().f)
+[file d.pyi]
+from typing import TypeVar, Type, Generic, overload
+class Base: pass
+class D:
+    def __init__(self) -> None: pass
+    @overload
+    def __get__(self, inst: None, own: Type[Base]) -> D: pass
+    @overload
+    def __get__(self, inst: Base, own: Type[Base]) -> str: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Revealed type is 'Any'
+main:5: error: No overload variant of "__get__" of "D" matches argument types [builtins.None, Type[__main__.A]]
+main:6: error: Revealed type is 'Any'
+main:6: error: No overload variant of "__get__" of "D" matches argument types [__main__.A, Type[__main__.A]]
+
+
+[case testAccessingGenericNonDataDescriptor]
+from typing import TypeVar, Type, Generic, Any
+V = TypeVar('V')
+class D(Generic[V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: Any, own: Type) -> V: return self.v
+class A:
+    f = D(10)
+    g = D('10')
+a = A()
+reveal_type(a.f)  # E: Revealed type is 'builtins.int*'
+reveal_type(a.g)  # E: Revealed type is 'builtins.str*'
+
+[case testSettingGenericDataDescriptor]
+from typing import TypeVar, Type, Generic, Any
+V = TypeVar('V')
+class D(Generic[V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: Any, own: Type) -> V: return self.v
+    def __set__(self, inst: Any, v: V) -> None: pass
+class A:
+    f = D(10)
+    g = D('10')
+a = A()
+a.f = 1
+a.f = '' # E: Argument 2 to "__set__" of "D" has incompatible type "str"; expected "int"
+a.g = ''
+a.g = 1 # E: Argument 2 to "__set__" of "D" has incompatible type "int"; expected "str"
+
+[case testAccessingGenericDescriptorFromClass]
+# flags: --strict-optional
+from d import D
+class A:
+    f = D(10)  # type: D[A, int]
+    g = D('10')  # type: D[A, str]
+reveal_type(A.f)  # E: Revealed type is 'd.D[__main__.A*, builtins.int*]'
+reveal_type(A.g)  # E: Revealed type is 'd.D[__main__.A*, builtins.str*]'
+reveal_type(A().f)  # E: Revealed type is 'builtins.int*'
+reveal_type(A().g)  # E: Revealed type is 'builtins.str*'
+[file d.pyi]
+from typing import TypeVar, Type, Generic, overload
+T = TypeVar('T')
+V = TypeVar('V')
+class D(Generic[T, V]):
+    def __init__(self, v: V) -> None: pass
+    @overload
+    def __get__(self, inst: None, own: Type[T]) -> 'D[T, V]': pass
+    @overload
+    def __get__(self, inst: T, own: Type[T]) -> V: pass
+[builtins fixtures/bool.pyi]
+
+[case testAccessingGenericDescriptorFromInferredClass]
+# flags: --strict-optional
+from typing import Type
+from d import D
+class A:
+    f = D(10)  # type: D[A, int]
+    g = D('10')  # type: D[A, str]
+def f(some_class: Type[A]):
+    reveal_type(some_class.f)
+    reveal_type(some_class.g)
+[file d.pyi]
+from typing import TypeVar, Type, Generic, overload
+T = TypeVar('T')
+V = TypeVar('V')
+class D(Generic[T, V]):
+    def __init__(self, v: V) -> None: pass
+    @overload
+    def __get__(self, inst: None, own: Type[T]) -> 'D[T, V]': pass
+    @overload
+    def __get__(self, inst: T, own: Type[T]) -> V: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:8: error: Revealed type is 'd.D[__main__.A*, builtins.int*]'
+main:9: error: Revealed type is 'd.D[__main__.A*, builtins.str*]'
+
+[case testAccessingGenericDescriptorFromClassBadOverload]
+# flags: --strict-optional
+from d import D
+class A:
+    f = D(10)  # type: D[A, int]
+reveal_type(A.f)
+[file d.pyi]
+from typing import TypeVar, Type, Generic, overload
+T = TypeVar('T')
+V = TypeVar('V')
+class D(Generic[T, V]):
+    def __init__(self, v: V) -> None: pass
+    @overload
+    def __get__(self, inst: None, own: None) -> 'D[T, V]': pass
+    @overload
+    def __get__(self, inst: T, own: Type[T]) -> V: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Revealed type is 'Any'
+main:5: error: No overload variant of "__get__" of "D" matches argument types [builtins.None, Type[__main__.A]]
+
+[case testAccessingNonDataDescriptorSubclass]
+from typing import Any
+class C:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+class D(C): pass
+class A:
+    f = D()
+a = A()
+reveal_type(a.f)  # E: Revealed type is 'builtins.str'
+
+[case testSettingDataDescriptorSubclass]
+from typing import Any
+class C:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+    def __set__(self, inst: Any, v: str) -> None: pass
+class D(C): pass
+class A:
+    f = D()
+a = A()
+a.f = ''
+a.f = 1 # E: Argument 2 to "__set__" of "C" has incompatible type "int"; expected "str"
+
+[case testReadingDescriptorSubclassWithoutDunderGet]
+from typing import Union, Any
+class C:
+    def __set__(self, inst: Any, v: str) -> None: pass
+class D(C): pass
+class A:
+    f = D()
+    def __init__(self): self.f = 's'
+a = A()
+reveal_type(a.f)  # E: Revealed type is '__main__.D'
+
+[case testAccessingGenericNonDataDescriptorSubclass]
+from typing import TypeVar, Type, Generic, Any
+V = TypeVar('V')
+class C(Generic[V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: Any, own: Type) -> V: return self.v
+class D(C[V], Generic[V]): pass
+class A:
+    f = D(10)
+    g = D('10')
+a = A()
+reveal_type(a.f)  # E: Revealed type is 'builtins.int*'
+reveal_type(a.g)  # E: Revealed type is 'builtins.str*'
+
+[case testSettingGenericDataDescriptorSubclass]
+from typing import TypeVar, Type, Generic
+T = TypeVar('T')
+V = TypeVar('V')
+class C(Generic[T, V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: T, own: Type[T]) -> V: return self.v
+    def __set__(self, inst: T, v: V) -> None: pass
+class D(C[T, V], Generic[T, V]): pass
+class A:
+    f = D(10)  # type: D[A, int]
+    g = D('10')  # type: D[A, str]
+a = A()
+a.f = 1
+a.f = '' # E: Argument 2 to "__set__" of "C" has incompatible type "str"; expected "int"
+a.g = ''
+a.g = 1 # E: Argument 2 to "__set__" of "C" has incompatible type "int"; expected "str"
+
+[case testSetDescriptorOnClass]
+from typing import TypeVar, Type, Generic
+T = TypeVar('T')
+V = TypeVar('V')
+class D(Generic[T, V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: T, own: Type[T]) -> V: return self.v
+    def __set__(self, inst: T, v: V) -> None: pass
+class A:
+    f = D(10)  # type: D[A, int]
+A.f = D(20)
+A.f = D('some string')  # E: Argument 1 to "D" has incompatible type "str"; expected "int"
+
+[case testSetDescriptorOnInferredClass]
+from typing import TypeVar, Type, Generic, Any
+V = TypeVar('V')
+class D(Generic[V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: Any, own: Type) -> V: return self.v
+    def __set__(self, inst: Any, v: V) -> None: pass
+class A:
+    f = D(10)
+def f(some_class: Type[A]):
+    A.f = D(20)
+    A.f = D('some string')
+[out]
+main:11: error: Argument 1 to "D" has incompatible type "str"; expected "int"
+
+[case testDescriptorUncallableDunderSet]
+class D:
+    __set__ = 's'
+class A:
+    f = D()
+A().f = 'x'  # E: __main__.D.__set__ is not callable
+
+[case testDescriptorDunderSetTooFewArgs]
+class D:
+    def __set__(self, inst): pass
+class A:
+    f = D()
+A().f = 'x'  # E: Too many arguments for "__set__"
+
+[case testDescriptorDunderSetTooManyArgs]
+class D:
+    def __set__(self, inst, v, other): pass
+class A:
+    f = D()
+A().f = 'x'  # E: Too few arguments for "__set__"
+
+[case testDescriptorDunderSetWrongArgTypes]
+class D:
+    def __set__(self, inst: str, v:str) -> None: pass
+class A:
+    f = D()
+A().f = 'x'  # E: Argument 1 to "__set__" of "D" has incompatible type "A"; expected "str"
+
+[case testDescriptorUncallableDunderGet]
+class D:
+    __get__ = 's'
+class A:
+    f = D()
+A().f  # E: __main__.D.__get__ is not callable
+
+[case testDescriptorDunderGetTooFewArgs]
+class D:
+    def __get__(self, inst): pass
+class A:
+    f = D()
+A().f  # E: Too many arguments for "__get__"
+
+[case testDescriptorDunderGetTooManyArgs]
+class D:
+    def __get__(self, inst, own, other): pass
+class A:
+    f = D()
+A().f = 'x'  # E: Too few arguments for "__get__"
+
+[case testDescriptorDunderGetWrongArgTypeForInstance]
+from typing import Any
+class D:
+    def __get__(self, inst: str, own: Any) -> Any: pass
+class A:
+    f = D()
+A().f  # E: Argument 1 to "__get__" of "D" has incompatible type "A"; expected "str"
+
+[case testDescriptorDunderGetWrongArgTypeForOwner]
+from typing import Any
+class D:
+    def __get__(self, inst: Any, own: str) -> Any: pass
+class A:
+    f = D()
+A().f  # E: Argument 2 to "__get__" of "D" has incompatible type Type[A]; expected "str"
+
+[case testDescriptorGetSetDifferentTypes]
+from typing import Any
+class D:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+    def __set__(self, inst: Any, v: int) -> None: pass
+class A:
+    f = D()
+a = A()
+a.f = 1
+reveal_type(a.f)  # E: Revealed type is 'builtins.str'
+
+
+-- _promote decorators
+-- -------------------
+
+
+[case testSimpleDucktypeDecorator]
+from typing import _promote
+class A: pass
+@_promote(A)
+class B: pass
+a = None  # type: A
+b = None  # type: B
+b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = b
+
+[case testDucktypeTransitivityDecorator]
+from typing import _promote
+class A: pass
+@_promote(A)
+class B: pass
+@_promote(B)
+class C: pass
+a = None  # type: A
+c = None  # type: C
+c = a # E: Incompatible types in assignment (expression has type "A", variable has type "C")
+a = c
+
+
+-- Hard coded type promotions
+-- --------------------------
+
+[case testHardCodedTypePromotions]
+import typing
+def f(x: float) -> None: pass
+def g(x: complex) -> None: pass
+f(1)
+g(1)
+g(1.1)
+[builtins fixtures/complex.pyi]
+
+
+-- Operator methods
+-- ----------------
+
+
+[case testOperatorMethodOverrideIntroducingOverloading]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A:
+    def __add__(self, x: int) -> int: pass
+class B(A):
+    @overload  # E: Signature of "__add__" incompatible with supertype "A"
+    def __add__(self, x: int) -> int: pass
+    @overload
+    def __add__(self, x: str) -> str: pass
+[out]
+
+[case testOperatorMethodOverrideWideningArgumentType]
+import typing
+class A:
+    def __add__(self, x: int) -> int: pass
+class B(A):
+    def __add__(self, x: object) -> int: pass
+[out]
+
+[case testOperatorMethodOverrideNarrowingReturnType]
+import typing
+class A:
+    def __add__(self, x: int) -> 'A': pass
+class B(A):
+    def __add__(self, x: int) -> 'B': pass
+
+[case testOperatorMethodOverrideWithDynamicallyTyped]
+import typing
+class A:
+    def __add__(self, x: int) -> 'A': pass
+class B(A):
+    def __add__(self, x): pass
+
+[case testOperatorMethodOverrideWithIdenticalOverloadedType]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A:
+    @overload
+    def __add__(self, x: int) -> 'A': pass
+    @overload
+    def __add__(self, x: str) -> 'A': pass
+class B(A):
+    @overload
+    def __add__(self, x: int) -> 'A': pass
+    @overload
+    def __add__(self, x: str) -> 'A': pass
+
+[case testOverloadedOperatorMethodOverrideWithDynamicallyTypedMethod]
+from foo import *
+[file foo.pyi]
+from typing import overload, Any
+class A:
+    @overload
+    def __add__(self, x: int) -> 'A': pass
+    @overload
+    def __add__(self, x: str) -> 'A': pass
+class B(A):
+    def __add__(self, x): pass
+class C(A):
+    def __add__(self, x: Any) -> A: pass
+
+[case testOverloadedOperatorMethodOverrideWithNewItem]
+from foo import *
+[file foo.pyi]
+from typing import overload, Any
+class A:
+    @overload
+    def __add__(self, x: int) -> 'A': pass
+    @overload
+    def __add__(self, x: str) -> 'A': pass
+class B(A):
+    @overload
+    def __add__(self, x: int) -> A: pass
+    @overload
+    def __add__(self, x: str) -> A: pass
+    @overload
+    def __add__(self, x: type) -> A: pass
+[out]
+tmp/foo.pyi:8: error: Signature of "__add__" incompatible with supertype "A"
+
+[case testOverloadedOperatorMethodOverrideWithSwitchedItemOrder]
+from foo import *
+[file foo.pyi]
+from typing import overload, Any
+class A:
+    @overload
+    def __add__(self, x: 'B') -> 'B': pass
+    @overload
+    def __add__(self, x: 'A') -> 'A': pass
+class B(A):
+    @overload
+    def __add__(self, x: 'A') -> 'A': pass
+    @overload
+    def __add__(self, x: 'B') -> 'B': pass
+[out]
+tmp/foo.pyi:8: error: Signature of "__add__" incompatible with supertype "A"
+
+[case testReverseOperatorMethodArgumentType]
+from typing import Any
+class A: pass
+class B:
+    def __radd__(self, x: A) -> int: pass # Error
+class C:
+    def __radd__(self, x: A) -> Any: pass
+class D:
+    def __radd__(self, x: A) -> object: pass
+[out]
+
+[case testReverseOperatorMethodArgumentType2]
+from typing import Any, Tuple, Callable
+class A:
+    def __radd__(self, x: Tuple[int, str]) -> int: pass
+class B:
+    def __radd__(self, x: Callable[[], int]) -> int: pass
+class C:
+    def __radd__(self, x: Any) -> int: pass
+[out]
+
+[case testReverseOperatorMethodForwardIsAny]
+from typing import Any
+def deco(f: Any) -> Any: return f
+class C:
+    @deco
+    def __add__(self, other: C) -> C: return C()
+    def __radd__(self, other: C) -> C: return C()
+[out]
+
+[case testReverseOperatorMethodForwardIsAny2]
+from typing import Any
+def deco(f: Any) -> Any: return f
+class C:
+    __add__ = None  # type: Any
+    def __radd__(self, other: C) -> C: return C()
+[out]
+
+[case testReverseOperatorMethodForwardIsAny3]
+from typing import Any
+def deco(f: Any) -> Any: return f
+class C:
+    __add__ = 42
+    def __radd__(self, other: C) -> C: return C()
+[out]
+main:5: error: Forward operator "__add__" is not callable
+
+[case testOverloadedReverseOperatorMethodArgumentType]
+from foo import *
+[file foo.pyi]
+from typing import overload, Any
+class A:
+    @overload
+    def __radd__(self, x: 'A') -> str: pass # Error
+    @overload
+    def __radd__(self, x: 'A') -> Any: pass
+[out]
+
+[case testReverseOperatorMethodArgumentTypeAndOverloadedMethod]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A:
+    @overload
+    def __add__(self, x: int) -> int: pass
+    @overload
+    def __add__(self, x: str) -> int: pass
+    def __radd__(self, x: 'A') -> str: pass
+
+[case testAbstractReverseOperatorMethod]
+import typing
+from abc import abstractmethod
+class A:
+    @abstractmethod
+    def __lt__(self, x: 'A') -> int: pass
+class B:
+    @abstractmethod
+    def __lt__(self, x: 'B') -> int: pass
+    @abstractmethod
+    def __gt__(self, x: 'B') -> int: pass
+[out]
+
+[case testOperatorMethodsAndOverloadingSpecialCase]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A:
+    @overload
+    def __add__(self, x: 'A') -> int: pass
+    @overload
+    def __add__(self, x: str) -> int: pass
+class B:
+    def __radd__(self, x: 'A') -> str: pass
+[out]
+
+[case testUnsafeOverlappingWithOperatorMethodsAndOverloading2]
+from foo import A, B
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A:
+    def __add__(self, x: 'A') -> int: pass
+class B:
+    @overload
+    def __radd__(self, x: 'X') -> str: pass # Error
+    @overload
+    def __radd__(self, x: A) -> str: pass   # Error
+class X:
+    def __add__(self, x): pass
+[out]
+tmp/foo.pyi:6: error: Signatures of "__radd__" of "B" and "__add__" of "X" are unsafely overlapping
+
+[case testUnsafeOverlappingWithLineNo]
+from typing import TypeVar
+class Real:
+    def __add__(self, other): ...
+class Fraction(Real):
+    def __radd__(self, other: Real) -> Real: ...
+[out]
+main:5: error: Signatures of "__radd__" of "Fraction" and "__add__" of "Real" are unsafely overlapping
+
+[case testOverlappingNormalAndInplaceOperatorMethod]
+import typing
+class A:
+    # Incompatible (potential trouble with __radd__)
+    def __add__(self, x: 'A') -> int: pass
+    def __iadd__(self, x: 'B') -> int: pass
+class B:
+    # Safe
+    def __add__(self, x: 'C') -> int: pass
+    def __iadd__(self, x: A) -> int: pass
+class C(A): pass
+[out]
+main:5: error: Signatures of "__iadd__" and "__add__" are incompatible
+
+[case testOverloadedNormalAndInplaceOperatorMethod]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A:
+    @overload
+    def __add__(self, x: int) -> int: pass
+    @overload
+    def __add__(self, x: str) -> int: pass
+    @overload # Error
+    def __iadd__(self, x: int) -> int: pass
+    @overload
+    def __iadd__(self, x: object) -> int: pass
+class B:
+    @overload
+    def __add__(self, x: int) -> int: pass
+    @overload
+    def __add__(self, x: str) -> str: pass
+    @overload
+    def __iadd__(self, x: int) -> int: pass
+    @overload
+    def __iadd__(self, x: str) -> str: pass
+[out]
+tmp/foo.pyi:7: error: Signatures of "__iadd__" and "__add__" are incompatible
+
+[case testIntroducingInplaceOperatorInSubclass]
+import typing
+class A:
+    def __add__(self, x: 'A') -> 'B': pass
+class B(A):
+    # __iadd__ effectively partially overrides __add__
+    def __iadd__(self, x: 'A') -> 'A': pass # Error
+class C(A):
+    def __iadd__(self, x: int) -> 'B': pass # Error
+class D(A):
+    def __iadd__(self, x: 'A') -> 'B': pass
+[out]
+main:6: error: Return type of "__iadd__" incompatible with "__add__" of supertype "A"
+main:8: error: Argument 1 of "__iadd__" incompatible with "__add__" of supertype "A"
+main:8: error: Signatures of "__iadd__" and "__add__" are incompatible
+
+[case testGetAttribute]
+
+a, b = None, None # type: A, B
+class A:
+    def __getattribute__(self, x: str) -> A:
+        return A()
+class B: pass
+
+a = a.foo
+b = a.bar
+[out]
+main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testGetAttributeSignature]
+class A:
+    def __getattribute__(self, x: str) -> A: pass
+class B:
+    def __getattribute__(self, x: A) -> B: pass
+class C:
+    def __getattribute__(self, x: str, y: str) -> C: pass
+class D:
+    def __getattribute__(self, x: str) -> None: pass
+[out]
+main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B"
+main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C"
+
+[case testGetAttr]
+
+a, b = None, None # type: A, B
+class A:
+    def __getattr__(self, x: str) -> A:
+        return A()
+class B: pass
+
+a = a.foo
+b = a.bar
+[out]
+main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testGetAttrSignature]
+class A:
+    def __getattr__(self, x: str) -> A: pass
+class B:
+    def __getattr__(self, x: A) -> B: pass
+class C:
+    def __getattr__(self, x: str, y: str) -> C: pass
+class D:
+    def __getattr__(self, x: str) -> None: pass
+[out]
+main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B"
+main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C"
+
+[case testSetAttr]
+from typing import Union, Any
+class A:
+    def __setattr__(self, name: str, value: Any) -> None: ...
+
+a = A()
+a.test = 'hello'
+
+class B:
+   def __setattr__(self, name: str, value: Union[int, str]) -> None: ...
+
+b = B()
+b.both = 1
+b.work = '2'
+
+class C:
+    def __setattr__(self, name: str, value: str) -> None: ...
+
+c = C()
+c.fail = 4  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+class D:
+    __setattr__ = 'hello'
+
+d = D()
+d.crash = 4  # E: "D" has no attribute "crash"
+
+class Ex:
+    def __setattr__(self, name: str, value: int) -> None:...
+    test = '42'  # type: str
+e = Ex()
+e.test = 'hello'
+e.t = 4
+
+class Super:
+    def __setattr__(self, name: str, value: int) -> None: ...
+
+class Sub(Super):
+    ...
+s = Sub()
+s.success = 4
+s.fail = 'fail'  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testSetAttrSignature]
+class Test:
+    def __setattr__() -> None: ...  # E: Method must have at least one argument # E: Invalid signature "def ()"
+t = Test()
+t.crash = 'test'  # E: "Test" has no attribute "crash"
+
+class A:
+    def __setattr__(self): ...  # E: Invalid signature "def (self: Any) -> Any"
+a = A()
+a.test = 4  # E: "A" has no attribute "test"
+
+class B:
+    def __setattr__(self, name, value: int): ...
+b = B()
+b.integer = 5
+
+class C:
+    def __setattr__(self, name: int, value: int) -> None: ...  # E: Invalid signature "def (__main__.C, builtins.int, builtins.int)"
+c = C()
+c.check = 13
+
+[case testGetAttrAndSetattr]
+from typing import Any
+class A:
+    def __setattr__(self, name: str, value: Any) -> None: ...
+    def __getattr__(self, name: str) -> Any: ...
+a = A()
+a.test = 4
+t = a.test
+
+class B:
+    def __setattr__(self, name: str, value: int) -> None: ...
+    def __getattr__(self, name: str) -> str: ...
+integer = 0
+b = B()
+b.at = '3'  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+integer = b.at  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+-- CallableType objects
+-- --------------------
+
+
+[case testCallableObject]
+import typing
+a = A()
+b = B()
+
+a()  # E: Too few arguments for "__call__" of "A"
+a(a, a)  # E: Too many arguments for "__call__" of "A"
+a = a(a)
+a = a(b)  # E: Argument 1 to "__call__" of "A" has incompatible type "B"; expected "A"
+b = a(a)  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+class A:
+    def __call__(self, x: A) -> A:
+        pass
+class B: pass
+
+
+-- __new__
+-- --------
+
+
+[case testConstructInstanceWith__new__]
+class C:
+    def __new__(cls, foo: int = None) -> 'C':
+        obj = object.__new__(cls)
+        return obj
+
+x = C(foo=12)
+x.a # E: "C" has no attribute "a"
+C(foo='') # E: Argument 1 to "C" has incompatible type "str"; expected "int"
+[builtins fixtures/__new__.pyi]
+
+[case testConstructInstanceWithDynamicallyTyped__new__]
+class C:
+    def __new__(cls, foo):
+        obj = object.__new__(cls)
+        return obj
+
+x = C(foo=12)
+x = C(foo='x')
+x.a # E: "C" has no attribute "a"
+C(bar='') # E: Unexpected keyword argument "bar" for "C"
+[builtins fixtures/__new__.pyi]
+
+[case testClassWith__new__AndCompatibilityWithType]
+class C:
+    def __new__(cls, foo: int = None) -> 'C':
+        obj = object.__new__(cls)
+        return obj
+def f(x: type) -> None: pass
+def g(x: int) -> None: pass
+f(C)
+g(C) # E: Argument 1 to "g" has incompatible type Type[C]; expected "int"
+[builtins fixtures/__new__.pyi]
+
+[case testClassWith__new__AndCompatibilityWithType2]
+class C:
+    def __new__(cls, foo):
+        obj = object.__new__(cls)
+        return obj
+def f(x: type) -> None: pass
+def g(x: int) -> None: pass
+f(C)
+g(C) # E: Argument 1 to "g" has incompatible type Type[C]; expected "int"
+[builtins fixtures/__new__.pyi]
+
+[case testGenericClassWith__new__]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class C(Generic[T]):
+    def __new__(cls, foo: T) -> 'C[T]':
+        obj = object.__new__(cls)
+        return obj
+    def set(self, x: T) -> None: pass
+c = C('')
+c.set('')
+c.set(1) # E: Argument 1 to "set" of "C" has incompatible type "int"; expected "str"
+[builtins fixtures/__new__.pyi]
+
+[case testOverloaded__new__]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class C:
+    @overload
+    def __new__(cls, foo: int) -> 'C':
+        obj = object.__new__(cls)
+        return obj
+    @overload
+    def __new__(cls, x: str, y: str) -> 'C':
+        obj = object.__new__(cls)
+        return obj
+c = C(1)
+c.a # E: "C" has no attribute "a"
+C('', '')
+C('') # E: No overload variant of "C" matches argument types [builtins.str]
+[builtins fixtures/__new__.pyi]
+
+
+-- Special cases
+-- -------------
+
+
+[case testSubclassInt]
+import typing
+class A(int): pass
+n = 0
+n = A()
+a = A()
+a = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "A")
+
+[case testForwardReferenceToNestedClass]
+def f(o: 'B.C') -> None:
+    o.f('') # E: Argument 1 to "f" of "C" has incompatible type "str"; expected "int"
+
+class B:
+    class C:
+        def f(self, x: int) -> None: pass
+[out]
+
+[case testForwardReferenceToNestedClassDeep]
+def f(o: 'B.C.D') -> None:
+    o.f('') # E: Argument 1 to "f" of "D" has incompatible type "str"; expected "int"
+
+class B:
+    class C:
+        class D:
+            def f(self, x: int) -> None: pass
+[out]
+
+[case testForwardReferenceToNestedClassWithinClass]
+class B:
+    def f(self, o: 'C.D') -> None:
+        o.f('') # E: Argument 1 to "f" of "D" has incompatible type "str"; expected "int"
+
+    class C:
+        class D:
+            def f(self, x: int) -> None: pass
+[out]
+
+[case testClassVsInstanceDisambiguation]
+class A: pass
+def f(x: A) -> None: pass
+f(A) # E: Argument 1 to "f" has incompatible type Type[A]; expected "A"
+[out]
+
+-- TODO
+--   attribute inherited from superclass; assign in __init__
+--   refer to attribute before type has been inferred (the initialization in
+--   __init__ has not been analyzed)
+
+[case testAnyBaseClassUnconstrainedConstructor]
+from typing import Any
+B = None  # type: Any
+class C(B): pass
+C(0)
+C(arg=0)
+[out]
+
+[case testErrorMapToSupertype]
+import typing
+class X(Nope): pass  # E: Name 'Nope' is not defined
+a, b = X()  # Used to crash here (#2244)
+
+
+-- Class-valued attributes
+-- -----------------------
+
+[case testClassValuedAttributesBasics]
+class A: ...
+class B:
+    a = A
+    bad = lambda: 42
+
+B().bad() # E: Invalid method type
+reveal_type(B.a) # E: Revealed type is 'def () -> __main__.A'
+reveal_type(B().a) # E: Revealed type is 'def () -> __main__.A'
+reveal_type(B().a()) # E: Revealed type is '__main__.A'
+
+class C:
+    a = A
+    def __init__(self) -> None:
+        self.aa = self.a()
+
+reveal_type(C().aa) # E: Revealed type is '__main__.A'
+[out]
+
+[case testClassValuedAttributesGeneric]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+
+class A(Generic[T]):
+    def __init__(self, x: T) -> None:
+        self.x = x
+class B(Generic[T]):
+    a = A[T]
+
+reveal_type(B[int]().a) # E: Revealed type is 'def (x: builtins.int*) -> __main__.A[builtins.int*]'
+B[int]().a('hi') # E: Argument 1 has incompatible type "str"; expected "int"
+
+class C(Generic[T]):
+    a = A
+    def __init__(self) -> None:
+        self.aa = self.a(42)
+
+reveal_type(C().aa) # E: Revealed type is '__main__.A[builtins.int]'
+[out]
+
+[case testClassValuedAttributesAlias]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+
+class A(Generic[T, S]): ...
+
+SameA = A[T, T]
+
+class B:
+    a_any = SameA
+    a_int = SameA[int]
+
+reveal_type(B().a_any) # E: Revealed type is 'def () -> __main__.A[Any, Any]'
+reveal_type(B().a_int()) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]'
+
+class C:
+    a_int = SameA[int]
+    def __init__(self) -> None:
+        self.aa = self.a_int()
+
+reveal_type(C().aa) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]'
+[out]
+
+
+-- Type[C]
+-- -------
+
+
+[case testTypeUsingTypeCBasic]
+from typing import Type
+class User: pass
+class ProUser(User): pass
+def new_user(user_class: Type[User]) -> User:
+    return user_class()
+reveal_type(new_user(User))  # E: Revealed type is '__main__.User'
+reveal_type(new_user(ProUser))  # E: Revealed type is '__main__.User'
+[out]
+
+[case testTypeUsingTypeCDefaultInit]
+from typing import Type
+class B:
+    pass
+def f(A: Type[B]) -> None:
+    A(0)  # E: Too many arguments for "B"
+    A()
+[out]
+
+[case testTypeUsingTypeCInitWithArg]
+from typing import Type
+class B:
+    def __init__(self, a: int) -> None: pass
+def f(A: Type[B]) -> None:
+    A(0)
+    A()  # E: Too few arguments for "B"
+[out]
+
+[case testTypeUsingTypeCTypeVar]
+from typing import Type, TypeVar
+class User: pass
+class ProUser(User): pass
+U = TypeVar('U', bound=User)
+def new_user(user_class: Type[U]) -> U:
+    user = user_class()
+    reveal_type(user)
+    return user
+pro_user = new_user(ProUser)
+reveal_type(pro_user)
+[out]
+main:7: error: Revealed type is 'U`-1'
+main:10: error: Revealed type is '__main__.ProUser*'
+
+[case testTypeUsingTypeCTypeVarDefaultInit]
+from typing import Type, TypeVar
+class B:
+    pass
+T = TypeVar('T', bound=B)
+def f(A: Type[T]) -> None:
+    A()
+    A(0)  # E: Too many arguments for "B"
+[out]
+
+[case testTypeUsingTypeCTypeVarWithInit]
+from typing import Type, TypeVar
+class B:
+    def __init__(self, a: int) -> None: pass
+T = TypeVar('T', bound=B)
+def f(A: Type[T]) -> None:
+    A()  # E: Too few arguments for "B"
+    A(0)
+[out]
+
+[case testTypeUsingTypeCTwoTypeVars]
+from typing import Type, TypeVar
+class User: pass
+class ProUser(User): pass
+class WizUser(ProUser): pass
+U = TypeVar('U', bound=User)
+def new_user(u_c: Type[U]) -> U: pass
+P = TypeVar('P', bound=ProUser)
+def new_pro(pro_c: Type[P]) -> P:
+    return new_user(pro_c)
+wiz = new_pro(WizUser)
+reveal_type(wiz)
+def error(u_c: Type[U]) -> P:
+    return new_pro(u_c)  # Error here, see below
+[out]
+main:11: error: Revealed type is '__main__.WizUser*'
+main:13: error: Incompatible return value type (got "U", expected "P")
+main:13: error: Type argument 1 of "new_pro" has incompatible value "U"
+
+[case testTypeUsingTypeCCovariance]
+from typing import Type, TypeVar
+class User: pass
+class ProUser(User): pass
+def new_user(user_class: Type[User]) -> User:
+    return user_class()
+def new_pro_user(user_class: Type[ProUser]):
+    new_user(user_class)
+[out]
+
+[case testAllowCovariantArgsInConstructor]
+from typing import Generic, TypeVar
+
+T_co = TypeVar('T_co', covariant=True)
+
+class C(Generic[T_co]):
+    def __init__(self, x: T_co) -> None: # This should be allowed
+        self.x = x
+    def meth(self) -> None:
+        reveal_type(self.x) # E: Revealed type is 'T_co`1'
+
+reveal_type(C(1).x) # E: Revealed type is 'builtins.int*'
+[builtins fixtures/property.pyi]
+[out]
+
+[case testTypeUsingTypeCErrorCovariance]
+from typing import Type, TypeVar
+class User: pass
+def new_user(user_class: Type[User]):
+    return user_class()
+def foo(arg: Type[int]):
+    new_user(arg)  # E: Argument 1 to "new_user" has incompatible type Type[int]; expected Type[User]
+[out]
+
+[case testTypeUsingTypeCUnionOverload]
+from foo import *
+[file foo.pyi]
+from typing import Type, Union, overload
+class X:
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, a: int) -> None: pass
+class Y:
+    def __init__(self) -> None: pass
+def bar(o: Type[Union[X, Y]]): pass
+bar(X)
+bar(Y)
+[out]
+
+[case testTypeUsingTypeCTypeAny]
+from typing import Type, Any
+def foo(arg: Type[Any]):
+    x = arg()
+    x = arg(0)
+    x = arg('', ())
+    reveal_type(x)  # E: Revealed type is 'Any'
+    x.foo
+class X: pass
+foo(X)
+[out]
+
+[case testTypeUsingTypeCTypeAnyMember]
+from typing import Type, Any
+def foo(arg: Type[Any]):
+    x = arg.member_name
+    arg.new_member_name = 42
+    # Member access is ok and types as Any
+    reveal_type(x)  # E: Revealed type is 'Any'
+    # But Type[Any] is distinct from Any
+    y: int = arg  # E: Incompatible types in assignment (expression has type Type[Any], variable has type "int")
+[out]
+
+[case testTypeUsingTypeCTypeAnyMemberFallback]
+from typing import Type, Any
+def foo(arg: Type[Any]):
+    reveal_type(arg.__str__)  # E: Revealed type is 'def () -> builtins.str'
+    reveal_type(arg.mro())  # E: Revealed type is 'builtins.list[builtins.type]'
+[builtins fixtures/type.pyi]
+[out]
+
+[case testTypeUsingTypeCTypeNoArg]
+from typing import Type
+def foo(arg: Type):
+    x = arg()
+    reveal_type(x)  # E: Revealed type is 'Any'
+class X: pass
+foo(X)
+[out]
+
+[case testTypeUsingTypeCBuiltinType]
+from typing import Type
+def foo(arg: type): pass
+class X: pass
+def bar(arg: Type[X]):
+    foo(arg)
+foo(X)
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTypeUsingTypeCClassMethod]
+from typing import Type
+class User:
+    @classmethod
+    def foo(cls) -> int: pass
+    def bar(self) -> int: pass
+def process(cls: Type[User]):
+    reveal_type(cls.foo())  # E: Revealed type is 'builtins.int'
+    obj = cls()
+    reveal_type(cls.bar(obj))  # E: Revealed type is 'builtins.int'
+    cls.mro()  # Defined in class type
+    cls.error  # E: Type[User] has no attribute "error"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeUsingTypeCClassMethodUnion]
+from typing import Type, Union
+class User:
+    @classmethod
+    def foo(cls) -> int: pass
+    def bar(self) -> int: pass
+class ProUser(User): pass
+class BasicUser(User): pass
+def process(cls: Type[Union[BasicUser, ProUser]]):
+    cls.foo()
+    obj = cls()
+    cls.bar(obj)
+    cls.mro()  # Defined in class type
+    cls.error  # E: Item "type" of "Union[Type[BasicUser], Type[ProUser]]" has no attribute "error"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeUsingTypeCClassMethodFromTypeVar]
+from typing import Type, TypeVar
+class User:
+    @classmethod
+    def foo(cls) -> int: pass
+    def bar(self) -> int: pass
+U = TypeVar('U', bound=User)
+def process(cls: Type[U]):
+    reveal_type(cls.foo())  # E: Revealed type is 'builtins.int'
+    obj = cls()
+    reveal_type(cls.bar(obj))  # E: Revealed type is 'builtins.int'
+    cls.mro()  # Defined in class type
+    cls.error  # E: Type[U] has no attribute "error"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeUsingTypeCClassMethodFromTypeVarUnionBound]
+# Ideally this would work, but not worth the effort; just don't crash
+from typing import Type, TypeVar, Union
+class User:
+    @classmethod
+    def foo(cls) -> int: pass
+    def bar(self) -> int: pass
+class ProUser(User): pass
+class BasicUser(User): pass
+U = TypeVar('U', bound=Union[ProUser, BasicUser])
+def process(cls: Type[U]):
+    cls.foo()  # E: Type[U] has no attribute "foo"
+    obj = cls()
+    cls.bar(obj)  # E: Type[U] has no attribute "bar"
+    cls.mro()  # Defined in class type
+    cls.error  # E: Type[U] has no attribute "error"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeUsingTypeCErrorUnsupportedType]
+from typing import Type, Tuple
+def foo(arg: Type[Tuple[int]]):  # E: Unsupported type Type["Tuple[int]"]
+    arg()
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTypeUsingTypeCOverloadedClass]
+from foo import *
+[file foo.pyi]
+from typing import Type, TypeVar, overload
+class User:
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, arg: int) -> None: pass
+    @classmethod
+    def foo(cls) -> None: pass
+U = TypeVar('U', bound=User)
+def new(uc: Type[U]) -> U:
+    uc.foo()
+    u = uc()
+    u.foo()
+    u = uc(0)
+    u.foo()
+    u = uc('')
+    u.foo(0)
+    return uc()
+u = new(User)
+[builtins fixtures/classmethod.pyi]
+[out]
+tmp/foo.pyi:16: error: No overload variant of "User" matches argument types [builtins.str]
+tmp/foo.pyi:17: error: Too many arguments for "foo" of "User"
+
+[case testTypeUsingTypeCInUpperBound]
+from typing import TypeVar, Type
+class B: pass
+T = TypeVar('T', bound=Type[B])
+def f(a: T): pass
+[out]
+
+[case testTypeUsingTypeCTuple]
+from typing import Type, Tuple
+def f(a: Type[Tuple[int, int]]):
+    a()
+[out]
+main:2: error: Unsupported type Type["Tuple[int, int]"]
+
+[case testTypeUsingTypeCNamedTuple]
+from typing import Type, NamedTuple
+N = NamedTuple('N', [('x', int), ('y', int)])
+def f(a: Type[N]):
+    a()
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Unsupported type Type["N"]
+
+[case testTypeUsingTypeCJoin]
+from typing import Type
+class B: pass
+class C(B): pass
+class D(B): pass
+def foo(c: Type[C], d: Type[D]) -> None:
+    x = [c, d]
+    reveal_type(x)
+
+[builtins fixtures/list.pyi]
+[out]
+main:7: error: Revealed type is 'builtins.list[Type[__main__.B]]'
+
+[case testTypeEquivalentTypeAny]
+from typing import Type, Any
+
+a = None # type: Type[Any]
+b = a # type: type
+
+x = None # type: type
+y = x # type: Type[Any]
+
+class C: ...
+
+p = None # type: type
+q = p # type: Type[C]
+
+[builtins fixtures/list.pyi]
+[out]
+
+[case testTypeEquivalentTypeAny2]
+from typing import Type, Any, TypeVar, Generic
+
+class C: ...
+x = None # type: type
+y = None # type: Type[Any]
+z = None # type: Type[C]
+
+lst = [x, y, z]
+reveal_type(lst) # E: Revealed type is 'builtins.list[builtins.type*]'
+
+T1 = TypeVar('T1', bound=type)
+T2 = TypeVar('T2', bound=Type[Any])
+class C1(Generic[T1]): ...
+class C2(Generic[T2]): ...
+
+C1[Type[Any]], C2[type] # both these should not fail
+[builtins fixtures/list.pyi]
+[out]
+
+[case testTypeEquivalentTypeAnyEdgeCase]
+class C:
+    pass
+
+class M(type):
+    def __init__(cls, x) -> None:
+        type.__init__(cls, x)
+
+class Mbad(type):
+    def __init__(cls, x) -> None:
+        type.__init__(C(), x) # E: Argument 1 to "__init__" of "type" has incompatible type "C"; expected "type"
+[builtins fixtures/primitives.pyi]
+[out]
+
+[case testTypeMatchesOverloadedFunctions]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload, Union
+
+class User: pass
+UserType = User  # type: Type[User]
+
+@overload
+def f(a: object) -> int: pass
+@overload
+def f(a: int) -> str: pass
+
+reveal_type(f(User))  # E: Revealed type is 'builtins.int'
+reveal_type(f(UserType))  # E: Revealed type is 'builtins.int'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeMatchesGeneralTypeInOverloadedFunctions]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload
+
+class User: pass
+UserType = User  # type: Type[User]
+
+@overload
+def f(a: type) -> int:
+    return 1
+@overload
+def f(a: int) -> str:
+    return "a"
+
+reveal_type(f(User))  # E: Revealed type is 'builtins.int'
+reveal_type(f(UserType))  # E: Revealed type is 'builtins.int'
+reveal_type(f(1))  # E: Revealed type is 'builtins.str'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeMatchesSpecificTypeInOverloadedFunctions]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload
+
+class User: pass
+UserType = User  # type: Type[User]
+
+@overload
+def f(a: User) -> User:
+    return User()
+@overload
+def f(a: Type[User]) -> int:
+    return 1
+@overload
+def f(a: int) -> str:
+    return "a"
+
+reveal_type(f(User))  # E: Revealed type is 'builtins.int'
+reveal_type(f(UserType))  # E: Revealed type is 'builtins.int'
+reveal_type(f(User()))  # E: Revealed type is 'foo.User'
+reveal_type(f(1))  # E: Revealed type is 'builtins.str'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testMixingTypeTypeInOverloadedFunctions]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload
+
+class User: pass
+
+@overload
+def f(a: User) -> Type[User]:
+    return User
+@overload
+def f(a: Type[User]) -> User:
+    return a()
+@overload
+def f(a: int) -> Type[User]:
+    return User
+@overload
+def f(a: str) -> User:
+    return User()
+
+reveal_type(f(User()))  # E: Revealed type is 'Type[foo.User]'
+reveal_type(f(User))  # E: Revealed type is 'foo.User'
+reveal_type(f(3))  # E: Revealed type is 'Type[foo.User]'
+reveal_type(f("hi"))  # E: Revealed type is 'foo.User'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testGeneralTypeDoesNotMatchSpecificTypeInOverloadedFunctions]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload
+
+class User: pass
+
+@overload
+def f(a: Type[User]) -> None: pass
+@overload
+def f(a: int) -> None: pass
+
+def mock() -> type: return User
+
+f(User)
+f(mock())  # E: No overload variant of "f" matches argument types [builtins.type]
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testNonTypeDoesNotMatchOverloadedFunctions]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload
+
+class User: pass
+
+@overload
+def f(a: Type[User]) -> None: pass
+@overload
+def f(a: type) -> None: pass
+
+f(3)  # E: No overload variant of "f" matches argument types [builtins.int]
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testInstancesDoNotMatchTypeInOverloadedFunctions]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload
+
+class User: pass
+
+@overload
+def f(a: Type[User]) -> None: pass
+@overload
+def f(a: int) -> None: pass
+
+f(User)
+f(User())  # E: No overload variant of "f" matches argument types [foo.User]
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeCovarianceWithOverloadedFunctions]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload
+
+class A: pass
+class B(A): pass
+class C(B): pass
+AType = A  # type: Type[A]
+BType = B  # type: Type[B]
+CType = C  # type: Type[C]
+
+@overload
+def f(a: Type[B]) -> None: pass
+@overload
+def f(a: int) -> None: pass
+
+f(A)  # E: No overload variant of "f" matches argument types [def () -> foo.A]
+f(B)
+f(C)
+f(AType)  # E: No overload variant of "f" matches argument types [Type[foo.A]]
+f(BType)
+f(CType)
+[builtins fixtures/classmethod.pyi]
+[out]
+
+
+[case testOverloadedCovariantTypesFail]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload
+
+class A: pass
+class B(A): pass
+
+@overload
+def f(a: Type[B]) -> int: pass  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def f(a: Type[A]) -> str: pass
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testDistinctOverloadedCovariantTypesSucceed]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload
+
+class A: pass
+class AChild(A): pass
+class B: pass
+class BChild(B): pass
+
+@overload
+def f(a: Type[A]) -> int: pass
+@overload
+def f(a: Type[B]) -> str: pass
+@overload
+def f(a: A) -> A: pass
+@overload
+def f(a: B) -> B: pass
+
+reveal_type(f(A))  # E: Revealed type is 'builtins.int'
+reveal_type(f(AChild))  # E: Revealed type is 'builtins.int'
+reveal_type(f(B))  # E: Revealed type is 'builtins.str'
+reveal_type(f(BChild))  # E: Revealed type is 'builtins.str'
+
+reveal_type(f(A()))  # E: Revealed type is 'foo.A'
+reveal_type(f(AChild()))  # E: Revealed type is 'foo.A'
+reveal_type(f(B()))  # E: Revealed type is 'foo.B'
+reveal_type(f(BChild()))  # E: Revealed type is 'foo.B'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeTypeOverlapsWithObjectAndType]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload
+
+class User: pass
+
+@overload
+def f(a: Type[User]) -> int: pass  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def f(a: object) -> str: pass
+
+@overload
+def g(a: Type[User]) -> int: pass  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def g(a: type) -> str: pass
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeOverlapsWithObject]
+from foo import *
+[file foo.pyi]
+from typing import Type, overload
+
+class User: pass
+
+@overload
+def f(a: type) -> int: pass  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def f(a: object) -> str: pass
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeConstructorReturnsTypeType]
+class User:
+    @classmethod
+    def test_class_method(cls) -> int: pass
+    @staticmethod
+    def test_static_method() -> str: pass
+    def test_instance_method(self) -> None: pass
+
+u = User()
+
+reveal_type(type(u))  # E: Revealed type is 'Type[__main__.User]'
+reveal_type(type(u).test_class_method())  # E: Revealed type is 'builtins.int'
+reveal_type(type(u).test_static_method())  # E: Revealed type is 'builtins.str'
+type(u).test_instance_method()  # E: Too few arguments for "test_instance_method" of "User"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testObfuscatedTypeConstructorReturnsTypeType]
+from typing import TypeVar
+class User: pass
+
+f1 = type
+
+A = TypeVar('A')
+def f2(func: A) -> A:
+    return func
+
+u = User()
+
+reveal_type(f1(u))  # E: Revealed type is 'Type[__main__.User]'
+reveal_type(f2(type)(u))  # E: Revealed type is 'Type[__main__.User]'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeConstructorLookalikeFails]
+class User: pass
+
+def fake1(a: object) -> type:
+    return User
+def fake2(a: int) -> type:
+    return User
+
+reveal_type(type(User()))  # E: Revealed type is 'Type[__main__.User]'
+reveal_type(fake1(User()))  # E: Revealed type is 'builtins.type'
+reveal_type(fake2(3))  # E: Revealed type is 'builtins.type'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testOtherTypeConstructorsSucceed]
+def foo(self) -> int: return self.attr
+
+User = type('User', (object,), {'foo': foo, 'attr': 3})
+reveal_type(User)  # E: Revealed type is 'builtins.type'
+[builtins fixtures/args.pyi]
+[out]
+
+[case testTypeTypeComparisonWorks]
+class User: pass
+
+User == User
+User == type(User())
+type(User()) == User
+type(User()) == type(User())
+
+User != User
+User != type(User())
+type(User()) != User
+type(User()) != type(User())
+
+int == int
+int == type(3)
+type(3) == int
+type(3) == type(3)
+
+int != int
+int != type(3)
+type(3) != int
+type(3) != type(3)
+
+User is User
+User is type(User)
+type(User) is User
+type(User) is type(User)
+
+int is int
+int is type(3)
+type(3) is int
+type(3) is type(3)
+
+int.__eq__(int)
+int.__eq__(3, 4)
+[builtins fixtures/args.pyi]
+[out]
+main:33: error: Too few arguments for "__eq__" of "int"
+main:33: error: Unsupported operand types for == ("int" and Type[int])
+
+[case testMroSetAfterError]
+class C(str, str):
+    foo = 0
+    bar = foo
+[out]
+main:1: error: Duplicate base class "str"
+
+[case testCannotDetermineMro]
+class A: pass
+class B(A): pass
+class C(B): pass
+class D(A, B): pass # E: Cannot determine consistent method resolution order (MRO) for "D"
+class E(C, D): pass # E: Cannot determine consistent method resolution order (MRO) for "E"
+
+[case testInconsistentMroLocalRef]
+class A: pass
+class B(object, A): # E: Cannot determine consistent method resolution order (MRO) for "B"
+    def readlines(self): pass
+    __iter__ = readlines
+
+[case testDynamicMetaclass]
+
+class C(metaclass=int()):  # E: Dynamic metaclass not supported for 'C'
+    pass
+
+[case testVariableSubclass]
+class A:
+    a = 1  # type: int
+class B(A):
+    a = 1
+[out]
+
+[case testVariableSubclassAssignMismatch]
+class A:
+    a = 1  # type: int
+class B(A):
+    a = "a"
+[out]
+main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testVariableSubclassAssignment]
+class A:
+    a = None  # type: int
+class B(A):
+    def __init__(self) -> None:
+        self.a = "a"
+[out]
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testVariableSubclassTypeOverwrite]
+class A:
+    a = None  # type: int
+class B(A):
+    a = None  # type: str
+class C(B):
+    a = "a"
+[out]
+main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testVariableSubclassTypeOverwriteImplicit]
+class A:
+    a = 1
+class B(A):
+    a = None  # type: str
+[out]
+main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testVariableSuperUsage]
+class A:
+    a = []  # type: list
+class B(A):
+    a = [1, 2]
+class C(B):
+    a = B.a + [3]
+[builtins fixtures/list.pyi]
+[out]
+
+[case testClassAllBases]
+from typing import Union
+class A:
+    a = None  # type: Union[int, str]
+class B(A):
+    a = 1
+class C(B):
+    a = "str"
+class D(A):
+    a = "str"
+[out]
+main:7: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int")
+
+[case testVariableTypeVar]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    a = None  # type: T
+class B(A[int]):
+    a = 1
+
+[case testVariableTypeVarInvalid]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    a = None  # type: T
+class B(A[int]):
+    a = "abc"
+[out]
+main:6: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testVariableTypeVarIndirectly]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    a = None  # type: T
+class B(A[int]):
+    pass
+class C(B):
+    a = "a"
+[out]
+main:8: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testVariableTypeVarList]
+from typing import List, TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    a = None  # type: List[T]
+    b = None  # type: List[T]
+class B(A[int]):
+    a = [1]
+    b = ['']
+[builtins fixtures/list.pyi]
+[out]
+main:8: error: List item 0 has incompatible type "str"
+
+[case testVariableMethod]
+class A:
+    def a(self) -> None: pass
+    b = 1
+class B(A):
+    a = 1
+    def b(self) -> None: pass
+[out]
+main:5: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as Callable[[A], None])
+main:6: error: Signature of "b" incompatible with supertype "A"
+
+[case testVariableProperty]
+class A:
+    @property
+    def a(self) -> bool: pass
+class B(A):
+    a = None  # type: bool
+class C(A):
+    a = True
+class D(A):
+    a = 1
+[builtins fixtures/property.pyi]
+[out]
+main:9: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as "bool")
+
+[case testVariableOverwriteAny]
+from typing import Any
+class A:
+    a = 1
+class B(A):
+    a = 'x'  # type: Any
+[out]
+
+[case testInstanceMethodOverwrite]
+class B():
+    def n(self, a: int) -> None: pass
+class C(B):
+    def m(self, a: int) -> None: pass
+    n = m
+[out]
+
+[case testInstanceMethodOverwriteError]
+class B():
+    def n(self, a: int) -> None: pass
+class C(B):
+    def m(self, a: str) -> None: pass
+    n = m
+[out]
+main:5: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "B" defined the type as Callable[[int], None])
+
+[case testInstanceMethodOverwriteTypevar]
+from typing import Generic, TypeVar
+T = TypeVar("T")
+class B(Generic[T]):
+    def n(self, a: T) -> None: pass
+class C(B[int]):
+    def m(self, a: int) -> None: pass
+    n = m
+
+[case testInstanceMethodOverwriteTwice]
+class I:
+    def foo(self) -> None: pass
+class A(I):
+    def foo(self) -> None: pass
+class B(A):
+    def bar(self) -> None: pass
+    foo = bar
+class C(B):
+    def bar(self) -> None: pass
+    foo = bar
+
+[case testClassMethodOverwrite]
+class B():
+    @classmethod
+    def n(self, a: int) -> None: pass
+class C(B):
+    @classmethod
+    def m(self, a: int) -> None: pass
+    n = m
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testClassMethodOverwriteError]
+class B():
+    @classmethod
+    def n(self, a: int) -> None: pass
+class C(B):
+    @classmethod
+    def m(self, a: str) -> None: pass
+    n = m
+[builtins fixtures/classmethod.pyi]
+[out]
+main:7: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "B" defined the type as Callable[[int], None])
+
+[case testClassSpec]
+from typing import Callable
+class A():
+    b = None  # type: Callable[[A, int], int]
+class B(A):
+    def c(self, a: int) -> int: pass
+    b = c
+
+[case testClassSpecError]
+from typing import Callable
+class A():
+    b = None  # type: Callable[[A, int], int]
+class B(A):
+    def c(self, a: str) -> int: pass
+    b = c
+[out]
+main:6: error: Incompatible types in assignment (expression has type Callable[[str], int], base class "A" defined the type as Callable[[int], int])
+
+[case testClassStaticMethod]
+class A():
+    @staticmethod
+    def a(a: int) -> None: pass
+class B(A):
+    @staticmethod
+    def b(a: str) -> None: pass
+    a = b
+[builtins fixtures/staticmethod.pyi]
+[out]
+main:7: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "A" defined the type as Callable[[int], None])
+
+[case testClassStaticMethodIndirect]
+class A():
+    @staticmethod
+    def a(a: int) -> None: pass
+    c = a
+class B(A):
+    @staticmethod
+    def b(a: str) -> None: pass
+    c = b
+[builtins fixtures/staticmethod.pyi]
+[out]
+main:8: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "A" defined the type as Callable[[int], None])
+
+[case testClassStaticMethodSubclassing]
+class A:
+    @staticmethod
+    def a() -> None: pass
+
+    def b(self) -> None: pass
+
+    @staticmethod
+    def c() -> None: pass
+
+class B(A):
+    def a(self) -> None: pass  # E: Signature of "a" incompatible with supertype "A"
+
+    @classmethod
+    def b(cls) -> None: pass
+
+    @staticmethod
+    def c() -> None: pass
+[builtins fixtures/classmethod.pyi]
+
+[case testTempNode]
+class A():
+    def a(self) -> None: pass
+class B(A):
+    def b(self) -> None: pass
+    a = c = b
+
+[case testListObject]
+from typing import List
+class A:
+    x = []  # type: List[object]
+class B(A):
+    x = [1]
+[builtins fixtures/list.pyi]
+
+[case testClassMemberObject]
+class A:
+    x = object()
+class B(A):
+    x = 1
+class C(B):
+    x = ''
+[out]
+main:6: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int")
+
+[case testSlots]
+class A:
+    __slots__ = ("a")
+class B(A):
+    __slots__ = ("a", "b")
+
+[case testClassOrderOfError]
+class A:
+    x = 1
+class B(A):
+    x = "a"
+class C(B):
+    x = object()
+[out]
+main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+main:6: error: Incompatible types in assignment (expression has type "object", base class "B" defined the type as "str")
+
+[case testClassOneErrorPerLine]
+class A:
+  x = 1
+class B(A):
+  x = ""
+  x = 1.0
+[out]
+main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+main:5: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testClassIgnoreType]
+class A:
+    x = 0
+class B(A):
+    x = ''  # type: ignore
+class C(B):
+    x = ''
+[out]
+
+[case testInvalidMetaclassStructure]
+class X(type): pass
+class Y(type): pass
+class A(metaclass=X): pass
+class B(A, metaclass=Y): pass  # E: Inconsistent metaclass structure for 'B'
+
+[case testMetaclassNoTypeReveal]
+class M:
+    x = 0  # type: int
+
+class A(metaclass=M): pass  # E: Metaclasses not inheriting from 'type' are not supported
+
+A.x  # E: Type[A] has no attribute "x"
+
+[case testMetaclassTypeReveal]
+from typing import Type
+class M(type):
+    x = 0  # type: int
+
+class A(metaclass=M): pass
+
+def f(TA: Type[A]):
+    reveal_type(TA)  # E: Revealed type is 'Type[__main__.A]'
+    reveal_type(TA.x)  # E: Revealed type is 'builtins.int'
+
+[case testSubclassMetaclass]
+class M1(type):
+    x = 0
+class M2(M1): pass
+class C(metaclass=M2):
+    pass
+reveal_type(C.x) # E: Revealed type is 'builtins.int'
+
+[case testMetaclassSubclass]
+from typing import Type
+class M(type):
+    x = 0  # type: int
+
+class A(metaclass=M): pass
+class B(A): pass
+
+def f(TB: Type[B]):
+    reveal_type(TB)  # E: Revealed type is 'Type[__main__.B]'
+    reveal_type(TB.x)  # E: Revealed type is 'builtins.int'
+
+[case testMetaclassIterable]
+from typing import Iterable, Iterator
+
+class BadMeta(type):
+    def __iter__(self) -> Iterator[int]: yield 1
+
+class Bad(metaclass=BadMeta): pass
+
+for _ in Bad: pass  # E: Iterable expected
+
+class GoodMeta(type, Iterable[int]):
+    def __iter__(self) -> Iterator[int]: yield 1
+
+class Good(metaclass=GoodMeta): pass
+for _ in Good: pass
+reveal_type(list(Good))  # E: Revealed type is 'builtins.list[builtins.int*]'
+
+[builtins fixtures/list.pyi]
+
+[case testMetaclassTuple]
+from typing import Tuple
+
+class M(Tuple[int]): pass
+class C(metaclass=M): pass  # E: Invalid metaclass 'M'
+
+[builtins fixtures/tuple.pyi]
+
+[case testMetaclassOperatorBeforeReversed]
+class X:
+    def __radd__(self, x: int) -> int: ...
+
+class Meta(type):
+    def __add__(cls, x: X) -> str: ...
+
+class Concrete(metaclass=Meta):
+    pass
+
+reveal_type(Concrete + X())  # E: Revealed type is 'builtins.str'
+Concrete + "hello"  # E: Unsupported operand types for + (Type[Concrete] and "str")
+
+[case testMetaclassGetitem]
+class M(type):
+    def __getitem__(self, key) -> int: return 1
+
+class A(metaclass=M): pass
+
+reveal_type(A[M])  # E: Revealed type is 'builtins.int'
+
+[case testMetaclassSelfType]
+from typing import TypeVar, Type
+
+class M(type): pass
+T = TypeVar('T')
+
+class M1(M):
+    def foo(cls: Type[T]) -> T: ...
+
+class A(metaclass=M1): pass
+reveal_type(A.foo())  # E: Revealed type is '__main__.A*'
+
+[case testMetaclassAndSkippedImport]
+# flags: --ignore-missing-imports
+from missing import M
+class A(metaclass=M):
+    y = 0
+reveal_type(A.y) # E: Revealed type is 'builtins.int'
+A.x # E: Type[A] has no attribute "x"
+
+[case testAnyMetaclass]
+from typing import Any
+M = None  # type: Any
+class A(metaclass=M):
+    y = 0
+reveal_type(A.y) # E: Revealed type is 'builtins.int'
+A.x # E: Type[A] has no attribute "x"
+
+[case testInvalidVariableAsMetaclass]
+from typing import Any
+M = 0  # type: int
+MM = 0
+class A(metaclass=M): # E: Invalid metaclass 'M'
+    y = 0
+class B(metaclass=MM): # E: Invalid metaclass 'MM'
+    y = 0
+reveal_type(A.y) # E: Revealed type is 'builtins.int'
+A.x # E: Type[A] has no attribute "x"
+
+[case testAnyAsBaseOfMetaclass]
+from typing import Any, Type
+M = None  # type: Any
+class MM(M): pass
+
+class A(metaclass=MM):
+    y = 0
+    @classmethod
+    def f(cls) -> None: pass
+    def g(self) -> None: pass
+
+def h(a: Type[A], b: Type[object]) -> None:
+    h(a, a)
+    h(b, a) # E: Argument 1 to "h" has incompatible type Type[object]; expected Type[A]
+    a.f(1) # E: Too many arguments for "f" of "A"
+    reveal_type(a.y) # E: Revealed type is 'builtins.int'
+
+x = A # type: MM
+reveal_type(A.y) # E: Revealed type is 'builtins.int'
+reveal_type(A.x) # E: Revealed type is 'Any'
+A.f(1) # E: Too many arguments for "f" of "A"
+A().g(1) # E: Too many arguments for "g" of "A"
+[builtins fixtures/classmethod.pyi]
+
+[case testMetaclassTypeCallable]
+class M(type):
+    x = 5
+
+class A(metaclass=M): pass
+reveal_type(type(A).x)  # E: Revealed type is 'builtins.int'
+
+[case testMetaclassStrictSupertypeOfTypeWithClassmethods]
+from typing import Type, TypeVar
+TA = TypeVar('TA', bound='A')
+TTA = TypeVar('TTA', bound='Type[A]')
+TM = TypeVar('TM', bound='M')
+
+class M(type):
+    def g1(cls: 'Type[A]') -> A: pass #  E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M'
+    def g2(cls: Type[TA]) -> TA: pass #  E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M'
+    def g3(cls: TTA) -> TTA: pass #  E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M'
+    def g4(cls: TM) -> TM: pass
+m: M
+
+class A(metaclass=M):
+    def foo(self): pass
+
+reveal_type(A.g1)  # E: Revealed type is 'def () -> __main__.A'
+reveal_type(A.g2)  # E: Revealed type is 'def () -> __main__.A*'
+reveal_type(A.g3)  # E: Revealed type is 'def () -> def () -> __main__.A'
+reveal_type(A.g4)  # E: Revealed type is 'def () -> def () -> __main__.A'
+
+class B(metaclass=M):
+    def foo(self): pass
+
+B.g1  # Should be error: Argument 0 to "g1" of "M" has incompatible type "B"; expected Type[A]
+B.g2  # Should be error: Argument 0 to "g2" of "M" has incompatible type "B"; expected Type[TA]
+B.g3  # Should be error: Argument 0 to "g3" of "M" has incompatible type "B"; expected "TTA"
+reveal_type(B.g4)  # E: Revealed type is 'def () -> def () -> __main__.B'
+
+# 4 examples of unsoundness - instantiation, classmethod, staticmethod and ClassVar:
+
+ta: Type[A] = m  # E: Incompatible types in assignment (expression has type "M", variable has type Type[A])
+a: A = ta()
+reveal_type(ta.g1)  # E: Revealed type is 'def () -> __main__.A'
+reveal_type(ta.g2)  # E: Revealed type is 'def () -> __main__.A*'
+reveal_type(ta.g3)  # E: Revealed type is 'def () -> Type[__main__.A]'
+reveal_type(ta.g4)  # E: Revealed type is 'def () -> Type[__main__.A]'
+
+x: M = ta
+x.g1  # should be error: Argument 0 to "g1" of "M" has incompatible type "M"; expected Type[A]
+x.g2  # should be error: Argument 0 to "g2" of "M" has incompatible type "M"; expected Type[TA]
+x.g3  # should be error: Argument 0 to "g3" of "M" has incompatible type "M"; expected "TTA"
+reveal_type(x.g4)  # E: Revealed type is 'def () -> __main__.M*'
+
+def r(ta: Type[TA], tta: TTA) -> None:
+    x: M = ta
+    y: M = tta
+
+class Class(metaclass=M):
+    @classmethod
+    def f1(cls: Type[Class]) -> None: pass
+    @classmethod
+    def f2(cls: M) -> None: pass
+cl: Type[Class] = m  # E: Incompatible types in assignment (expression has type "M", variable has type Type[Class])
+reveal_type(cl.f1)  # E: Revealed type is 'def ()'
+reveal_type(cl.f2)  # E: Revealed type is 'def ()'
+x1: M = cl
+
+class Static(metaclass=M):
+    @staticmethod
+    def f() -> None: pass
+s: Type[Static] = m  # E: Incompatible types in assignment (expression has type "M", variable has type Type[Static])
+reveal_type(s.f)  # E: Revealed type is 'def ()'
+x2: M = s
+
+from typing import ClassVar
+class Cvar(metaclass=M):
+    x = 1  # type: ClassVar[int]
+cv: Type[Cvar] = m  # E: Incompatible types in assignment (expression has type "M", variable has type Type[Cvar])
+cv.x
+x3: M = cv
+
+[builtins fixtures/classmethod.pyi]
+
+[case testMetaclassOverloadResolution]
+from typing import Type, overload
+class A: pass
+
+class EM(type): pass
+class E(metaclass=EM): pass
+
+class EM1(type): pass
+class E1(metaclass=EM1): pass
+
+@overload
+def f(x: EM) -> int: ...
+@overload
+def f(x: EM1) -> A: ...
+@overload
+def f(x: str) -> str: ...
+def f(x: object) -> object: return ''
+
+e: EM
+reveal_type(f(e))  # E: Revealed type is 'builtins.int'
+
+et: Type[E]
+reveal_type(f(et))  # E: Revealed type is 'builtins.int'
+
+e1: EM1
+reveal_type(f(e1))  # E: Revealed type is '__main__.A'
+
+e1t: Type[E1]
+reveal_type(f(e1t))  # E: Revealed type is '__main__.A'
+
+reveal_type(f(''))  # E: Revealed type is 'builtins.str'
+
+-- Synthetic types crashes
+-- -----------------------
+
+[case testCrashInvalidArgsSyntheticClassSyntax]
+from typing import List, NamedTuple
+from mypy_extensions import TypedDict
+class TD(TypedDict):
+    x: List[int, str] # E: "list" expects 1 type argument, but 2 given
+class NM(NamedTuple):
+    x: List[int, str] # E: "list" expects 1 type argument, but 2 given
+
+# These two should never crash; the reveal_type checks are in the next test
+TD({'x': []})
+NM(x=[])
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testCrashInvalidArgsSyntheticClassSyntaxReveals]
+from typing import List, NamedTuple
+from mypy_extensions import TypedDict
+class TD(TypedDict):
+    x: List[int, str] # E: "list" expects 1 type argument, but 2 given
+class NM(NamedTuple):
+    x: List[int, str] # E: "list" expects 1 type argument, but 2 given
+
+x: TD
+x1 = TD({'x': []})
+y: NM
+y1 = NM(x=[])
+reveal_type(x) # E: Revealed type is 'TypedDict('__main__.TD', {'x': builtins.list[Any]})'
+reveal_type(x1) # E: Revealed type is 'TypedDict('__main__.TD', {'x': builtins.list[Any]})'
+reveal_type(y) # E: Revealed type is 'Tuple[builtins.list[Any], fallback=__main__.NM]'
+reveal_type(y1) # E: Revealed type is 'Tuple[builtins.list[Any], fallback=__main__.NM]'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testCrashInvalidArgsSyntheticFunctionSyntax]
+from typing import List, NewType, NamedTuple
+from mypy_extensions import TypedDict
+TD = TypedDict('TD', {'x': List[int, str]}) # E: "list" expects 1 type argument, but 2 given
+NM = NamedTuple('NM', [('x', List[int, str])]) # E: "list" expects 1 type argument, but 2 given
+NT = NewType('NT', List[int, str]) # E: "list" expects 1 type argument, but 2 given
+
+# These three should not crash
+TD({'x': []})
+NM(x=[])
+NT([])
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testCrashForwardSyntheticClassSyntax]
+from typing import NamedTuple
+from mypy_extensions import TypedDict
+class A1(NamedTuple):
+    b: 'B'
+    x: int
+class A2(TypedDict):
+    b: 'B'
+    x: int
+class B:
+    pass
+x: A1
+y: A2
+reveal_type(x.b) # E: Revealed type is '__main__.B'
+reveal_type(y['b']) # E: Revealed type is '__main__.B'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testCrashForwardSyntheticFunctionSyntax]
+from typing import NamedTuple
+from mypy_extensions import TypedDict
+A1 = NamedTuple('A1', [('b', 'B'), ('x', int)])
+A2 = TypedDict('A2', {'b': 'B', 'x': int})
+class B:
+    pass
+x: A1
+y: A2
+reveal_type(x.b) # E: Revealed type is '__main__.B'
+reveal_type(y['b']) # E: Revealed type is '__main__.B'
+[builtins fixtures/dict.pyi]
+[out]
+
+-- Special support for six
+-- -----------------------
+
+[case testSixWithMetaclass]
+import six
+class M(type):
+    x = 5
+class A(six.with_metaclass(M)): pass
+reveal_type(type(A).x)  # E: Revealed type is 'builtins.int'
+
+[case testSixWithMetaclass_python2]
+import six
+class M(type):
+    x = 5
+class A(six.with_metaclass(M)): pass
+reveal_type(type(A).x)  # E: Revealed type is 'builtins.int'
+
+[case testFromSixWithMetaclass]
+from six import with_metaclass
+class M(type):
+    x = 5
+class A(with_metaclass(M)): pass
+reveal_type(type(A).x)  # E: Revealed type is 'builtins.int'
+
+[case testSixWithMetaclassImportFrom]
+import six
+from metadefs import M
+class A(six.with_metaclass(M)): pass
+reveal_type(type(A).x)  # E: Revealed type is 'builtins.int'
+[file metadefs.py]
+class M(type):
+    x = 5
+
+[case testSixWithMetaclassImport]
+import six
+import metadefs
+class A(six.with_metaclass(metadefs.M)): pass
+reveal_type(type(A).x)  # E: Revealed type is 'builtins.int'
+[file metadefs.py]
+class M(type):
+    x = 5
+
+[case testSixWithMetaclassAndBase]
+import six
+class M(type):
+    x = 5
+class A:
+    def foo(self): pass
+class B:
+    def bar(self): pass
+class C1(six.with_metaclass(M, A)): pass
+class C2(six.with_metaclass(M, A, B)): pass
+reveal_type(type(C1).x)  # E: Revealed type is 'builtins.int'
+reveal_type(type(C2).x)  # E: Revealed type is 'builtins.int'
+C1().foo()
+C1().bar()  # E: "C1" has no attribute "bar"
+C2().foo()
+C2().bar()
+C2().baz()  # E: "C2" has no attribute "baz"
+
+[case testSixWithMetaclassGenerics]
+from typing import Generic, GenericMeta, TypeVar
+import six
+class DestroyableMeta(type):
+    pass
+class Destroyable(six.with_metaclass(DestroyableMeta)):
+    pass
+T_co = TypeVar('T_co', bound='Destroyable', covariant=True)
+class ArcMeta(GenericMeta, DestroyableMeta):
+    pass
+class Arc(six.with_metaclass(ArcMeta, Generic[T_co], Destroyable)):
+    pass
+class MyDestr(Destroyable):
+    pass
+reveal_type(Arc[MyDestr]())  # E: Revealed type is '__main__.Arc[__main__.MyDestr*]'
+[builtins fixtures/bool.pyi]
+
+[case testSixWithMetaclassErrors]
+import six
+class M(type): pass
+class A(object): pass
+def f() -> type: return M
+class C1(six.with_metaclass(M), object): pass  # E: Invalid base class
+class C2(C1, six.with_metaclass(M)): pass  # E: Invalid base class
+class C3(six.with_metaclass(A)): pass  # E: Metaclasses not inheriting from 'type' are not supported
+class C4(six.with_metaclass(M), metaclass=M): pass  # E: Invalid base class
+class C5(six.with_metaclass(f())): pass  # E: Dynamic metaclass not supported for 'C5'
+
+[case testSixWithMetaclassErrors_python2-skip]
+# No error here yet
+import six
+class M(type): pass
+class C4(six.with_metaclass(M)):
+    __metaclass__ = M
diff --git a/test-data/unit/check-classvar.test b/test-data/unit/check-classvar.test
new file mode 100644
index 0000000..02ba8f0
--- /dev/null
+++ b/test-data/unit/check-classvar.test
@@ -0,0 +1,266 @@
+[case testAssignmentOnClass]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int]
+A.x = 2
+
+[case testAssignmentOnInstance]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int]
+A().x = 2
+[out]
+main:4: error: Cannot assign to class variable "x" via instance
+
+[case testAssignmentOnSubclassInstance]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int]
+class B(A):
+    pass
+B().x = 2
+[out]
+main:6: error: Cannot assign to class variable "x" via instance
+
+[case testOverrideOnSelf]
+from typing import ClassVar
+class A:
+    x = None  # type: ClassVar[int]
+    def __init__(self) -> None:
+        self.x = 0
+[out]
+main:5: error: Cannot assign to class variable "x" via instance
+
+[case testOverrideOnSelfInSubclass]
+from typing import ClassVar
+class A:
+    x = None  # type: ClassVar[int]
+class B(A):
+    def __init__(self) -> None:
+        self.x = 0
+[out]
+main:6: error: Cannot assign to class variable "x" via instance
+
+[case testReadingFromInstance]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int]
+A().x
+reveal_type(A().x)
+[out]
+main:5: error: Revealed type is 'builtins.int'
+
+[case testReadingFromSelf]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int]
+    def __init__(self) -> None:
+        reveal_type(self.x)
+[out]
+main:5: error: Revealed type is 'builtins.int'
+
+[case testTypecheckSimple]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int]
+y = A.x  # type: int
+
+[case testTypecheckWithUserType]
+from typing import ClassVar
+class A:
+    pass
+class B:
+    x = A()  # type: ClassVar[A]
+
+[case testTypeCheckOnAssignment]
+from typing import ClassVar
+class A:
+    pass
+class B:
+    pass
+class C:
+    x = None  # type: ClassVar[A]
+C.x = B()
+[out]
+main:8: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testTypeCheckWithOverridden]
+from typing import ClassVar
+class A:
+    pass
+class B(A):
+    pass
+class C:
+    x = A()  # type: ClassVar[A]
+C.x = B()
+
+[case testRevealType]
+from typing import ClassVar
+class A:
+    x = None  # type: ClassVar[int]
+reveal_type(A.x)
+[out]
+main:4: error: Revealed type is 'builtins.int'
+
+[case testInfer]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int]
+y = A.x
+reveal_type(y)
+[out]
+main:5: error: Revealed type is 'builtins.int'
+
+[case testAssignmentOnUnion]
+from typing import ClassVar, Union
+class A:
+    x = None  # type: int
+class B:
+    x = None  # type: ClassVar[int]
+c = A()  # type: Union[A, B]
+c.x = 1
+[out]
+main:7: error: Cannot assign to class variable "x" via instance
+
+[case testAssignmentOnInstanceFromType]
+from typing import ClassVar, Type
+class A:
+    x = None  # type: ClassVar[int]
+def f(a: Type[A]) -> None:
+    a().x = 0
+[out]
+main:5: error: Cannot assign to class variable "x" via instance
+
+[case testAssignmentOnInstanceFromSubclassType]
+from typing import ClassVar, Type
+class A:
+    x = None  # type: ClassVar[int]
+class B(A):
+    pass
+def f(b: Type[B]) -> None:
+    b().x = 0
+[out]
+main:7: error: Cannot assign to class variable "x" via instance
+
+[case testClassVarWithList]
+from typing import ClassVar, List
+class A:
+    x = None  # type: ClassVar[List[int]]
+A.x = ['a']
+A().x.append(1)
+A().x.append('')
+[builtins fixtures/list.pyi]
+[out]
+main:4: error: List item 0 has incompatible type "str"
+main:6: error: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+
+[case testClassVarWithUnion]
+from typing import ClassVar, Union
+class A:
+    x = None  # type: ClassVar[Union[int, str]]
+class B:
+    pass
+A.x = 0
+A.x = 'a'
+A.x = B()
+reveal_type(A().x)
+[out]
+main:8: error: Incompatible types in assignment (expression has type "B", variable has type "Union[int, str]")
+main:9: error: Revealed type is 'Union[builtins.int, builtins.str]'
+
+[case testOverrideWithNarrowedUnion]
+from typing import ClassVar, Union
+class A: pass
+class B: pass
+class C: pass
+class D:
+    x = None  # type: ClassVar[Union[A, B, C]]
+class E(D):
+    x = None  # type: ClassVar[Union[A, B]]
+
+[case testOverrideWithExtendedUnion]
+from typing import ClassVar, Union
+class A: pass
+class B: pass
+class C: pass
+class D:
+    x = None  # type: ClassVar[Union[A, B]]
+class E(D):
+    x = None  # type: ClassVar[Union[A, B, C]]
+[out]
+main:8: error: Incompatible types in assignment (expression has type "Union[A, B, C]", base class "D" defined the type as "Union[A, B]")
+
+[case testAssignmentToCallableRet]
+from typing import ClassVar
+class A:
+    x = None  # type: ClassVar[int]
+def f() -> A:
+    return A()
+f().x = 0
+[out]
+main:6: error: Cannot assign to class variable "x" via instance
+
+[case testOverrideWithIncompatibleType]
+from typing import ClassVar
+class A:
+    x = None  # type: ClassVar[int]
+class B(A):
+    x = None  # type: ClassVar[str]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testOverrideWithNormalAttribute]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int]
+class B(A):
+    x = 2  # type: int
+[out]
+main:5: error: Cannot override class variable (previously declared on base class "A") with instance variable
+
+[case testOverrideWithAttributeWithClassVar]
+from typing import ClassVar
+class A:
+    x = 1  # type: int
+class B(A):
+    x = 2  # type: ClassVar[int]
+[out]
+main:5: error: Cannot override instance variable (previously declared on base class "A") with class variable
+
+[case testOverrideClassVarManyBases]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int]
+class B:
+    x = 2  # type: int
+class C(A, B):
+    x = 3  # type: ClassVar[int]
+[out]
+main:7: error: Cannot override instance variable (previously declared on base class "B") with class variable
+
+[case testOverrideClassVarWithClassVar]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int]
+class B(A):
+    x = 2  # type: ClassVar[int]
+
+[case testOverrideOnABCSubclass]
+from abc import ABCMeta
+from typing import ClassVar
+class A(metaclass=ABCMeta):
+    x = None  # type: ClassVar[int]
+class B(A):
+    x = 0  # type: ClassVar[int]
+
+[case testAcrossModules]
+import m
+reveal_type(m.A().x)
+m.A().x = 0
+[file m.py]
+from typing import ClassVar
+class A:
+    x = None  # type: ClassVar[int]
+[out]
+main:2: error: Revealed type is 'builtins.int'
+main:3: error: Cannot assign to class variable "x" via instance
diff --git a/test-data/unit/check-columns.test b/test-data/unit/check-columns.test
new file mode 100644
index 0000000..9b2c0b9
--- /dev/null
+++ b/test-data/unit/check-columns.test
@@ -0,0 +1,68 @@
+[case testColumnsSyntaxError]
+# flags: --show-column-numbers
+1 +
+[out]
+main:2:4: error: invalid syntax
+
+
+[case testColumnsNestedFunctions]
+# flags: --show-column-numbers
+import typing
+def f() -> 'A':
+    def g() -> 'B':
+        return A() # fail
+    return B() # fail
+class A: pass
+class B: pass
+[out]
+main:5:8: error: Incompatible return value type (got "A", expected "B")
+main:6:4: error: Incompatible return value type (got "B", expected "A")
+
+[case testColumnsNestedFunctionsWithFastParse]
+# flags: --show-column-numbers
+import typing
+def f() -> 'A':
+    def g() -> 'B':
+        return A() # fail
+    return B() # fail
+class A: pass
+class B: pass
+[out]
+main:5:8: error: Incompatible return value type (got "A", expected "B")
+main:6:4: error: Incompatible return value type (got "B", expected "A")
+
+
+[case testColumnsMethodDefaultArgumentsAndSignatureAsComment]
+# flags: --show-column-numbers
+import typing
+class A:
+    def f(self, x = 1, y = 'hello'): # type: (int, str) -> str
+        pass
+A().f()
+A().f(1)
+A().f('') # E:0: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+A().f(1, 1) # E:0: Argument 2 to "f" of "A" has incompatible type "int"; expected "str"
+A().f(1, 'hello', 'hi') # E:0: Too many arguments for "f" of "A"
+
+[case testColumnsMultipleStatementsPerLine]
+# flags: --show-column-numbers
+x = 1
+y = 'hello'
+x = 2; y = x; y += 1
+[out]
+main:4:7: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+main:4:14: error: Unsupported operand types for + ("str" and "int")
+
+[case testColumnsSimpleIsinstance]
+# flags: --show-column-numbers
+import typing
+def f(x: object, n: int, s: str) -> None:
+    n = x # E:4: Incompatible types in assignment (expression has type "object", variable has type "int")
+    if isinstance(x, int):
+        n = x
+        s = x # E:8: Incompatible types in assignment (expression has type "int", variable has type "str")
+    n = x # E:4: Incompatible types in assignment (expression has type "object", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+
diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test
new file mode 100644
index 0000000..b8e1346
--- /dev/null
+++ b/test-data/unit/check-custom-plugin.test
@@ -0,0 +1,148 @@
+-- Test cases for user-defined plugins
+--
+-- Note: Plugins used by tests live under test-data/unit/plugins. Defining
+--       plugin files in test cases does not work reliably.
+
+[case testFunctionPlugin]
+# flags: --config-file tmp/mypy.ini
+def f() -> str: ...
+reveal_type(f())  # E: Revealed type is 'builtins.int'
+[file mypy.ini]
+[[mypy]
+plugins=<ROOT>/test-data/unit/plugins/fnplugin.py
+
+[case testFunctionPluginFullnameIsNotNone]
+# flags: --config-file tmp/mypy.ini
+from typing import Callable, TypeVar
+f: Callable[[], None]
+T = TypeVar('T')
+def g(x: T) -> T: return x  # This strips out the name of a callable
+g(f)()
+[file mypy.ini]
+[[mypy]
+plugins=<ROOT>/test-data/unit/plugins/fnplugin.py
+
+[case testTwoPlugins]
+# flags: --config-file tmp/mypy.ini
+def f(): ...
+def g(): ...
+def h(): ...
+reveal_type(f())  # E: Revealed type is 'builtins.int'
+reveal_type(g())  # E: Revealed type is 'builtins.str'
+reveal_type(h())  # E: Revealed type is 'Any'
+[file mypy.ini]
+[[mypy]
+plugins=<ROOT>/test-data/unit/plugins/fnplugin.py,
+  <ROOT>/test-data/unit/plugins/plugin2.py
+
+[case testMissingPlugin]
+# flags: --config-file tmp/mypy.ini
+[file mypy.ini]
+[[mypy]
+plugins=missing.py
+[out]
+tmp/mypy.ini:2: error: Can't find plugin 'tmp/missing.py'
+--' (work around syntax highlighting)
+
+[case testMultipleSectionsDefinePlugin]
+# flags: --config-file tmp/mypy.ini
+[file mypy.ini]
+[[acme]
+plugins=acmeplugin
+[[mypy]
+plugins=missing.py
+[[another]
+plugins=another_plugin
+[out]
+tmp/mypy.ini:4: error: Can't find plugin 'tmp/missing.py'
+--' (work around syntax highlighting)
+
+[case testInvalidPluginExtension]
+# flags: --config-file tmp/mypy.ini
+[file mypy.ini]
+[[mypy]
+plugins=dir/badext.pyi
+[file dir/badext.pyi]
+[out]
+tmp/mypy.ini:2: error: Plugin 'badext.pyi' does not have a .py extension
+
+[case testMissingPluginEntryPoint]
+# flags: --config-file tmp/mypy.ini
+[file mypy.ini]
+[[mypy]
+ plugins = <ROOT>/test-data/unit/plugins/noentry.py
+[out]
+tmp/mypy.ini:2: error: Plugin '<ROOT>/test-data/unit/plugins/noentry.py' does not define entry point function "plugin"
+
+[case testInvalidPluginEntryPointReturnValue]
+# flags: --config-file tmp/mypy.ini
+def f(): pass
+f()
+[file mypy.ini]
+[[mypy]
+
+plugins=<ROOT>/test-data/unit/plugins/badreturn.py
+[out]
+tmp/mypy.ini:3: error: Type object expected as the return value of "plugin"; got None (in <ROOT>/test-data/unit/plugins/badreturn.py)
+
+[case testInvalidPluginEntryPointReturnValue2]
+# flags: --config-file tmp/mypy.ini
+def f(): pass
+f()
+[file mypy.ini]
+[[mypy]
+plugins=<ROOT>/test-data/unit/plugins/badreturn2.py
+[out]
+tmp/mypy.ini:2: error: Return value of "plugin" must be a subclass of "mypy.plugin.Plugin" (in <ROOT>/test-data/unit/plugins/badreturn2.py)
+
+[case testAttributeTypeHookPlugin]
+# flags: --config-file tmp/mypy.ini
+from typing import Callable
+from m import Signal
+s: Signal[Callable[[int], None]] = Signal()
+s(1)
+s('') # E: Argument 1 has incompatible type "str"; expected "int"
+[file m.py]
+from typing import TypeVar, Generic, Callable
+T = TypeVar('T', bound=Callable[..., None])
+class Signal(Generic[T]):
+    __call__: Callable[..., None]  # This type is replaced by the plugin
+[file mypy.ini]
+[[mypy]
+plugins=<ROOT>/test-data/unit/plugins/attrhook.py
+
+[case testTypeAnalyzeHookPlugin]
+# flags: --config-file tmp/mypy.ini
+from typing import Callable
+from mypy_extensions import DefaultArg
+from m import Signal
+s: Signal[[int, DefaultArg(str, 'x')]] = Signal()
+reveal_type(s) # E: Revealed type is 'm.Signal[def (builtins.int, x: builtins.str =)]'
+s.x # E: Signal[Callable[[int, str], None]] has no attribute "x"
+ss: Signal[int, str] # E: Invalid "Signal" type (expected "Signal[[t, ...]]")
+[file m.py]
+from typing import TypeVar, Generic, Callable
+T = TypeVar('T', bound=Callable[..., None])
+class Signal(Generic[T]):
+    __call__: Callable[..., None]
+[file mypy.ini]
+[[mypy]
+plugins=<ROOT>/test-data/unit/plugins/type_anal_hook.py
+[builtins fixtures/dict.pyi]
+
+[case testFunctionPluginHookForReturnedCallable]
+# flags: --config-file tmp/mypy.ini
+from m import decorator1, decorator2
+@decorator1()
+def f() -> None: pass
+@decorator2()
+def g() -> None: pass
+reveal_type(f) # E: Revealed type is 'def (*Any, **Any) -> builtins.str'
+reveal_type(g) # E: Revealed type is 'def (*Any, **Any) -> builtins.int'
+[file m.py]
+from typing import Callable
+def decorator1() -> Callable[..., Callable[..., int]]: pass
+def decorator2() -> Callable[..., Callable[..., int]]: pass
+[file mypy.ini]
+[[mypy]
+plugins=<ROOT>/test-data/unit/plugins/named_callable.py
diff --git a/test-data/unit/check-dynamic-typing.test b/test-data/unit/check-dynamic-typing.test
new file mode 100644
index 0000000..68a174e
--- /dev/null
+++ b/test-data/unit/check-dynamic-typing.test
@@ -0,0 +1,676 @@
+-- Assignment
+-- ----------
+
+
+[case testAssignmentWithDynamic]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+
+a = d # Everything ok
+d = a
+d = d
+d.x = a
+d.x = d
+
+class A: pass
+
+[case testMultipleAssignmentWithDynamic]
+from typing import Any
+d = None # type: Any
+a, b = None, None # type: (A, B)
+
+d, a = b, b    # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+d, d = d, d, d # E: Too many values to unpack (2 expected, 3 provided)
+
+a, b = d, d
+d, d = a, b
+a, b = d
+s, t = d
+
+class A: pass
+class B: pass
+
+
+-- Expressions
+-- -----------
+
+
+[case testCallingFunctionWithDynamicArgumentTypes]
+from typing import Any
+a, b = None, None # type: (A, B)
+
+b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a = f(a)
+a = f(b)
+a = f(None)
+a = f(f)
+
+def f(x: Any) -> 'A':
+    pass
+
+class A: pass
+class B: pass
+
+[case testCallingWithDynamicReturnType]
+from typing import Any
+a, b = None, None # type: (A, B)
+
+a = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+
+a = f(a)
+b = f(a)
+
+def f(x: 'A') -> Any:
+    pass
+
+class A: pass
+class B: pass
+
+[case testBinaryOperationsWithDynamicLeftOperand]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+c = None # type: C
+b = None # type: bool
+n = 0
+
+d in a  # E: Unsupported right operand type for in ("A")
+d and a
+d or a
+c = d and b # E: Incompatible types in assignment (expression has type "Union[Any, bool]", variable has type "C")
+c = d or b  # E: Incompatible types in assignment (expression has type "Union[Any, bool]", variable has type "C")
+
+c = d + a
+c = d - a
+c = d * a
+c = d / a
+c = d // a
+c = d % a
+c = d ** a
+b = d == a
+b = d != a
+b = d < a
+b = d <= a
+b = d > a
+b = d >= a
+b = d in c
+b = d and b
+b = d or b
+
+class A: pass
+class C:
+    def __contains__(self, a: A) -> bool:
+        pass
+[file builtins.py]
+class object:
+  def __init__(self): pass
+class bool: pass
+class int: pass
+class type: pass
+class function: pass
+class str: pass
+
+[case testBinaryOperationsWithDynamicAsRightOperand]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+c = None # type: C
+b = None # type: bool
+n = 0
+
+a and d
+a or d
+c = a in d
+c = b and d # E: Incompatible types in assignment (expression has type "Union[bool, Any]", variable has type "C")
+c = b or d  # E: Incompatible types in assignment (expression has type "Union[bool, Any]", variable has type "C")
+b = a + d
+b = a / d
+
+c = a + d
+c = a - d
+c = a * d
+c = a / d
+c = a // d
+c = a % d
+c = a ** d
+b = a in d
+b = b and d
+b = b or d
+
+class A:
+    def __add__(self, a: 'A') -> 'C':
+        pass
+    def __sub__(self, a: 'A') -> 'C':
+        pass
+    def __mul__(self, a: 'A') -> 'C':
+        pass
+    def __truediv__(self, a: 'A') -> 'C':
+        pass
+    def __floordiv__(self, a: 'A') -> 'C':
+        pass
+    def __mod__(self, a: 'A') -> 'C':
+        pass
+    def __pow__(self, a: 'A') -> 'C':
+        pass
+    def _lt(self, a: 'A') -> bool:
+        pass
+    def _gt(self, a: 'A') -> bool:
+        pass
+
+class C: pass
+[file builtins.py]
+class object:
+  def __init__(self): pass
+class bool: pass
+class int: pass
+class type: pass
+class function: pass
+class str: pass
+
+[case testDynamicWithUnaryExpressions]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+b = None # type: bool
+a = not d # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
+b = not d
+a = -d
+class A: pass
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testDynamicWithMemberAccess]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+
+a = d.foo(a()) # E: "A" not callable
+
+a = d.x
+a = d.foo(a, a)
+d.x = a
+d.x.y.z  # E: "A" has no attribute "y"
+
+class A: pass
+[out]
+
+[case testIndexingWithDynamic]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+
+a = d[a()] # E: "A" not callable
+d[a()] = a # E: "A" not callable
+
+a = d[a]
+d[a] = a
+d[a], d[a] = a, a
+
+class A: pass
+
+[case testTupleExpressionsWithDynamic]
+from typing import Tuple, Any
+t2 = None # type: Tuple[A, A]
+d = None # type: Any
+
+t2 = (d, d, d)  # E: Incompatible types in assignment (expression has type "Tuple[Any, Any, Any]", variable has type "Tuple[A, A]")
+t2 = (d, d)
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+[case testCastsWithDynamicType]
+from typing import Any, cast
+class A: pass
+class B: pass
+d = None # type: Any
+a = None # type: A
+b = None # type: B
+b = cast(A, d) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = cast(A, d)
+b = cast(Any, d)
+a = cast(Any, f())
+def f() -> None: pass
+
+[case testCompatibilityOfDynamicWithOtherTypes]
+from typing import Any, Tuple
+d = None # type: Any
+t = None # type: Tuple[A, A]
+# TODO: callable types, overloaded functions
+
+d = None # All ok
+d = t
+d = g
+d = A
+t = d
+f = d
+
+def g(a: 'A') -> None:
+    pass
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+
+-- Statements
+-- ----------
+
+
+[case testDynamicCondition]
+from typing import Any
+d = None # type: Any
+while d:
+    pass
+if d:
+    pass
+elif d:
+    pass
+[builtins fixtures/bool.pyi]
+
+[case testRaiseWithDynamic]
+from typing import Any
+d = None # type: Any
+raise d
+[builtins fixtures/exception.pyi]
+
+[case testReturnWithDynamic]
+from typing import Any
+d = None # type: Any
+
+def f() -> None:
+    return d # Ok
+
+def g() -> 'A':
+    return d # Ok
+
+class A: pass
+
+
+-- Implicit dynamic types for functions
+-- ------------------------------------
+
+
+[case testImplicitGlobalFunctionSignature]
+from typing import Any, Callable
+x = None # type: Any
+a = None # type: A
+g = None # type: Callable[[], None]
+h = None # type: Callable[[A], None]
+
+f()     # E: Too few arguments for "f"
+f(x, x) # E: Too many arguments for "f"
+g = f   # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
+f(a)
+f(x)
+a = f(a)
+h = f
+
+def f(x): pass
+
+class A: pass
+
+[case testImplicitGlobalFunctionSignatureWithDifferentArgCounts]
+from typing import Callable
+g0 = None # type: Callable[[], None]
+g1 = None # type: Callable[[A], None]
+g2 = None # type: Callable[[A, A], None]
+a = None # type: A
+
+g1 = f0 # E: Incompatible types in assignment (expression has type Callable[[], Any], variable has type Callable[[A], None])
+g2 = f0 # E: Incompatible types in assignment (expression has type Callable[[], Any], variable has type Callable[[A, A], None])
+g0 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[], None])
+g1 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[A], None])
+
+g0 = g0
+g2 = f2
+f0()
+f2(a, a)
+
+def f0(): pass
+
+def f2(x, y): pass
+
+class A: pass
+
+[case testImplicitGlobalFunctionSignatureWithDefaultArgs]
+from typing import Callable
+a, b = None, None # type: (A, B)
+
+g0 = None # type: Callable[[], None]
+g1 = None # type: Callable[[A], None]
+g2 = None # type: Callable[[A, A], None]
+g3 = None # type: Callable[[A, A, A], None]
+g4 = None # type: Callable[[A, A, A, A], None]
+
+f01(a, a)       # Fail
+f13()           # Fail
+f13(a, a, a, a) # Fail
+g2 = f01 # Fail
+g0 = f13 # Fail
+g4 = f13 # Fail
+
+f01()
+f01(a)
+f13(a)
+f13(a, a)
+f13(a, a, a)
+
+g0 = f01
+g1 = f01
+g1 = f13
+g2 = f13
+g3 = f13
+
+def f01(x = b): pass
+def f13(x, y = b, z = b): pass
+
+class A: pass
+class B: pass
+[out]
+main:10: error: Too many arguments for "f01"
+main:11: error: Too few arguments for "f13"
+main:12: error: Too many arguments for "f13"
+main:13: error: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[A, A], None])
+main:14: error: Incompatible types in assignment (expression has type Callable[[Any, Any, Any], Any], variable has type Callable[[], None])
+main:15: error: Incompatible types in assignment (expression has type Callable[[Any, Any, Any], Any], variable has type Callable[[A, A, A, A], None])
+
+[case testSkipTypeCheckingWithImplicitSignature]
+
+a = None # type: A
+def f():
+    a()
+def g(x):
+    a()
+    a.x
+    a + a
+    if a():
+        a()
+class A: pass
+[builtins fixtures/bool.pyi]
+
+[case testSkipTypeCheckingWithImplicitSignatureAndDefaultArgs]
+
+a = None # type: A
+def f(x=a()):
+    a()
+def g(x, y=a, z=a()):
+    a()
+class A: pass
+
+[case testImplicitMethodSignature]
+from typing import Callable
+g0 = None # type: Callable[[], None]
+g1 = None # type: Callable[[A], None]
+g2 = None # type: Callable[[A, A], None]
+a = None # type: A
+
+g0 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
+g2 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[A, A], None])
+a = a.f  # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type "A")
+
+class A:
+    def g(self) -> None:
+        a = self.f(a)
+    def f(self, x): pass
+
+g1 = a.f
+a = a.f(a)
+
+[case testSkipTypeCheckingImplicitMethod]
+
+a = None # type: A
+class A:
+    def f(self):
+        a()
+    def g(self, x, y=a()):
+        a()
+
+[case testImplicitInheritedMethod]
+from typing import Callable
+g0 = None # type: Callable[[], None]
+g1 = None # type: Callable[[A], None]
+a = None # type: A
+
+g0 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
+
+g1 = a.f
+a = a.f(a)
+
+class B:
+    def f(self, x):
+        pass
+class A(B):
+    def g(self) -> None:
+        a = self.f(a)
+
+[case testEmptyReturnWithImplicitSignature]
+import typing
+def f():
+    return
+class A:
+    def g(self):
+        return
+
+[case testVarArgsWithImplicitSignature]
+from typing import Any
+o = None # type: Any
+def f(x, *a): pass
+f() # E: Too few arguments for "f"
+f(o)
+f(o, o)
+f(o, o, o)
+[builtins fixtures/list.pyi]
+
+
+-- Implicit types for constructors
+-- -------------------------------
+
+
+[case testInitMethodWithImplicitSignature]
+from typing import Callable
+f1 = None # type: Callable[[A], A]
+f2 = None # type: Callable[[A, A], A]
+a = None # type: A
+
+A(a)   # Fail
+f1 = A # Fail
+
+A(a, a)
+f2 = A
+
+class A:
+  def __init__(self, a, b): pass
+[out]
+main:6: error: Too few arguments for "A"
+main:7: error: Incompatible types in assignment (expression has type Type[A], variable has type Callable[[A], A])
+
+[case testUsingImplicitTypeObjectWithIs]
+
+t = None # type: type
+t = A
+t = B
+
+class A: pass
+class B:
+    def __init__(self): pass
+
+
+-- Type compatibility
+-- ------------------
+
+
+[case testTupleTypeCompatibility]
+from typing import Any, Tuple
+t1 = None # type: Tuple[Any, A]
+t2 = None # type: Tuple[A, Any]
+t3 = None # type: Tuple[Any, Any]
+t4 = None # type: Tuple[A, A]
+t5 = None # type: Tuple[Any, Any, Any]
+
+t3 = t5 # E: Incompatible types in assignment (expression has type "Tuple[Any, Any, Any]", variable has type "Tuple[Any, Any]")
+t5 = t4 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[Any, Any, Any]")
+
+t1 = t1
+t1 = t2
+t1 = t3
+t1 = t4
+t2 = t1
+t2 = t3
+t2 = t4
+t3 = t1
+t3 = t2
+t3 = t4
+t4 = t1
+t4 = t2
+t4 = t3
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+[case testFunctionTypeCompatibilityAndReturnTypes]
+from typing import Any, Callable
+f1 = None # type: Callable[[], Any]
+f11 = None # type: Callable[[], Any]
+f2 = None # type: Callable[[], A]
+f3 = None # type: Callable[[], None]
+
+f2 = f3
+
+f1 = f2
+f1 = f3
+f2 = f11
+f3 = f11
+
+class A: pass
+
+[case testFunctionTypeCompatibilityAndArgumentTypes]
+from typing import Any, Callable
+f1 = None # type: Callable[[A, Any], None]
+f2 = None # type: Callable[[Any, A], None]
+f3 = None # type: Callable[[A, A], None]
+
+f1 = f1
+f1 = f2
+f1 = f3
+
+f2 = f1
+f2 = f2
+f2 = f3
+
+f3 = f1
+f3 = f2
+f3 = f3
+
+class A: pass
+
+[case testFunctionTypeCompatibilityAndArgumentCounts]
+from typing import Any, Callable
+f1 = None # type: Callable[[Any], None]
+f2 = None # type: Callable[[Any, Any], None]
+
+f1 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], None], variable has type Callable[[Any], None])
+
+
+-- Overriding
+-- ----------
+
+
+[case testOverridingMethodWithDynamicTypes]
+from typing import Any
+a, b = None, None # type: (A, B)
+
+b.f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A"
+a = a.f(b)
+
+class B:
+    def f(self, x: 'A') -> 'B':
+        pass
+    def g(self, x: 'B') -> None:
+        pass
+class A(B):
+    def f(self, x: Any) -> Any:
+        pass
+    def g(self, x: Any) -> None:
+        pass
+
+[case testOverridingMethodWithImplicitDynamicTypes]
+
+a, b = None, None # type: (A, B)
+
+b.f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A"
+a = a.f(b)
+
+class B:
+    def f(self, x: 'A') -> 'B':
+        pass
+    def g(self, x: 'B') -> None:
+        pass
+class A(B):
+    def f(self, x):
+        pass
+    def g(self, x):
+        pass
+
+[case testOverridingMethodAcrossHierarchy]
+import typing
+class C:
+    def f(self, a: 'A') -> None: pass
+class B(C):
+    def f(self, a): pass
+class A(B):
+    def f(self, a: 'D') -> None: # E: Argument 1 of "f" incompatible with supertype "C"
+        pass
+class D: pass
+[out]
+
+[case testInvalidOverrideArgumentCountWithImplicitSignature1]
+import typing
+class B:
+    def f(self, x: A) -> None: pass
+class A(B):
+    def f(self, x, y): # dynamic function not type checked
+        x()
+[out]
+
+[case testInvalidOverrideArgumentCountWithImplicitSignature2]
+import typing
+class B:
+    def f(self, x, y): pass
+class A(B):
+    def f(self, x: 'A') -> None: # E: Signature of "f" incompatible with supertype "B"
+        pass
+[out]
+
+[case testInvalidOverrideArgumentCountWithImplicitSignature3]
+import typing
+class B:
+    def f(self, x: A) -> None: pass
+class A(B):
+    def f(self, x, y) -> None: # E: Signature of "f" incompatible with supertype "B"
+        x()
+[out]
+
+
+-- Don't complain about too few/many arguments in dynamic functions
+-- ----------------------------------------------------------------
+
+[case testTooManyArgsInDynamic]
+def f() -> None: pass
+def g():
+    f(1) # Silent
+[out]
+
+[case testTooFewArgsInDynamic]
+def f(a: int) -> None: pass
+def g():
+    f() # Silent
+[out]
+
+[case testJustRightInDynamic]
+def f(a: int) -> None: pass
+def g():
+    f('') # Silent
+[out]
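The cases above end with mypy's rule that argument-count errors are suppressed inside unannotated ("dynamic") functions. As a minimal standalone sketch of that behavior (the function names below are invented for illustration and are not part of the test data), mypy with default options would report the bad call only in the annotated caller; enabling --check-untyped-defs would report both:

def helper(a: int) -> None:
    pass

def annotated_caller() -> None:
    # This function is annotated, so mypy checks its body and
    # reports the extra argument in the call below.
    helper(1, 2)  # flagged: too many arguments for "helper"

def dynamic_caller():
    # No annotations here, so mypy treats the body as dynamically
    # typed and the same bad call passes silently by default.
    helper(1, 2)
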
diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test
new file mode 100644
index 0000000..011580e
--- /dev/null
+++ b/test-data/unit/check-enum.test
@@ -0,0 +1,397 @@
+-- This test file checks Enum
+
+[case testEnumBasics]
+from enum import Enum
+class Medal(Enum):
+    gold = 1
+    silver = 2
+    bronze = 3
+m = Medal.gold
+m = 1
+[out]
+main:7: error: Incompatible types in assignment (expression has type "int", variable has type "Medal")
+
+[case testEnumNameAndValue]
+from enum import Enum
+class Truth(Enum):
+    true = True
+    false = False
+x = ''
+x = Truth.true.name
+reveal_type(Truth.true.name)
+reveal_type(Truth.false.value)
+[builtins fixtures/bool.pyi]
+[out]
+main:7: error: Revealed type is 'builtins.str'
+main:8: error: Revealed type is 'Any'
+
+[case testEnumUnique]
+import enum
+@enum.unique
+class E(enum.Enum):
+    x = 1
+    y = 1  # NOTE: This duplicate value is not detected by mypy at the moment
+x = 1
+x = E.x
+[out]
+main:7: error: Incompatible types in assignment (expression has type "E", variable has type "int")
+
+[case testIntEnum_assignToIntVariable]
+from enum import IntEnum
+class N(IntEnum):
+    x = 1
+    y = 1
+n = 1
+n = N.x  # Subclass of int, so it's okay
+s = ''
+s = N.y
+[out]
+main:8: error: Incompatible types in assignment (expression has type "N", variable has type "str")
+
+[case testIntEnum_functionTakingIntEnum]
+from enum import IntEnum
+class SomeIntEnum(IntEnum):
+    x = 1
+def takes_some_int_enum(n: SomeIntEnum):
+    pass
+takes_some_int_enum(SomeIntEnum.x)
+takes_some_int_enum(1)  # Error
+takes_some_int_enum(SomeIntEnum(1))  # How to deal with the above
+[out]
+main:7: error: Argument 1 to "takes_some_int_enum" has incompatible type "int"; expected "SomeIntEnum"
+
+[case testIntEnum_functionTakingInt]
+from enum import IntEnum
+class SomeIntEnum(IntEnum):
+    x = 1
+def takes_int(i: int):
+    pass
+takes_int(SomeIntEnum.x)
+takes_int(2)
+
+[case testIntEnum_functionReturningIntEnum]
+from enum import IntEnum
+class SomeIntEnum(IntEnum):
+    x = 1
+def returns_some_int_enum() -> SomeIntEnum:
+    return SomeIntEnum.x
+an_int = 1
+an_int = returns_some_int_enum()
+
+an_enum = SomeIntEnum.x
+an_enum = returns_some_int_enum()
+[out]
+
+[case testEnumMethods]
+from enum import Enum
+
+class Color(Enum):
+    red = 1
+    green = 2
+
+    def m(self, x: int): pass
+    @staticmethod
+    def m2(x: int): pass
+
+Color.red.m('')
+Color.m2('')
+[builtins fixtures/staticmethod.pyi]
+[out]
+main:11: error: Argument 1 to "m" of "Color" has incompatible type "str"; expected "int"
+main:12: error: Argument 1 to "m2" of "Color" has incompatible type "str"; expected "int"
+
+[case testIntEnum_ExtendedIntEnum_functionTakingExtendedIntEnum]
+from enum import IntEnum
+class ExtendedIntEnum(IntEnum):
+    pass
+class SomeExtIntEnum(ExtendedIntEnum):
+    x = 1
+
+def takes_int(i: int):
+    pass
+takes_int(SomeExtIntEnum.x)
+
+def takes_some_ext_int_enum(s: SomeExtIntEnum):
+    pass
+takes_some_ext_int_enum(SomeExtIntEnum.x)
+
+[case testNamedTupleEnum]
+from typing import NamedTuple
+from enum import Enum
+
+N = NamedTuple('N', [('bar', int)])
+
+class E(N, Enum):
+    X = N(1)
+
+def f(x: E) -> None: pass
+
+f(E.X)
+
+[case testEnumCall]
+from enum import IntEnum
+class E(IntEnum):
+    a = 1
+x = None  # type: int
+reveal_type(E(x))
+[out]
+main:5: error: Revealed type is '__main__.E'
+
+[case testEnumIndex]
+from enum import IntEnum
+class E(IntEnum):
+    a = 1
+s = None  # type: str
+reveal_type(E[s])
+[out]
+main:5: error: Revealed type is '__main__.E'
+
+[case testEnumIndexError]
+from enum import IntEnum
+class E(IntEnum):
+    a = 1
+E[1]  # E: Enum index should be a string (actual index type "int")
+x = E[1]  # E: Enum index should be a string (actual index type "int")
+
+[case testEnumIndexIsNotAnAlias]
+from enum import Enum
+
+class E(Enum):
+    a = 1
+    b = 2
+reveal_type(E['a'])  # E: Revealed type is '__main__.E'
+E['a']
+x = E['a']
+reveal_type(x)  # E: Revealed type is '__main__.E'
+
+def get_member(name: str) -> E:
+    val = E[name]
+    return val
+
+reveal_type(get_member('a'))  # E: Revealed type is '__main__.E'
+
+[case testGenericEnum]
+from enum import Enum
+from typing import Generic, TypeVar
+
+T = TypeVar('T')
+
+class F(Generic[T], Enum):  # E: Enum class cannot be generic
+    x: T
+    y: T
+
+reveal_type(F[int].x)  # E: Revealed type is '__main__.F[builtins.int*]'
+
+[case testEnumFlag]
+from enum import Flag
+class C(Flag):
+    a = 1
+    b = 2
+x = C.a
+x = 1
+x = x | C.b
+[out]
+main:6: error: Incompatible types in assignment (expression has type "int", variable has type "C")
+
+[case testEnumIntFlag]
+from enum import IntFlag
+class C(IntFlag):
+    a = 1
+    b = 2
+x = C.a
+x = 1
+x = x | C.b
+[out]
+main:6: error: Incompatible types in assignment (expression has type "int", variable has type "C")
+
+[case testAnonymousEnum]
+from enum import Enum
+class A:
+    def f(self) -> None:
+        class E(Enum):
+            a = 1
+        self.x = E.a
+a = A()
+reveal_type(a.x)
+[out]
+main:8: error: Revealed type is '__main__.E at 4'
+
+[case testEnumInClassBody]
+from enum import Enum
+class A:
+    class E(Enum):
+        a = 1
+class B:
+    class E(Enum):
+        a = 1
+x = A.E.a
+y = B.E.a
+x = y
+[out]
+main:10: error: Incompatible types in assignment (expression has type "__main__.B.E", variable has type "__main__.A.E")
+
+[case testFunctionalEnumString]
+from enum import Enum, IntEnum
+E = Enum('E', 'foo bar')
+I = IntEnum('I', ' bar, baz ')
+reveal_type(E.foo)
+reveal_type(E.bar.value)
+reveal_type(I.bar)
+reveal_type(I.baz.value)
+[out]
+main:4: error: Revealed type is '__main__.E'
+main:5: error: Revealed type is 'Any'
+main:6: error: Revealed type is '__main__.I'
+main:7: error: Revealed type is 'builtins.int'
+
+[case testFunctionalEnumListOfStrings]
+from enum import Enum, IntEnum
+E = Enum('E', ('foo', 'bar'))
+F = IntEnum('F', ['bar', 'baz'])
+reveal_type(E.foo)
+reveal_type(F.baz)
+[out]
+main:4: error: Revealed type is '__main__.E'
+main:5: error: Revealed type is '__main__.F'
+
+[case testFunctionalEnumListOfPairs]
+from enum import Enum, IntEnum
+E = Enum('E', [('foo', 1), ['bar', 2]])
+F = IntEnum('F', (['bar', 1], ('baz', 2)))
+reveal_type(E.foo)
+reveal_type(F.baz)
+reveal_type(E.foo.value)
+reveal_type(F.bar.name)
+[out]
+main:4: error: Revealed type is '__main__.E'
+main:5: error: Revealed type is '__main__.F'
+main:6: error: Revealed type is 'Any'
+main:7: error: Revealed type is 'builtins.str'
+
+[case testFunctionalEnumDict]
+from enum import Enum, IntEnum
+E = Enum('E', {'foo': 1, 'bar': 2})
+F = IntEnum('F', {'bar': 1, 'baz': 2})
+reveal_type(E.foo)
+reveal_type(F.baz)
+reveal_type(E.foo.value)
+reveal_type(F.bar.name)
+[out]
+main:4: error: Revealed type is '__main__.E'
+main:5: error: Revealed type is '__main__.F'
+main:6: error: Revealed type is 'Any'
+main:7: error: Revealed type is 'builtins.str'
+
+[case testFunctionalEnumErrors]
+from enum import Enum, IntEnum
+A = Enum('A')
+B = Enum('B', 42)
+C = Enum('C', 'a b', 'x')
+D = Enum('D', foo)
+bar = 'x y z'
+E = Enum('E', bar)
+I = IntEnum('I')
+J = IntEnum('I', 42)
+K = IntEnum('I', 'p q', 'z')
+L = Enum('L', ' ')
+M = Enum('M', ())
+N = IntEnum('M', [])
+P = Enum('P', [42])
+Q = Enum('Q', [('a', 42, 0)])
+R = IntEnum('R', [[0, 42]])
+S = Enum('S', {1: 1})
+T = Enum('T', keyword='a b')
+U = Enum('U', *['a'])
+V = Enum('U', **{'a': 1})
+W = Enum('W', 'a b')
+W.c
+[out]
+main:2: error: Too few arguments for Enum()
+main:3: error: Enum() expects a string, tuple, list or dict literal as the second argument
+main:4: error: Too many arguments for Enum()
+main:5: error: Enum() expects a string, tuple, list or dict literal as the second argument
+main:5: error: Name 'foo' is not defined
+main:7: error: Enum() expects a string, tuple, list or dict literal as the second argument
+main:8: error: Too few arguments for IntEnum()
+main:9: error: IntEnum() expects a string, tuple, list or dict literal as the second argument
+main:10: error: Too many arguments for IntEnum()
+main:11: error: Enum() needs at least one item
+main:12: error: Enum() needs at least one item
+main:13: error: IntEnum() needs at least one item
+main:14: error: Enum() with tuple or list expects strings or (name, value) pairs
+main:15: error: Enum() with tuple or list expects strings or (name, value) pairs
+main:16: error: IntEnum() with tuple or list expects strings or (name, value) pairs
+main:17: error: Enum() with dict literal requires string literals
+main:18: error: Unexpected arguments to Enum()
+main:19: error: Unexpected arguments to Enum()
+main:20: error: Unexpected arguments to Enum()
+main:22: error: Type[W] has no attribute "c"
+
+[case testFunctionalEnumFlag]
+from enum import Flag, IntFlag
+A = Flag('A', 'x y')
+B = IntFlag('B', 'a b')
+reveal_type(A.x)
+reveal_type(B.a)
+[out]
+main:4: error: Revealed type is '__main__.A'
+main:5: error: Revealed type is '__main__.B'
+
+[case testAnonymousFunctionalEnum]
+from enum import Enum
+class A:
+    def f(self) -> None:
+        E = Enum('E', 'a b')
+        self.x = E.a
+a = A()
+reveal_type(a.x)
+[out]
+main:7: error: Revealed type is '__main__.A.E at 4'
+
+[case testFunctionalEnumInClassBody]
+from enum import Enum
+class A:
+    E = Enum('E', 'a b')
+class B:
+    E = Enum('E', 'a b')
+x = A.E.a
+y = B.E.a
+x = y
+[out]
+main:8: error: Incompatible types in assignment (expression has type "__main__.B.E", variable has type "__main__.A.E")
+
+[case testFunctionalEnum_python2]
+from enum import Enum
+Eu = Enum(u'Eu', u'a b')
+Eb = Enum(b'Eb', b'a b')
+Gu = Enum(u'Gu', {u'a': 1})
+Gb = Enum(b'Gb', {b'a': 1})
+Hu = Enum(u'Hu', [u'a'])
+Hb = Enum(b'Hb', [b'a'])
+Eu.a
+Eb.a
+Gu.a
+Gb.a
+Hu.a
+Hb.a
+[out]
+
+[case testEnumIncremental]
+import m
+reveal_type(m.E.a)
+reveal_type(m.F.b)
+[file m.py]
+from enum import Enum
+class E(Enum):
+    a = 1
+    b = 2
+F = Enum('F', 'a b')
+[rechecked]
+[stale]
+[out1]
+main:2: error: Revealed type is 'm.E'
+main:3: error: Revealed type is 'm.F'
+[out2]
+main:2: error: Revealed type is 'm.E'
+main:3: error: Revealed type is 'm.F'
+
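Before the next file, a minimal sketch of the Enum typing rules the check-enum.test cases above exercise: member access yields the enum type, .name is a str, .value of a plain Enum is Any, and IntEnum members are also usable as int. The class and function names below are invented for illustration only:

from enum import Enum, IntEnum

class Medal(Enum):
    gold = 1
    silver = 2

class Priority(IntEnum):
    low = 1
    high = 2

m = Medal.gold           # inferred as Medal
n = Medal.gold.name      # inferred as str
v = Medal.gold.value     # inferred as Any for a plain Enum

def takes_int(i: int) -> None:
    pass

takes_int(Priority.high)  # accepted: IntEnum members are subtypes of int
takes_int(Medal.gold)     # flagged: "Medal" is not compatible with "int"
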
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test
new file mode 100644
index 0000000..03bf5bd
--- /dev/null
+++ b/test-data/unit/check-expressions.test
@@ -0,0 +1,1735 @@
+-- Test cases for simple expressions.
+--
+-- See also:
+--  * check-functions.test contains test cases for calls.
+--  * check-varargs.test contains test cases for *args.
+--  * check-dynamic.test contains test cases related to 'Any' type.
+--  * check-generics.test contains test cases for generic values.
+
+
+-- None expression
+-- ---------------
+
+
+[case testNoneAsRvalue]
+import typing
+a = None # type: A
+class A: pass
+[out]
+
+[case testNoneAsArgument]
+import typing
+def f(x: 'A', y: 'B') -> None: pass
+f(None, None)
+class A: pass
+class B(A): pass
+[out]
+
+
+-- Simple expressions
+-- ------------------
+
+
+[case testIntLiteral]
+a = 0
+b = None # type: A
+b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "A")
+a = 1
+class A:
+    pass
+
+[case testStrLiteral]
+a = ''
+b = None # type: A
+b = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+a = 'x'
+a = r"x"
+a = """foo"""
+class A:
+    pass
+
+[case testFloatLiteral]
+a = 0.0
+b = None # type: A
+b = 1.1 # E: Incompatible types in assignment (expression has type "float", variable has type "A")
+a = 1.1
+class A:
+    pass
+[file builtins.py]
+class object:
+    def __init__(self): pass
+class type: pass
+class function: pass
+class float: pass
+class str: pass
+
+[case testComplexLiteral]
+a = 0.0j
+b = None # type: A
+b = 1.1j # E: Incompatible types in assignment (expression has type "complex", variable has type "A")
+a = 1.1j
+class A:
+    pass
+[file builtins.py]
+class object:
+    def __init__(self): pass
+class type: pass
+class function: pass
+class complex: pass
+class str: pass
+
+[case testBytesLiteral]
+b, a = None, None # type: (bytes, A)
+b = b'foo'
+b = br"foo"
+b = b'''foo'''
+a = b'foo' # E: Incompatible types in assignment (expression has type "bytes", variable has type "A")
+class A: pass
+[file builtins.py]
+class object:
+    def __init__(self): pass
+class type: pass
+class tuple: pass
+class function: pass
+class bytes: pass
+class str: pass
+
+[case testUnicodeLiteralInPython3]
+s = None  # type: str
+s = u'foo'
+b = None  # type: bytes
+b = u'foo' # E: Incompatible types in assignment (expression has type "str", variable has type "bytes")
+[builtins fixtures/primitives.pyi]
+
+
+-- Binary operators
+-- ----------------
+
+
+[case testAdd]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a + c  # Fail
+a = a + b  # Fail
+c = b + a  # Fail
+c = a + b
+
+class A:
+    def __add__(self, x: 'B') -> 'C': pass
+class B: pass
+class C: pass
+[out]
+main:3: error: Unsupported operand types for + ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for + ("B")
+[case testAdd]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a + c  # Fail
+a = a + b  # Fail
+c = b + a  # Fail
+c = a + b
+
+class A:
+    def __add__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for + ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for + ("B")
+
+[case testSub]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a - c  # Fail
+a = a - b  # Fail
+c = b - a  # Fail
+c = a - b
+
+class A:
+    def __sub__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for - ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for - ("B")
+
+[case testMul]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a * c  # Fail
+a = a * b  # Fail
+c = b * a  # Fail
+c = a * b
+
+class A:
+    def __mul__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for * ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for * ("B")
+
+[case testMatMul]
+a, b, c = None, None, None # type: (A, B, C)
+c = a @ c  # E: Unsupported operand types for @ ("A" and "C")
+a = a @ b  # E: Incompatible types in assignment (expression has type "C", variable has type "A")
+c = b @ a  # E: Unsupported left operand type for @ ("B")
+c = a @ b
+
+class A:
+    def __matmul__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+
+[case testDiv]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a / c  # Fail
+a = a / b  # Fail
+c = b / a  # Fail
+c = a / b
+
+class A:
+    def __truediv__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for / ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for / ("B")
+
+[case testIntDiv]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a // c  # Fail
+a = a // b  # Fail
+c = b // a  # Fail
+c = a // b
+
+class A:
+    def __floordiv__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for // ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for // ("B")
+
+[case testMod]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a % c  # Fail
+a = a % b  # Fail
+c = b % a  # Fail
+c = a % b
+
+class A:
+    def __mod__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for % ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for % ("B")
+
+[case testPow]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a ** c  # Fail
+a = a ** b  # Fail
+c = b ** a  # Fail
+c = a ** b
+
+class A:
+    def __pow__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for ** ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for ** ("B")
+
+[case testMiscBinaryOperators]
+
+a, b = None, None # type: (A, B)
+b = a & a  # Fail
+b = a | b  # Fail
+b = a ^ a  # Fail
+b = a << b # Fail
+b = a >> a # Fail
+
+b = a & b
+b = a | a
+b = a ^ b
+b = a << a
+b = a >> b
+class A:
+  def __and__(self, x: 'B') -> 'B': pass
+  def __or__(self, x: 'A') -> 'B': pass
+  def __xor__(self, x: 'B') -> 'B': pass
+  def __lshift__(self, x: 'A') -> 'B': pass
+  def __rshift__(self, x: 'B') -> 'B': pass
+class B: pass
+[out]
+main:3: error: Unsupported operand types for & ("A" and "A")
+main:4: error: Unsupported operand types for | ("A" and "B")
+main:5: error: Unsupported operand types for ^ ("A" and "A")
+main:6: error: Unsupported operand types for << ("A" and "B")
+main:7: error: Unsupported operand types for >> ("A" and "A")
+
+[case testBooleanAndOr]
+
+a, b = None, None # type: (A, bool)
+b = b and b
+b = b or b
+b = b and a # E: Incompatible types in assignment (expression has type "Union[bool, A]", variable has type "bool")
+b = a and b # E: Incompatible types in assignment (expression has type "Union[A, bool]", variable has type "bool")
+b = b or a  # E: Incompatible types in assignment (expression has type "Union[bool, A]", variable has type "bool")
+b = a or b  # E: Incompatible types in assignment (expression has type "Union[A, bool]", variable has type "bool")
+class A: pass
+
+[builtins fixtures/bool.pyi]
+
+[case testRestrictedTypeAnd]
+
+b = None # type: bool
+i = None # type: str
+j = not b and i
+if j:
+    reveal_type(j) # E: Revealed type is 'builtins.str'
+[builtins fixtures/bool.pyi]
+
+[case testRestrictedTypeOr]
+
+b = None # type: bool
+i = None # type: str
+j = b or i
+if not j:
+    reveal_type(j) # E: Revealed type is 'builtins.str'
+[builtins fixtures/bool.pyi]
+
+[case testAndOr]
+
+s = ""
+b = bool()
+reveal_type(s and b or b)  # E: Revealed type is 'builtins.bool'
+[builtins fixtures/bool.pyi]
+
+[case testNonBooleanOr]
+
+c, d, b = None, None, None # type: (C, D, bool)
+c = c or c
+c = c or d
+c = d or c
+b = c or c # E: Incompatible types in assignment (expression has type "C", variable has type "bool")
+d = c or d # E: Incompatible types in assignment (expression has type "C", variable has type "D")
+d = d or c # E: Incompatible types in assignment (expression has type "C", variable has type "D")
+class C: pass
+class D(C): pass
+[builtins fixtures/bool.pyi]
+
+[case testInOperator]
+from typing import Iterator, Iterable, Any
+a, b, c, d, e = None, None, None, None, None # type: (A, B, bool, D, Any)
+c = c in a  # Fail
+a = b in a  # Fail
+c = a in b  # Fail
+c = b in d  # Fail
+c = b in a
+c = a in d
+c = e in d
+c = a in e
+
+class A:
+    def __contains__(self, x: 'B') -> bool: pass
+class B: pass
+class D(Iterable[A]):
+    def __iter__(self) -> Iterator[A]: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Unsupported operand types for in ("bool" and "A")
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:5: error: Unsupported right operand type for in ("B")
+main:6: error: Unsupported operand types for in ("B" and "D")
+
+[case testNotInOperator]
+from typing import Iterator, Iterable, Any
+a, b, c, d, e = None, None, None, None, None # type: (A, B, bool, D, Any)
+c = c not in a  # Fail
+a = b not in a  # Fail
+c = a not in b  # Fail
+c = b not in d  # Fail
+c = b not in a
+c = a not in d
+c = e in d
+c = a in e
+
+class A:
+    def __contains__(self, x: 'B') -> bool: pass
+class B: pass
+class D(Iterable[A]):
+    def __iter__(self) -> Iterator[A]: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Unsupported operand types for in ("bool" and "A")
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:5: error: Unsupported right operand type for in ("B")
+main:6: error: Unsupported operand types for in ("B" and "D")
+
+[case testNonBooleanContainsReturnValue]
+
+a, b = None, None # type: (A, bool)
+b = a not in a
+b = a in a
+
+class A:
+  def __contains__(self, x: 'A') -> object: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:4: error: Incompatible types in assignment (expression has type "object", variable has type "bool")
+
+[case testEq]
+
+a, b = None, None # type: (A, bool)
+a = a == b # Fail
+a = a != b # Fail
+b = a == b
+b = a != b
+
+class A:
+  def __eq__(self, o: object) -> bool: pass
+  def __ne__(self, o: object) -> bool: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testLtAndGt]
+
+a, b, bo = None, None, None # type: (A, B, bool)
+a = a < b # Fail
+a = a > b # Fail
+bo = a < b
+bo = a > b
+
+class A:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+class B:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testCmp_python2]
+
+a, b, c, bo = None, None, None, None # type: (A, B, C, bool)
+bo = a == a  # E: Unsupported operand types for == ("A" and "A")
+bo = a != a  # E: Argument 1 to "__cmp__" of "A" has incompatible type "A"; expected "B"
+bo = a < b
+bo = a > b
+bo = b <= b
+bo = b <= c
+bo = b >= c  # E: Argument 1 to "__cmp__" of "B" has incompatible type "C"; expected "B"
+bo = a >= b
+bo = c >= b
+bo = c <= b  # E: Argument 1 to "__cmp__" of "C" has incompatible type "B"; expected "A"
+bo = a == c
+bo = b == c  # E: Unsupported operand types for == ("C" and "B")
+
+class A:
+    def __cmp__(self, o):
+      # type: ('B') -> bool
+      pass
+    def __eq__(self, o):
+      # type: ('int') -> bool
+      pass
+class B:
+    def __cmp__(self, o):
+        # type: ('B') -> bool
+        pass
+    def __le__(self, o):
+        # type: ('C') -> bool
+        pass
+class C:
+    def __cmp__(self, o):
+      # type: ('A') -> bool
+      pass
+    def __eq__(self, o):
+      # type: ('int') -> bool
+      pass
+
+[builtins_py2 fixtures/bool.pyi]
+
+[case cmpIgnoredPy3]
+
+a, b, bo = None, None, None # type: (A, B, bool)
+bo = a <= b # E: Unsupported left operand type for <= ("A")
+
+class A:
+    def __cmp__(self, o: 'B') -> bool: pass
+class B:
+    pass
+
+[builtins fixtures/bool.pyi]
+
+[case testLeAndGe]
+
+a, b, bo = None, None, None # type: (A, B, bool)
+a = a <= b # Fail
+a = a >= b # Fail
+bo = a <= b
+bo = a >= b
+
+class A:
+    def __le__(self, o: 'B') -> bool: pass
+    def __ge__(self, o: 'B') -> bool: pass
+class B:
+    def __le__(self, o: 'B') -> bool: pass
+    def __ge__(self, o: 'B') -> bool: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testChainedComp]
+
+a, b, bo = None, None, None # type: (A, B, bool)
+a < a < b < b # Fail
+a < b < b < b
+a < a > a < b # Fail
+
+class A:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+class B:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Unsupported operand types for > ("A" and "A")
+main:5: error: Unsupported operand types for > ("A" and "A")
+main:5: error: Unsupported operand types for < ("A" and "A")
+
+
+[case testChainedCompBoolRes]
+
+a, b, bo = None, None, None # type: (A, B, bool)
+bo = a < b < b
+a = a < b < b # Fail
+
+class A:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+class B:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+
+[case testChainedCompResTyp]
+
+x, y = None, None # type: (X, Y)
+a, b, p, bo = None, None, None, None # type: (A, B, P, bool)
+b = y == y == y
+bo = y == y == y # Fail
+a = x < y
+a = x < y == y # Fail
+p = x < y == y
+
+class P:
+    pass
+class A(P):
+    pass
+class B(P):
+    pass
+
+class X:
+    def __lt__(self, o: 'Y') -> A: pass
+    def __gt__(self, o: 'Y') -> A: pass
+class Y:
+    def __lt__(self, o: 'Y') -> A: pass
+    def __gt__(self, o: 'Y') -> A: pass
+    def __eq__(self, o: 'Y') -> B: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "bool")
+main:7: error: Incompatible types in assignment (expression has type "P", variable has type "A")
+
+
+[case testIs]
+
+a, b = None, None # type: (A, bool)
+a = a is b # Fail
+b = a is b
+b = b is a
+b = a is None
+class A: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testIsNot]
+
+a, b = None, None # type: (A, bool)
+a = a is not b # Fail
+b = a is not b
+b = b is not a
+b = a is not None
+class A: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testReverseBinaryOperator]
+
+class A:
+    def __add__(self, x: int) -> int: pass
+class B:
+    def __radd__(self, x: A) -> str: pass
+s = None  # type: str
+n = None  # type: int
+n = A() + 1
+s = A() + B()
+n = A() + B() # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testReverseBinaryOperator2]
+
+class A:
+    def __add__(self, x: 'A') -> object: pass
+class B:
+    def __radd__(self, x: A) -> str: pass
+s = None  # type: str
+n = None  # type: int
+s = A() + B()
+n = A() + B() # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testReverseBinaryOperator3]
+
+class N:
+    def __add__(self, x: 'N') -> object: pass
+class A:
+    def __add__(self, x: N) -> int: pass
+class B:
+    def __radd__(self, x: N) -> str: pass
+s = None  # type: str
+s = A() + B() # E: Unsupported operand types for + ("A" and "B")
+
+[case testBinaryOperatorWithAnyRightOperand]
+from typing import Any, cast
+class A: pass
+A() + cast(Any, 1)
+
+[case testReverseComparisonOperator]
+
+class C:
+    def __gt__(self, x: 'A') -> object: pass
+class A:
+    def __lt__(self, x: C) -> int: pass
+class B:
+    def __gt__(self, x: A) -> str: pass
+s = None  # type: str
+n = None  # type: int
+n = A() < C()
+s = A() < B()
+n = A() < B() # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+s = object() < B() # E: Unsupported operand types for > ("B" and "object")
+
+[case testErrorContextAndBinaryOperators]
+import typing
+class A:
+    def __getitem__(self, i: str) -> int: pass
+def f() -> None:
+    A()[1] # Error
+class B:
+    A()[1] # Error
+A()[1] # Error
+[out]
+main:5: error: Invalid index type "int" for "A"; expected type "str"
+main:7: error: Invalid index type "int" for "A"; expected type "str"
+main:8: error: Invalid index type "int" for "A"; expected type "str"
+
+[case testErrorContextAndBinaryOperators2]
+import m
+[file m.py]
+import typing
+class A:
+    def __getitem__(self, i: str) -> int: pass
+def f() -> None:
+    A()[1] # Error
+class B:
+    A()[1] # Error
+A()[1] # Error
+[out]
+tmp/m.py:5: error: Invalid index type "int" for "A"; expected type "str"
+tmp/m.py:7: error: Invalid index type "int" for "A"; expected type "str"
+tmp/m.py:8: error: Invalid index type "int" for "A"; expected type "str"
+
+
+-- Unary operators
+-- ---------------
+
+
+[case testUnaryMinus]
+
+a, b = None, None # type: (A, B)
+a = -a   # Fail
+b = -b   # Fail
+b = -a
+
+class A:
+    def __neg__(self) -> 'B':
+        pass
+class B:
+    pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:4: error: Unsupported operand type for unary - ("B")
+
+[case testUnaryPlus]
+
+a, b = None, None # type: (A, B)
+a = +a   # Fail
+b = +b   # Fail
+b = +a
+
+class A:
+    def __pos__(self) -> 'B':
+        pass
+class B:
+    pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:4: error: Unsupported operand type for unary + ("B")
+
+[case testUnaryNot]
+
+a, b = None, None # type: (A, bool)
+a = not b  # Fail
+b = not a
+b = not b
+class A:
+    pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testUnaryBitwiseNeg]
+
+a, b = None, None # type: (A, B)
+a = ~a   # Fail
+b = ~b   # Fail
+b = ~a
+
+class A:
+    def __invert__(self) -> 'B':
+        pass
+class B:
+    pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:4: error: Unsupported operand type for ~ ("B")
+
+
+-- Indexing
+-- --------
+
+
+[case testIndexing]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a[c]  # Fail
+a = a[b]  # Fail
+c = b[a]  # Fail
+c = a[b]
+
+class A:
+    def __getitem__(self, x: 'B') -> 'C':
+        pass
+class B: pass
+class C: pass
+[out]
+main:3: error: Invalid index type "C" for "A"; expected type "B"
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Value of type "B" is not indexable
+
+[case testIndexingAsLvalue]
+
+a, b, c = None, None, None # type: (A, B, C)
+a[c] = c  # Fail
+a[b] = a  # Fail
+b[a] = c  # Fail
+a[b] = c
+
+class A:
+    def __setitem__(self, x: 'B', y: 'C') -> None:
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Invalid index type "C" for "A"; expected type "B"
+main:4: error: Incompatible types in assignment (expression has type "A", target has type "C")
+main:5: error: Unsupported target for indexed assignment
+
+[case testOverloadedIndexing]
+from foo import *
+[file foo.pyi]
+from typing import overload
+
+a, b, c = None, None, None  # type: (A, B, C)
+a[b]
+a[c]
+a[1]  # E: No overload variant of "__getitem__" of "A" matches argument types [builtins.int]
+
+i, s = None, None  # type: (int, str)
+i = a[b]
+s = a[b]  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+i = a[c]  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+s = a[c]
+
+class A:
+    @overload
+    def __getitem__(self, x: 'B') -> int:
+        pass
+    @overload
+    def __getitem__(self, x: 'C') -> str:
+        pass
+class B: pass
+class C: pass
+[out]
+
+
+-- Cast expression
+-- ---------------
+
+
+[case testCastExpressions]
+from typing import cast, Any
+class A: pass
+class B: pass
+class C(A): pass
+a, b, c = None, None, None # type: (A, B, C)
+
+a = cast(A, a())       # E: "A" not callable
+a = cast(Any, a())     # E: "A" not callable
+b = cast(A, a)         # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a = cast(A, b)
+a = cast(A, a)
+c = cast(C, a)
+a = cast(A, c)
+a = cast(Any, b)
+b = cast(Any, a)
+[out]
+
+[case testAnyCast]
+from typing import cast, Any
+a, b = None, None # type: (A, B)
+a = cast(Any, a())     # Fail
+a = cast(Any, b)
+b = cast(Any, a)
+class A: pass
+class B: pass
+[out]
+main:3: error: "A" not callable
+
+
+-- None return type
+-- ----------------
+
+
+[case testNoneReturnTypeBasics]
+
+a, o = None, None # type: (A, object)
+a = f()         # Fail
+o = A().g(a)    # Fail
+A().g(f())      # Fail
+x = f() # type: A # Fail
+f()
+A().g(a)
+
+def f() -> None:
+    pass
+
+class A:
+    def g(self, x: object) -> None:
+        pass
+[out]
+main:3: error: "f" does not return a value
+main:4: error: "g" of "A" does not return a value
+main:5: error: "f" does not return a value
+main:6: error: "f" does not return a value
+
+[case testNoneReturnTypeWithStatements]
+import typing
+if f():   # Fail
+    pass
+elif f(): # Fail
+    pass
+while f(): # Fail
+    pass
+def g() -> object:
+    return f() # Fail
+raise f() # Fail
+
+def f() -> None: pass
+[builtins fixtures/exception.pyi]
+[out]
+main:2: error: "f" does not return a value
+main:4: error: "f" does not return a value
+main:6: error: "f" does not return a value
+main:9: error: "f" does not return a value
+main:10: error: "f" does not return a value
+
+[case testNoneReturnTypeWithExpressions]
+from typing import cast
+a = None # type: A
+[f()]       # E: "f" does not return a value
+f() + a     # E: "f" does not return a value
+a + f()     # E: "f" does not return a value
+f() == a    # E: "f" does not return a value
+a != f()    # E: "f" does not return a value
+cast(A, f())
+f().foo     # E: "f" does not return a value
+
+def f() -> None: pass
+class A:
+    def __add__(self, x: 'A') -> 'A': pass
+[builtins fixtures/list.pyi]
+
+[case testNoneReturnTypeWithExpressions2]
+import typing
+
+a, b = None, None # type: (A, bool)
+f() in a   # E: "f" does not return a value  # E: Unsupported right operand type for in ("A")
+a < f()    # E: "f" does not return a value
+f() <= a   # E: "f" does not return a value
+a in f()   # E: "f" does not return a value
+-f()       # E: "f" does not return a value
+not f()    # E: "f" does not return a value
+f() and b  # E: "f" does not return a value
+b or f()   # E: "f" does not return a value
+
+def f() -> None: pass
+class A:
+    def __add__(self, x: 'A') -> 'A':
+        pass
+[builtins fixtures/bool.pyi]
+
+-- Slicing
+-- -------
+
+
+[case testGetSlice]
+
+a, b = None, None # type: (A, B)
+a = a[1:2] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = a[1:]  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = a[:2]  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = a[:]   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+b = a[1:2]
+b = a[1:]
+b = a[:2]
+b = a[:]
+
+class A:
+  def __getitem__(self, s: slice) -> 'B': pass
+class B: pass
+[builtins fixtures/slice.pyi]
+
+[case testSlicingWithInvalidBase]
+
+a = None # type: A
+a[1:2] # E: Invalid index type "slice" for "A"; expected type "int"
+a[:]   # E: Invalid index type "slice" for "A"; expected type "int"
+class A:
+  def __getitem__(self, n: int) -> 'A': pass
+[builtins fixtures/slice.pyi]
+
+[case testSlicingWithNonindexable]
+
+o = None # type: object
+o[1:2] # E: Value of type "object" is not indexable
+o[:]   # E: Value of type "object" is not indexable
+[builtins fixtures/slice.pyi]
+
+[case testNonIntSliceBounds]
+from typing import Any
+a, o = None, None # type: (Any, object)
+a[o:1] # E: Slice index must be an integer or None
+a[1:o] # E: Slice index must be an integer or None
+a[o:]  # E: Slice index must be an integer or None
+a[:o]  # E: Slice index must be an integer or None
+[builtins fixtures/slice.pyi]
+
+[case testNoneSliceBounds]
+from typing import Any
+a = None # type: Any
+a[None:1]
+a[1:None]
+a[None:]
+a[:None]
+[builtins fixtures/slice.pyi]
+
+[case testNoneSliceBoundsWithStrictOptional]
+# flags: --strict-optional
+from typing import Any
+a = None # type: Any
+a[None:1]
+a[1:None]
+a[None:]
+a[:None]
+[builtins fixtures/slice.pyi]
+
+
+-- String interpolation
+-- --------------------
+
+
+[case testStringInterpolationType]
+from typing import Tuple
+i, f, s, t = None, None, None, None # type: (int, float, str, Tuple[int])
+'%d' % i
+'%f' % f
+'%s' % s
+'%d' % (f,)
+'%d' % (s,) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+'%d' % t
+'%d' % s  # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+'%f' % s  # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationSAcceptsAnyType]
+from typing import Any
+i, o, s = None, None, None # type: (int, object, str)
+'%s %s %s' % (i, o, s)
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationCount]
+'%d %d' % 1  # E: Not enough arguments for format string
+'%d %d' % (1, 2)
+'%d %d' % (1, 2, 3)  # E: Not all arguments converted during string formatting
+t = 1, 's'
+'%d %s' % t
+'%s %d' % t  # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+'%d' % t  # E: Not all arguments converted during string formatting
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationWithAnyType]
+from typing import Any
+a = None # type: Any
+'%d %d' % a
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationInvalidPlaceholder]
+'%W' % 1  # E: Unsupported format character 'W'
+'%b' % 1  # E: Format character 'b' is only supported on bytes patterns
+
+[case testStringInterPolationPython2]
+# flags: --python-version 2.7
+b'%b' % 1  # E: Format character 'b' is only supported in Python 3.5 and later
+b'%s' % 1
+b'%a' % 1  # E: Format character 'a' is only supported in Python 3
+
+[case testBytesInterpolationBefore35]
+# flags: --python-version 3.4
+b'%b' % 1  # E: Unsupported left operand type for % ("bytes")
+
+[case testBytesInterpolation]
+b'%b' % 1  # E: Incompatible types in string interpolation (expression has type "int", placeholder has type "bytes")
+b'%b' % b'1'
+b'%a' % 3
+
+[case testStringInterpolationWidth]
+'%2f' % 3.14
+'%*f' % 3.14 # E: Not enough arguments for format string
+'%*f' % (4, 3.14)
+'%*f' % (1.1, 3.14) # E: * wants int
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationPrecision]
+'%.2f' % 3.14
+'%.*f' % 3.14 # E: Not enough arguments for format string
+'%.*f' % (4, 3.14)
+'%.*f' % (1.1, 3.14) # E: * wants int
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationWidthAndPrecision]
+'%4.2f' % 3.14
+'%4.*f' % 3.14 # E: Not enough arguments for format string
+'%*.2f' % 3.14 # E: Not enough arguments for format string
+'%*.*f' % 3.14 # E: Not enough arguments for format string
+'%*.*f' % (4, 2, 3.14)
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationFlagsAndLengthModifiers]
+'%04hd' % 1
+'%-.4ld' % 1
+'%+*Ld' % (1, 1)
+'% .*ld' % (1, 1)
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationDoublePercentage]
+'%% %d' % 1
+'%3% %d' % 1
+'%*%' % 1
+'%*% %d' % 1  # E: Not enough arguments for format string
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationC]
+'%c' % 1
+'%c' % 's'
+'%c' % ''  # E: %c requires int or char
+'%c' % 'ab'  # E: %c requires int or char
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationMappingTypes]
+'%(a)d %(b)s' % {'a': 1, 'b': 's'}
+'%(a)d %(b)s' % {'a': 's', 'b': 1}  # E: Incompatible types in string interpolation (expression has type "str", placeholder with key 'a' has type "Union[int, float]")
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationMappingKeys]
+'%()d' % {'': 2}
+'%(a)d' % {'a': 1, 'b': 2, 'c': 3}
+'%(q)d' % {'a': 1, 'b': 2, 'c': 3}  # E: Key 'q' not found in mapping
+'%(a)d %%' % {'a': 1}
+
+[builtins fixtures/dict.pyi]
+
+[case testStringInterpolationMappingDictTypes]
+from typing import Any, Dict
+a = None # type: Any
+ds, do, di = None, None, None # type: Dict[str, int], Dict[object, int], Dict[int, int]
+'%(a)' % 1  # E: Format requires a mapping (expression has type "int", expected type for mapping is Dict[Any, Any])
+'%()d' % a
+'%()d' % ds
+'%()d' % do
+[builtins fixtures/dict.pyi]
+
+[case testStringInterpolationMappingInvalidDictTypes-skip]
+from typing import Any, Dict
+di = None # type: Dict[int, int]
+'%()d' % di  # E: Format requires a mapping (expression has type Dict[int, int], expected type for mapping is Dict[str, Any])
+[builtins fixtures/dict.pyi]
+
+[case testStringInterpolationMappingInvalidSpecifiers]
+'%(a)d %d' % 1  # E: String interpolation mixes specifier with and without mapping keys
+'%(b)*d' % 1  # E: String interpolation contains both stars and mapping keys
+'%(b).*d' % 1  # E: String interpolation contains both stars and mapping keys
+
+[case testStringInterpolationMappingFlagsAndLengthModifiers]
+'%(a)1d' % {'a': 1}
+'%(a).1d' % {'a': 1}
+'%(a)#1.1ld' % {'a': 1}
+[builtins fixtures/dict.pyi]
+
+[case testStringInterpolationFloatPrecision]
+'%.f' % 1.2
+'%.3f' % 1.2
+'%.f' % 'x'
+'%.3f' % 'x'
+[builtins fixtures/primitives.pyi]
+[out]
+main:3: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+main:4: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+
+[case testStringInterpolationSpaceKey]
+'%( )s' % {' ': 'foo'}
+
+[case testByteByteInterpolation]
+def foo(a: bytes, b: bytes):
+    b'%s:%s' % (a, b)
+foo(b'a', b'b') == b'a:b'
+
+[case testStringInterpolationStarArgs]
+x = (1, 2)
+"%d%d" % (*x,)
+
+[case testBytePercentInterpolationSupported]
+b'%s' % (b'xyz',)
+b'%(name)s' % {'name': 'jane'}
+b'%c' % (123)
+
+[case testUnicodeInterpolation_python2]
+u'%s' % (u'abc',)
+
+-- Lambdas
+-- -------
+
+
+[case testTrivialLambda]
+from typing import Callable
+f = lambda: 1 # type: Callable[[], int]
+f = lambda: ''.x
+f = lambda: ''
+[out]
+main:3: error: "str" has no attribute "x"
+main:4: error: Incompatible types in assignment (expression has type Callable[[], str], variable has type Callable[[], int])
+main:4: error: Incompatible return value type (got "str", expected "int")
+
+[case testVoidLambda]
+import typing
+def void() -> None:
+    pass
+x = lambda: void() # type: typing.Callable[[], None]
+
+[case testNoCrashOnLambdaGenerator]
+from typing import Iterator, Callable
+
+# These should not crash
+lambda: (yield)
+
+gen: Callable[[], Iterator[str]]
+gen = (lambda: (yield 1))  # E: Incompatible types in yield (actual type "int", expected type "str")
+
+def fun(cb: Callable[[], Iterator[str]]) -> None:
+    pass
+fun(lambda: (yield from [1]))  # E: Incompatible types in "yield from" (actual type "int", expected type "str")
+[builtins fixtures/list.pyi]
+[out]
+
+-- List comprehensions
+-- -------------------
+
+
+[case testSimpleListComprehension]
+from typing import List
+a = None # type: List[A]
+a = [x for x in a]
+b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+
+[case testSimpleListComprehensionNestedTuples]
+from typing import List, Tuple
+l = None # type: List[Tuple[A, Tuple[A, B]]]
+a = [a2 for a1, (a2, b1) in l] # type: List[A]
+b = [a2 for a1, (a2, b1) in l] # type: List[B] # E: List comprehension has incompatible type List[A]
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+
+[case testSimpleListComprehensionNestedTuples2]
+from typing import List, Tuple
+l = None # type: List[Tuple[int, Tuple[int, str]]]
+a = [f(d) for d, (i, s) in l]
+b = [f(s) for d, (i, s) in l] # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+def f(x: int): pass
+[builtins fixtures/for.pyi]
+
+[case testListComprehensionWithNonDirectMapping]
+from typing import List
+a = None # type: List[A]
+b = None # type: List[B]
+b = [f(x) for x in a]
+a = [f(x) for x in a] # E: List comprehension has incompatible type List[B]
+([f(x) for x in b])   # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+class A: pass
+class B: pass
+def f(a: A) -> B: pass
+[builtins fixtures/for.pyi]
+
+[case testErrorInListComprehensionCondition]
+from typing import List
+a = None # type: List[A]
+a = [x for x in a if x()] # E: "A" not callable
+class A: pass
+[builtins fixtures/for.pyi]
+
+[case testTypeInferenceOfListComprehension]
+from typing import List
+a = None # type: List[A]
+o = [x for x in a] # type: List[object]
+class A: pass
+[builtins fixtures/for.pyi]
+
+[case testSimpleListComprehensionInClassBody]
+from typing import List
+class A:
+    a = None # type: List[A]
+    a = [x for x in a]
+    b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]
+class B: pass
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Set comprehension
+-- -----------------
+
+
+[case testSimpleSetComprehension]
+from typing import Set
+a = None # type: Set[A]
+a = {x for x in a}
+b = {x for x in a} # type: Set[B] # E: Set comprehension has incompatible type Set[A]
+class A: pass
+class B: pass
+[builtins fixtures/set.pyi]
+
+
+-- Dictionary comprehension
+-- ------------------------
+
+
+[case testSimpleDictionaryComprehension]
+from typing import Dict, List, Tuple
+abd = None # type: Dict[A, B]
+abl = None # type: List[Tuple[A, B]]
+abd = {a: b for a, b in abl}
+x = {a: b for a, b in abl} # type: Dict[B, A]
+y = {a: b for a, b in abl} # type: A
+class A: pass
+class B: pass
+[builtins fixtures/dict.pyi]
+[out]
+main:5: error: Key expression in dictionary comprehension has incompatible type "A"; expected type "B"
+main:5: error: Value expression in dictionary comprehension has incompatible type "B"; expected type "A"
+main:6: error: Incompatible types in assignment (expression has type Dict[A, B], variable has type "A")
+
+
+[case testDictionaryComprehensionWithNonDirectMapping]
+from typing import Dict, List, Tuple
+abd = None # type: Dict[A, B]
+abl = None # type: List[Tuple[A, B]]
+abd = {a: f(b) for a, b in abl}
+class A: pass
+class B: pass
+class C: pass
+def f(b: A) -> C: pass
+[builtins fixtures/dict.pyi]
+[out]
+main:4: error: Value expression in dictionary comprehension has incompatible type "C"; expected type "B"
+main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+
+
+-- Generator expressions
+-- ---------------------
+
+
+[case testSimpleGeneratorExpression]
+from typing import Iterator
+# The implementation is mostly identical to list comprehensions, so only a few
+# test cases are needed.
+a = None # type: Iterator[int]
+a = (x for x in a)
+b = None # type: Iterator[str]
+b = (x for x in a) # E: Generator has incompatible item type "int"
+[builtins fixtures/for.pyi]
+
+[case testGeneratorIncompatibleErrorMessage]
+from typing import Callable, Iterator, List
+
+a = []  # type: List[Callable[[], str]]
+b = None  # type: Iterator[Callable[[], int]]
+b = (x for x in a)  # E: Generator has incompatible item type Callable[[], str]
+[builtins fixtures/list.pyi]
+
+-- Conditional expressions
+-- -----------------------
+
+
+[case testSimpleConditionalExpression]
+import typing
+y = ''
+x = 1 if y else 2
+x = 3
+x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testConditionalExpressionWithEmptyCondition]
+import typing
+def f() -> None: pass
+x = 1 if f() else 2 # E: "f" does not return a value
+
+[case testConditionalExpressionWithSubtyping]
+import typing
+class A: pass
+class B(A): pass
+x = B() if bool() else A()
+x = A()
+x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+y = A() if bool() else B()
+y = A()
+y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+[builtins fixtures/bool.pyi]
+
+[case testConditionalExpressionAndTypeContext]
+import typing
+x = [1] if bool() else []
+x = [1]
+x = ['x'] # E: List item 0 has incompatible type "str"
+[builtins fixtures/list.pyi]
+
+
+-- Special cases
+-- -------------
+
+
+[case testOperationsWithNonInstanceTypes]
+from typing import cast
+class A:
+    def __add__(self, a: 'A') -> 'A': pass
+a = None # type: A
+None + a   # Fail
+f + a      # Fail
+a + f      # Fail
+cast(A, f)
+
+def f() -> None:
+    pass
+[out]
+main:5: error: Unsupported left operand type for + (None)
+main:6: error: Unsupported left operand type for + (Callable[[], None])
+main:7: error: Unsupported operand types for + ("A" and Callable[[], None])
+
+[case testOperatorMethodWithInvalidArgCount]
+
+a = None # type: A
+a + a  # Fail
+
+class A:
+    def __add__(self) -> 'A':
+        pass
+[out]
+main:3: error: Too many arguments for "__add__" of "A"
+
+[case testOperatorMethodAsVar]
+from typing import Any
+class A:
+    def __init__(self, _add: Any) -> None:
+        self.__add__ = _add
+a = None # type: A
+a + a
+[out]
+
+[case testOperatorMethodAsVar2]
+
+class A:
+    def f(self, x: int) -> str: pass
+    __add__ = f
+s = None  # type: str
+s = A() + 1
+A() + (A() + 1)
+[out]
+main:7: error: Argument 1 has incompatible type "str"; expected "int"
+
+[case testIndexedLvalueWithSubtypes]
+
+a, b, c = None, None, None # type: (A, B, C)
+a[c] = c
+a[b] = c
+a[c] = b
+
+class A:
+    def __setitem__(self, x: 'B', y: 'B') -> None:
+        pass
+class B:
+    pass
+class C(B):
+    pass
+[out]
+
+
+-- Ellipsis
+-- --------
+
+
+[case testEllipsis]
+
+a = None # type: A
+a = ...  # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "A")
+b = ...
+c = ...
+b = c
+....__class__
+....a  # E: "ellipsis" has no attribute "a"
+
+class A: pass
+[file builtins.py]
+class object:
+    def __init__(self): pass
+class ellipsis:
+    def __init__(self): pass
+    __class__ = object()
+class type: pass
+class function: pass
+class str: pass
+[out]
+
+
+-- Yield expression
+-- ----------------
+
+
+[case testYieldExpression]
+def f(x: int) -> None:
+    x = yield f('')
+    x = 1
+[builtins fixtures/for.pyi]
+[out]
+main:1: error: The return type of a generator function should be "Generator" or one of its supertypes
+main:2: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+main:2: error: "f" does not return a value
+
+[case testYieldExpressionWithNone]
+from typing import Iterator
+def f(x: int) -> Iterator[None]:
+    (yield)
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Yield from expression
+-- ---------------------
+
+
+[case testYieldFromIteratorHasNoValue]
+from typing import Iterator
+def f() -> Iterator[int]:
+    yield 5
+def g() -> Iterator[int]:
+    a = yield from f()
+[out]
+main:5: error: Function does not return a value
+
+[case testYieldFromGeneratorHasValue]
+from typing import Iterator, Generator
+def f() -> Generator[int, None, str]:
+    yield 5
+    return "ham"
+def g() -> Iterator[int]:
+    a = "string"
+    a = yield from f()
+[out]
+
+
+-- dict(...)
+-- ---------
+
+
+-- Note that the stub used in unit tests does not have all overload
+-- variants, but it should not matter.
+
+[case testDictWithKeywordArgsOnly]
+from typing import Dict, Any
+d1 = dict(a=1, b=2) # type: Dict[str, int]
+d2 = dict(a=1, b='') # type: Dict[str, int] # E: Dict entry 1 has incompatible type "str": "str"
+d3 = dict(a=1) # type: Dict[int, int] # E: Dict entry 0 has incompatible type "str": "int"
+d4 = dict(a=1, b=1)
+d4.xyz # E: Dict[str, int] has no attribute "xyz"
+d5 = dict(a=1, b='') # type: Dict[str, Any]
+[builtins fixtures/dict.pyi]
+
+[case testDictWithoutKeywordArgs]
+from typing import Dict
+d = dict() # E: Need type annotation for variable
+d2 = dict() # type: Dict[int, str]
+dict(undefined) # E: Name 'undefined' is not defined
+[builtins fixtures/dict.pyi]
+
+[case testDictFromList]
+from typing import Dict
+d = dict([(1, 'x'), (2, 'y')])
+d() # E: Dict[int, str] not callable
+d2 = dict([(1, 'x')]) # type: Dict[str, str] # E: List item 0 has incompatible type "Tuple[int, str]"
+[builtins fixtures/dict.pyi]
+
+[case testDictFromIterableAndKeywordArg]
+from typing import Dict
+it = [('x', 1)]
+
+d = dict(it, x=1)
+d() # E: Dict[str, int] not callable
+
+d2 = dict(it, x='') # E: Cannot infer type argument 2 of "dict"
+d2() # E: Dict[Any, Any] not callable
+
+d3 = dict(it, x='') # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testDictFromIterableAndKeywordArg2]
+it = [(1, 'x')]
+dict(it, x='y') # E: Keyword argument only valid with "str" key type in call to "dict"
+[builtins fixtures/dict.pyi]
+
+[case testDictFromIterableAndKeywordArg3]
+d = dict([], x=1)
+d() # E: Dict[str, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testDictFromIterableAndStarStarArgs]
+from typing import Dict
+it = [('x', 1)]
+
+kw = {'x': 1}
+d = dict(it, **kw)
+d() # E: Dict[str, int] not callable
+
+kw2 = {'x': ''}
+d2 = dict(it, **kw2) # E: Cannot infer type argument 2 of "dict"
+d2() # E: Dict[Any, Any] not callable
+
+d3 = dict(it, **kw2) # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type **Dict[str, str]; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testDictFromIterableAndStarStarArgs2]
+it = [(1, 'x')]
+kw = {'x': 'y'}
+d = dict(it, **kw) # E: Keyword argument only valid with "str" key type in call to "dict"
+d() # E: Dict[int, str] not callable
+[builtins fixtures/dict.pyi]
+
+[case testUserDefinedClassNamedDict]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+class dict(Generic[T, S]):
+    def __init__(self, x: T, **kwargs: T) -> None: pass
+dict(1, y=1)
+[builtins fixtures/dict.pyi]
+
+[case testSpecialSignatureForSubclassOfDict]
+from typing import TypeVar, Dict, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class D1(dict): pass # Implicit base class Dict[Any, Any]
+D1([(1, 2)], x=1)
+class D2(Dict[T, S], Generic[T, S]): pass
+da = D2([('x', 2)], x=1)
+da() # E: D2[str, int] not callable
+D2([(1, 2)], x=1) # E: Keyword argument only valid with "str" key type in call to "dict"
+db = D2(x=1)
+db() # E: D2[str, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testSpecialSignatureForSubclassOfDict2]
+from typing import TypeVar, Dict, Generic
+T = TypeVar('T')
+class D(Dict[str, T], Generic[T]): pass
+D([('x', 1)], x=1)
+[builtins fixtures/dict.pyi]
+
+[case testOverridingSpecialSignatureInSubclassOfDict]
+from typing import TypeVar, Dict, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class D(Dict[T, S], Generic[T, S]):
+    def __init__(self, x: S, y: T) -> None: pass
+d = D(1, y='')
+d() # E: D[str, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testRevealType]
+reveal_type(1) # E: Revealed type is 'builtins.int'
+
+[case testUndefinedRevealType]
+reveal_type(x)
+[out]
+main:1: error: Revealed type is 'Any'
+main:1: error: Name 'x' is not defined
+
+[case testUserDefinedRevealType]
+def reveal_type(x: int) -> None: pass
+reveal_type("foo") # E: Argument 1 to "reveal_type" has incompatible type "str"; expected "int"
+
+[case testRevealTypeVar]
+reveal_type = 1
+1 + "foo" # E: Unsupported operand types for + ("int" and "str")
+
+[case testRevealForward]
+def f() -> None:
+    reveal_type(x)
+x = 1 + 1
+[out]
+main:2: error: Revealed type is 'builtins.int'
+
+[case testEqNone]
+None == None
+[builtins fixtures/ops.pyi]
+
+[case testLtNone]
+None < None  # E: Unsupported left operand type for < (None)
+[builtins fixtures/ops.pyi]
+
+[case testDictWithStarExpr]
+
+b = {'z': 26, *a}  # E: invalid syntax
+[builtins fixtures/dict.pyi]
+
+[case testDictWithStarStarExpr]
+
+from typing import Dict
+a = {'a': 1}
+b = {'z': 26, **a}
+c = {**b}
+d = {**a, **b, 'c': 3}
+e = {1: 'a', **a}  # E: Argument 1 to "update" of "dict" has incompatible type Dict[str, int]; expected Mapping[int, str]
+f = {**b}  # type: Dict[int, int]  # E: List item 0 has incompatible type Dict[str, int]
+[builtins fixtures/dict.pyi]
+
+[case testDictIncompatibleTypeErrorMessage]
+from typing import Dict, Callable
+
+def things() -> int:
+    return 42
+
+stuff: Dict[int, Callable[[], str]] = {  # E: Dict entry 0 has incompatible type "int": Callable[[], int]
+    1: things
+}
+[builtins fixtures/dict.pyi]
+
+-- Type checker default plugin
+-- ---------------------------
+
+
+[case testIntPow]
+a = 1
+b = a + 2
+reveal_type(a**0) # E: Revealed type is 'builtins.int'
+reveal_type(a**1) # E: Revealed type is 'builtins.int'
+reveal_type(a**2) # E: Revealed type is 'builtins.int'
+reveal_type(a**-0) # E: Revealed type is 'builtins.int'
+reveal_type(a**-1) # E: Revealed type is 'builtins.float'
+reveal_type(a**(-2)) # E: Revealed type is 'builtins.float'
+reveal_type(a**b) # E: Revealed type is 'Any'
+reveal_type(a.__pow__(2)) # E: Revealed type is 'builtins.int'
+reveal_type(a.__pow__(a)) # E: Revealed type is 'Any'
+a.__pow__() # E: Too few arguments for "__pow__" of "int"
+[builtins fixtures/ops.pyi]
diff --git a/test-data/unit/check-fastparse.test b/test-data/unit/check-fastparse.test
new file mode 100644
index 0000000..7aa1521
--- /dev/null
+++ b/test-data/unit/check-fastparse.test
@@ -0,0 +1,366 @@
+[case testFastParseSyntaxError]
+
+1 +  # E: invalid syntax
+
+[case testFastParseTypeCommentSyntaxError]
+
+x = None # type: a : b  # E: syntax error in type comment
+
+[case testFastParseInvalidTypeComment]
+
+x = None # type: a + b  # E: invalid type comment or annotation
+
+-- Function type comments are attributed to the function def line.
+-- This happens in both parsers.
+[case testFastParseFunctionAnnotationSyntaxError]
+
+def f():  # E: syntax error in type comment
+  # type: None -> None
+  pass
+
+[case testFastParseInvalidFunctionAnnotation]
+
+def f(x):  # E: invalid type comment or annotation
+  # type: (a + b) -> None
+  pass
+
+[case testFastParseInvalidTypes2]
+# flags: --py2
+# All of these should not crash
+from typing import Callable, Tuple, Iterable
+
+x = None # type: Tuple[int, str].x # E: invalid type comment or annotation
+x = None # type: Iterable[x].x # E: invalid type comment or annotation
+x = None # type: Tuple[x][x] # E: invalid type comment or annotation
+x = None # type: Iterable[x][x] # E: invalid type comment or annotation
+x = None # type: Callable[..., int][x] # E: invalid type comment or annotation
+x = None # type: Callable[..., int].x # E: invalid type comment or annotation
+x = None # type: Tuple[1] # E: invalid type comment or annotation
+
+def f1(x): # E: invalid type comment or annotation
+    # type: (Tuple[int, str].x) -> None
+    pass
+def f2(x): # E: invalid type comment or annotation
+    # type: (Iterable[x].x) -> None
+    pass
+def f3(x): # E: invalid type comment or annotation
+    # type: (Tuple[x][x]) -> None
+    pass
+def f4(x): # E: invalid type comment or annotation
+    # type: (Iterable[x][x]) -> None
+    pass
+def f5(x): # E: invalid type comment or annotation
+    # type: (Callable[..., int][x]) -> None
+    pass
+def f6(x): # E: invalid type comment or annotation
+    # type: (Callable[..., int].x) -> None
+    pass
+def f7(x): # E: invalid type comment or annotation
+    # type: (Tuple[1]) -> None
+    pass
+
+
+[case testFastParseInvalidTypes3]
+# flags: --python-version 3.6
+# All of these should not crash
+from typing import Callable, Tuple, Iterable
+
+x: Tuple[int, str].x # E: invalid type comment or annotation
+x: Iterable[x].x # E: invalid type comment or annotation
+x: Tuple[x][x] # E: invalid type comment or annotation
+x: Iterable[x][x] # E: invalid type comment or annotation
+x: Callable[..., int][x] # E: invalid type comment or annotation
+x: Callable[..., int].x # E: invalid type comment or annotation
+x: Tuple[1] # E: invalid type comment or annotation
+
+x = None # type: Tuple[int, str].x # E: invalid type comment or annotation
+x = None # type: Iterable[x].x # E: invalid type comment or annotation
+x = None # type: Tuple[x][x] # E: invalid type comment or annotation
+x = None # type: Iterable[x][x] # E: invalid type comment or annotation
+x = None # type: Callable[..., int][x] # E: invalid type comment or annotation
+x = None # type: Callable[..., int].x # E: invalid type comment or annotation
+x = None # type: Tuple[1] # E: invalid type comment or annotation
+
+def f1(x: Tuple[int, str].x) -> None: pass # E: invalid type comment or annotation
+def f2(x: Iterable[x].x) -> None: pass # E: invalid type comment or annotation
+def f3(x: Tuple[x][x]) -> None: pass # E: invalid type comment or annotation
+def f4(x: Iterable[x][x]) -> None: pass # E: invalid type comment or annotation
+def f5(x: Callable[..., int][x]) -> None: pass # E: invalid type comment or annotation
+def f6(x: Callable[..., int].x) -> None: pass # E: invalid type comment or annotation
+def f7(x: Tuple[1]) -> None: pass # E: invalid type comment or annotation
+
+[case testFastParseProperty]
+
+class C:
+  @property
+  def x(self) -> str: pass
+  @x.setter
+  def x(self, value: str) -> None: pass
+[builtins fixtures/property.pyi]
+
+[case testFastParseConditionalProperty]
+
+class C:
+  if bool():
+    @property
+    def x(self) -> str: pass
+    @x.setter
+    def x(self, value: str) -> None: pass
+[builtins fixtures/property.pyi]
+
+[case testFastParsePerArgumentAnnotations]
+
+class A: pass
+class B: pass
+class C: pass
+class D: pass
+class E: pass
+class F: pass
+def f(a,        # type: A
+      b = None, # type: B
+      *args,    # type: C
+      d = None, # type: D
+      e,        # type: E
+      **kwargs  # type: F
+      ):
+    reveal_type(a)      # E: Revealed type is '__main__.A'
+    reveal_type(b)      # E: Revealed type is 'Union[__main__.B, builtins.None]'
+    reveal_type(args)   # E: Revealed type is 'builtins.tuple[__main__.C]'
+    reveal_type(d)      # E: Revealed type is 'Union[__main__.D, builtins.None]'
+    reveal_type(e)      # E: Revealed type is '__main__.E'
+    reveal_type(kwargs) # E: Revealed type is 'builtins.dict[builtins.str, __main__.F]'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFastParsePerArgumentAnnotationsWithReturn]
+
+class A: pass
+class B: pass
+class C: pass
+class D: pass
+class E: pass
+class F: pass
+def f(a,        # type: A
+      b = None, # type: B
+      *args,    # type: C
+      d = None, # type: D
+      e,        # type: E
+      **kwargs  # type: F
+      ):
+      # type: (...) -> int
+    reveal_type(a)      # E: Revealed type is '__main__.A'
+    reveal_type(b)      # E: Revealed type is 'Union[__main__.B, builtins.None]'
+    reveal_type(args)   # E: Revealed type is 'builtins.tuple[__main__.C]'
+    reveal_type(d)      # E: Revealed type is 'Union[__main__.D, builtins.None]'
+    reveal_type(e)      # E: Revealed type is '__main__.E'
+    reveal_type(kwargs) # E: Revealed type is 'builtins.dict[builtins.str, __main__.F]'
+    return "not an int"  # E: Incompatible return value type (got "str", expected "int")
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFastParsePerArgumentAnnotationsWithAnnotatedBareStar]
+
+def f(*, # type: int  # E: bare * has associated type comment
+      x  # type: str
+      ):
+      # type: (...) -> int
+    pass
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFastParsePerArgumentAnnotationsWithReturnAndBareStar]
+
+def f(*,
+      x  # type: str
+      ):
+      # type: (...) -> int
+    reveal_type(x) # E: Revealed type is 'builtins.str'
+    return "not an int"  # E: Incompatible return value type (got "str", expected "int")
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFastParsePerArgumentAnnotations_python2]
+
+class A: pass
+class B: pass
+class C: pass
+class D: pass
+def f(a,        # type: A
+      b = None, # type: B
+      *args     # type: C
+      # kwargs not tested due to lack of 2.7 dict fixtures
+      ):
+    reveal_type(a)      # E: Revealed type is '__main__.A'
+    reveal_type(b)      # E: Revealed type is 'Union[__main__.B, builtins.None]'
+    reveal_type(args)   # E: Revealed type is 'builtins.tuple[__main__.C]'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFastParsePerArgumentAnnotationsWithReturn_python2]
+
+class A: pass
+class B: pass
+class C: pass
+class D: pass
+def f(a,        # type: A
+      b = None, # type: B
+      *args     # type: C
+      # kwargs not tested due to lack of 2.7 dict fixtures
+      ):
+      # type: (...) -> int
+    reveal_type(a)      # E: Revealed type is '__main__.A'
+    reveal_type(b)      # E: Revealed type is 'Union[__main__.B, builtins.None]'
+    reveal_type(args)   # E: Revealed type is 'builtins.tuple[__main__.C]'
+    return "not an int"  # E: Incompatible return value type (got "str", expected "int")
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFasterParseTooManyArgumentsAnnotation]
+
+def f():  # E: Type signature has too many arguments
+    # type: (int) -> None
+    pass
+
+[case testFasterParseTooFewArgumentsAnnotation]
+
+def f(x):  # E: Type signature has too few arguments
+    # type: () -> None
+    pass
+
+[case testFasterParseTypeCommentError_python2]
+
+from typing import Tuple
+def f(a):
+    # type: (Tuple(int, int)) -> int
+    pass
+[out]
+main:3: error: invalid type comment or annotation
+
+[case testFastParseMatMul]
+
+from typing import Any
+x = None  # type: Any
+x @ 1
+x @= 1
+
+[case testIncorrectTypeCommentIndex]
+
+from typing import Dict
+x = None # type: Dict[x: y]
+[out]
+main:3: error: syntax error in type comment
+
+[case testPrintStatementTrailingCommaFastParser_python2]
+
+print 0,
+print 1, 2,
+
+[case testFastParserShowsMultipleErrors]
+def f(x):  # E: Type signature has too few arguments
+    # type: () -> None
+    pass
+def g():  # E: Type signature has too many arguments
+    # type: (int) -> None
+    pass
+
+[case testFastParseMalformedAssert]
+
+assert 1, 2
+assert (1, 2)  # W: Assertion is always true, perhaps remove parentheses?
+assert (1, 2), 3  # W: Assertion is always true, perhaps remove parentheses?
+assert ()
+assert (1,)  # W: Assertion is always true, perhaps remove parentheses?
+
+[case testFastParseAssertMessage]
+
+assert 1
+assert 1, 2
+assert 1, 1+2
+assert 1, 1+'test'  # E: Unsupported operand types for + ("int" and "str")
+assert 1, f()  # E: Name 'f' is not defined
+
+[case testFastParserConsistentFunctionTypes]
+
+def f(x, y, z):
+  # type: (int, int, int) -> int
+  pass
+
+def f(x,  # type: int  # E: Function has duplicate type signatures
+      y,  # type: int
+      z   # type: int
+    ):
+    # type: (int, int, int) -> int
+    pass
+
+def f(x,  # type: int
+      y,  # type: int
+      z   # type: int
+    ):
+    # type: (...) -> int
+    pass
+
+def f(x, y, z):
+  # type: (int, int, int) -> int
+  pass
+
+def f(x) -> int:  # E: Function has duplicate type signatures
+  # type: (int) -> int
+  pass
+
+def f(x: int, y: int, z: int):
+  # type: (...) -> int
+  pass
+
+def f(x: int):  # E: Function has duplicate type signatures
+  # type: (int) -> int
+  pass
+
+[case testFastParserDuplicateNames]
+
+def f(x, y, z):
+  pass
+
+def g(x, y, x):  # E: Duplicate argument 'x' in function definition
+  pass
+
+def h(x, y, *x):  # E: Duplicate argument 'x' in function definition
+  pass
+
+def i(x, y, *z, **z):  # E: Duplicate argument 'z' in function definition
+  pass
+
+def j(x: int, y: int, *, x: int = 3):  # E: Duplicate argument 'x' in function definition
+  pass
+
+def k(*, y, z, y):  # E: Duplicate argument 'y' in function definition
+  pass
+
+lambda x, y, x: ...  # E: Duplicate argument 'x' in function definition
+
+[case testFastParserDuplicateNames_python2]
+
+def f(x, y, z):
+  pass
+
+def g(x, y, x):  # E: Duplicate argument 'x' in function definition
+  pass
+
+def h(x, y, *x):  # E: Duplicate argument 'x' in function definition
+  pass
+
+def i(x, y, *z, **z):  # E: Duplicate argument 'z' in function definition
+  pass
+
+def j(x, (y, y), z):  # E: Duplicate argument 'y' in function definition
+  pass
+
+def k(x, (y, x)):  # E: Duplicate argument 'x' in function definition
+  pass
+
+def l((x, y), (z, x)):  # E: Duplicate argument 'x' in function definition
+  pass
+
+def m(x, ((x, y), z)):  # E: Duplicate argument 'x' in function definition
+  pass
+
+lambda x, (y, x): None  # E: Duplicate argument 'x' in function definition
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test
new file mode 100644
index 0000000..dc856f6
--- /dev/null
+++ b/test-data/unit/check-flags.test
@@ -0,0 +1,900 @@
+[case testUnannotatedFunction]
+# flags: --disallow-untyped-defs
+def f(x): pass
+[out]
+main:2: error: Function is missing a type annotation
+
+[case testUnannotatedArgument]
+# flags: --disallow-untyped-defs
+def f(x) -> int: pass
+[out]
+main:2: error: Function is missing a type annotation for one or more arguments
+
+[case testUnannotatedArgumentWithFastParser]
+# flags: --disallow-untyped-defs
+def f(x) -> int: pass
+[out]
+main:2: error: Function is missing a type annotation for one or more arguments
+
+[case testNoArgumentFunction]
+# flags: --disallow-untyped-defs
+def f() -> int: pass
+[out]
+
+[case testUnannotatedReturn]
+# flags: --disallow-untyped-defs
+def f(x: int): pass
+[out]
+main:2: error: Function is missing a return type annotation
+
+[case testUnannotatedReturnWithFastParser]
+# flags: --disallow-untyped-defs
+def f(x: int): pass
+[out]
+main:2: error: Function is missing a return type annotation
+
+[case testLambda]
+# flags: --disallow-untyped-defs
+lambda x: x
+[out]
+
+[case testUntypedDef]
+# flags: --disallow-untyped-defs
+def f():
+    1 + "str"
+[out]
+main:2: error: Function is missing a type annotation
+[case testUntypedDefDisallowUnannotated]
+# flags: --disallow-any=unannotated
+def f():
+    1 + "str"
+[out]
+main:2: error: Function is missing a type annotation
+
+[case testSubclassingAny]
+# flags: --disallow-subclassing-any
+from typing import Any
+FakeClass = None  # type: Any
+class Foo(FakeClass): pass  # E: Class cannot subclass 'FakeClass' (has type 'Any')
+[out]
+
+[case testSubclassingAnyMultipleBaseClasses]
+# flags: --disallow-subclassing-any
+from typing import Any
+FakeClass = None  # type: Any
+class ActualClass: pass
+class Foo(ActualClass, FakeClass): pass  # E: Class cannot subclass 'FakeClass' (has type 'Any')
+[out]
+
+[case testSubclassingAnySilentImports]
+# flags: --disallow-subclassing-any --follow-imports=skip
+# cmd: mypy -m main
+
+[file main.py]
+from ignored_module import BaseClass
+class Foo(BaseClass): pass
+
+[file ignored_module.py]
+class BaseClass: pass
+
+[out]
+tmp/main.py:2: error: Class cannot subclass 'BaseClass' (has type 'Any')
+
+[case testSubclassingAnySilentImports2]
+# flags: --disallow-subclassing-any --follow-imports=skip
+# cmd: mypy -m main
+
+[file main.py]
+import ignored_module
+class Foo(ignored_module.BaseClass): pass
+
+[file ignored_module.py]
+class BaseClass: pass
+
+[out]
+tmp/main.py:2: error: Class cannot subclass 'BaseClass' (has type 'Any')
+
+[case testWarnNoReturnIgnoresTrivialFunctions]
+# flags: --warn-no-return
+def f() -> int:
+  pass
+def g() -> int:
+  ...
+def h() -> int:
+  """with docstring"""
+  pass
+def i() -> int:
+  """with docstring"""
+  ...
+def j() -> int:
+  u"""with unicode docstring"""
+  pass
+def k() -> int:
+  """docstring only"""
+
+[case testWarnNoReturnWorksWithAlwaysTrue]
+# flags: --warn-no-return
+PY3 = True
+def f() -> int:
+    if PY3:
+        return 0
+    else:
+        return 0
+[builtins fixtures/bool.pyi]
+
+[case testWarnNoReturnWorksWithAlwaysFalse]
+# flags: --warn-no-return
+PY2 = False
+def f() -> int:
+    if PY2:
+        return 0
+    else:
+        return 0
+[builtins fixtures/bool.pyi]
+
+[case testWarnNoReturnWorksWithMypyTrue]
+# flags: --warn-no-return
+MYPY = False
+def f() -> int:
+    if MYPY:
+        return 0
+    else:
+        return 0
+[builtins fixtures/bool.pyi]
+
+[case testNoReturnDisallowsReturn]
+# flags: --warn-no-return
+from mypy_extensions import NoReturn
+
+def f() -> NoReturn:
+  if bool():
+    return 5  # E: Return statement in function which does not return
+  else:
+    return  # E: Return statement in function which does not return
+[builtins fixtures/dict.pyi]
+
+[case testNoReturnWithoutImplicitReturn]
+# flags: --warn-no-return
+from mypy_extensions import NoReturn
+
+def no_return() -> NoReturn: pass
+def f() -> NoReturn:
+  no_return()
+[builtins fixtures/dict.pyi]
+
+[case testNoReturnDisallowsImplicitReturn]
+# flags: --warn-no-return
+from mypy_extensions import NoReturn
+
+def f() -> NoReturn:  # N: Implicit return in function which does not return
+  non_trivial_function = 1
+[builtins fixtures/dict.pyi]
+
+[case testNoReturnNoWarnNoReturn]
+# flags: --warn-no-return
+from mypy_extensions import NoReturn
+
+def no_return() -> NoReturn: pass
+def f() -> int:
+  if bool():
+    return 0
+  else:
+    no_return()
+[builtins fixtures/dict.pyi]
+
+[case testNoReturnInExpr]
+# flags: --warn-no-return
+from mypy_extensions import NoReturn
+
+def no_return() -> NoReturn: pass
+def f() -> int:
+  return 0
+reveal_type(f() or no_return())  # E: Revealed type is 'builtins.int'
+[builtins fixtures/dict.pyi]
+
+[case testNoReturnVariable]
+# flags: --warn-no-return
+from mypy_extensions import NoReturn
+
+x = 0  # type: NoReturn  # E: Incompatible types in assignment (expression has type "int", variable has type NoReturn)
+[builtins fixtures/dict.pyi]
+
+[case testNoReturnImportFromTyping]
+from typing import NoReturn
+
+def h() -> NoReturn:
+  if bool():
+    return 5  # E: Return statement in function which does not return
+  else:
+    return  # E: Return statement in function which does not return
+
+def no_return() -> NoReturn: pass
+def f() -> NoReturn:
+  no_return()
+
+x: NoReturn = 0 # E: Incompatible types in assignment (expression has type "int", variable has type NoReturn)
+[builtins fixtures/dict.pyi]
+
+[case testShowErrorContextFunction]
+# flags: --show-error-context
+def f() -> None:
+  0 + ""
+[out]
+main: note: In function "f":
+main:3: error: Unsupported operand types for + ("int" and "str")
+
+[case testShowErrorContextClass]
+# flags: --show-error-context
+class A:
+  0 + ""
+[out]
+main: note: In class "A":
+main:3: error: Unsupported operand types for + ("int" and "str")
+
+[case testShowErrorContextMember]
+# flags: --show-error-context
+class A:
+  def f(self, x: int) -> None:
+    self.f("")
+[out]
+main: note: In member "f" of class "A":
+main:4: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testShowErrorContextModule]
+# flags: --show-error-context
+import m
+[file m.py]
+0 + ""
+[out]
+main:2: note: In module imported here:
+tmp/m.py:1: error: Unsupported operand types for + ("int" and "str")
+
+[case testShowErrorContextTopLevel]
+# flags: --show-error-context
+def f() -> None:
+  0 + ""
+0 + ""
+[out]
+main: note: In function "f":
+main:3: error: Unsupported operand types for + ("int" and "str")
+main: note: At top level:
+main:4: error: Unsupported operand types for + ("int" and "str")
+
+[case testShowErrorContextFromHere]
+# flags: --show-error-context
+import a
+[file a.py]
+import b
+[file b.py]
+0 + ""
+[out]
+tmp/a.py:1: note: In module imported here,
+main:2: note: ... from here:
+tmp/b.py:1: error: Unsupported operand types for + ("int" and "str")
+
+[case testFollowImportsNormal]
+# flags: --follow-imports=normal
+from mod import x
+x + ""
+[file mod.py]
+1 + ""
+x = 0
+[out]
+tmp/mod.py:1: error: Unsupported operand types for + ("int" and "str")
+main:3: error: Unsupported operand types for + ("int" and "str")
+
+[case testFollowImportsSilent]
+# flags: --follow-imports=silent
+from mod import x
+x + ""  # E: Unsupported operand types for + ("int" and "str")
+[file mod.py]
+1 + ""
+x = 0
+
+[case testFollowImportsSkip]
+# flags: --follow-imports=skip
+from mod import x
+x + ""
+[file mod.py]
+this deliberate syntax error will not be reported
+[out]
+
+[case testFollowImportsError]
+# flags: --follow-imports=error
+from mod import x
+x + ""
+[file mod.py]
+deliberate syntax error
+[out]
+main:2: note: Import of 'mod' ignored
+main:2: note: (Using --follow-imports=error, module not passed on command line)
+
+[case testIgnoreMissingImportsFalse]
+from mod import x
+[out]
+main:1: error: Cannot find module named 'mod'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testIgnoreMissingImportsTrue]
+# flags: --ignore-missing-imports
+from mod import x
+[out]
+
+[case testStrictBoolean]
+# flags: --strict-boolean
+if True:
+  pass
+if 'test':  # E: Condition must be a boolean
+  pass
+elif 1:  # E: Condition must be a boolean
+  pass
+
+def f() -> bool:
+  return True
+
+if f:  # E: Condition must be a boolean
+  pass
+
+if f():
+  pass
+
+class A:
+  def __call__(self) -> bool:
+    return False
+
+if A:  # E: Condition must be a boolean
+  pass
+
+if A():  # E: Condition must be a boolean
+  pass
+
+if A()():
+  pass
+[builtins fixtures/bool.pyi]
+
+[case testStrictBooleanTernary]
+# flags: --strict-boolean
+x = 1 if 'test' else 2  # E: Condition must be a boolean
+y = 1 if not 'test' else 2
+[builtins fixtures/bool.pyi]
+
+[case testStrictBooleanWhile]
+# flags: --strict-boolean
+while 5:  # E: Condition must be a boolean
+  pass
+
+while False:
+  pass
+[builtins fixtures/bool.pyi]
+
+[case testStrictBooleanComplexTypes]
+# flags: --strict-boolean
+from typing import Any, Type, Union
+
+x = True  # type: Any
+y = True  # type: Union[bool, int]
+z = int  # type: Type[int]
+
+if x:
+  pass
+if y:  # E: Condition must be a boolean
+  pass
+if z:  # E: Condition must be a boolean
+  pass
+[builtins fixtures/bool.pyi]
+
+
+[case testPerFileStrictOptionalBasic]
+# flags: --config-file tmp/mypy.ini
+import standard, optional
+
+[file standard.py]
+x = 0
+x = None
+[file optional.py]
+x = 0
+x = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+
+[file mypy.ini]
+[[mypy]
+strict_optional = False
+[[mypy-optional]
+strict_optional = True
+
+
+[case testPerFileStrictOptionalBasicImportStandard]
+# flags: --config-file tmp/mypy.ini
+import standard, optional
+
+[file standard.py]
+from typing import Optional
+def f(x: int) -> None: pass
+an_int = 0  # type: int
+optional_int = None  # type: Optional[int]
+f(an_int)  # ints can be used as ints
+f(optional_int)  # optional ints can be used as ints in this file
+
+[file optional.py]
+import standard
+def f(x: int) -> None: pass
+standard.an_int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+standard.optional_int = None  # OK -- explicitly declared as optional
+f(standard.an_int)  # ints can be used as ints
+f(standard.optional_int)  # E: Argument 1 to "f" has incompatible type None; expected "int"
+
+[file mypy.ini]
+[[mypy]
+strict_optional = False
+[[mypy-optional]
+strict_optional = True
+
+
+[case testPerFileStrictOptionalBasicImportOptional]
+# flags: --config-file tmp/mypy.ini
+import standard, optional
+
+[file standard.py]
+import optional
+def f(x: int) -> None: pass
+f(optional.x)  # OK -- in non-strict Optional context
+f(optional.y)  # OK -- in non-strict Optional context
+
+[file optional.py]
+from typing import Optional
+def f(x: int) -> None: pass
+x = 0  # type: Optional[int]
+y = None  # type: None
+
+[file mypy.ini]
+[[mypy]
+strict_optional = False
+[[mypy-optional]
+strict_optional = True
+
+[case testPerFileStrictOptionalListItemImportOptional]
+# flags: --config-file tmp/mypy.ini
+import standard, optional
+
+[file standard.py]
+import optional
+from typing import List
+def f(x: List[int]) -> None: pass
+f(optional.x)  # OK -- in non-strict Optional context
+f(optional.y)  # OK -- in non-strict Optional context
+
+[file optional.py]
+from typing import Optional, List
+def f(x: List[int]) -> None: pass
+x = []  # type: List[Optional[int]]
+y = []  # type: List[int]
+
+[file mypy.ini]
+[[mypy]
+strict_optional = False
+[[mypy-optional]
+strict_optional = True
+[builtins fixtures/list.pyi]
+
+[case testPerFileStrictOptionalComplicatedList]
+from typing import Union, Optional, List
+
+def f() -> None:
+    x = [] # type: Union[List[Optional[str]], str]
+[builtins fixtures/list.pyi]
+
+[case testPerFileStrictOptionalNoneArguments]
+# flags: --config-file tmp/mypy.ini
+import standard, optional
+
+[file standard.py]
+def f(x: int = None) -> None: pass
+
+[file optional.py]
+import standard
+def f(x: int = None) -> None: pass
+standard.f(None)
+
+[file mypy.ini]
+[[mypy]
+strict_optional = False
+[[mypy-optional]
+strict_optional = True
+
+[case testDisallowImplicitTypesIgnoreMissingTypes]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from missing import MyType
+
+def f(x: MyType) -> None:  # E: Argument 1 to "f" becomes "Any" due to an unfollowed import
+    pass
+
+[case testDisallowImplicitTypes]
+# flags: --disallow-any=unimported
+from missing import MyType
+
+def f(x: MyType) -> None:
+    pass
+[out]
+main:2: error: Cannot find module named 'missing'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:4: error: Argument 1 to "f" becomes "Any" due to an unfollowed import
+
+[case testDisallowImplicitAnyVariableDefinition]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from missing import Unchecked
+
+t: Unchecked = 12  # E: Type of variable becomes "Any" due to an unfollowed import
+
+[case testDisallowImplicitAnyGeneric]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from missing import Unchecked
+from typing import List
+
+def foo(l: List[Unchecked]) -> List[Unchecked]:
+    t = []  # type: List[Unchecked]
+    return l
+[builtins fixtures/list.pyi]
+[out]
+main:5: error: Return type becomes List[Any] due to an unfollowed import
+main:5: error: Argument 1 to "foo" becomes List[Any] due to an unfollowed import
+main:6: error: Type of variable becomes List[Any] due to an unfollowed import
+
+[case testDisallowImplicitAnyInherit]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from missing import Unchecked
+from typing import List
+
+class C(Unchecked): # E: Base type Unchecked becomes "Any" due to an unfollowed import
+    pass
+
+class A(List[Unchecked]): # E: Base type becomes List[Any] due to an unfollowed import
+    pass
+[builtins fixtures/list.pyi]
+
+[case testDisallowImplicitAnyAlias]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from missing import Unchecked
+from typing import List
+
+X = List[Unchecked]
+
+def f(x: X) -> None:  # E: Argument 1 to "f" becomes List[Any] due to an unfollowed import
+    pass
+[builtins fixtures/list.pyi]
+
+[case testDisallowImplicitAnyCast]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from missing import Unchecked
+from typing import List, cast
+
+
+foo = [1, 2, 3]
+cast(List[Unchecked], foo)  # E: Target type of cast becomes List[Any] due to an unfollowed import
+cast(Unchecked, foo)  # E: Target type of cast becomes "Any" due to an unfollowed import
+[builtins fixtures/list.pyi]
+
+[case testDisallowImplicitAnyNamedTuple]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from typing import List, NamedTuple
+from missing import Unchecked
+
+Point = NamedTuple('Point', [('x', List[Unchecked]),
+                             ('y', Unchecked)])
+[builtins fixtures/list.pyi]
+[out]
+main:5: error: NamedTuple type becomes "Tuple[List[Any], Any]" due to an unfollowed import
+
+[case testDisallowImplicitAnyTypeVarConstraints]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from typing import List, NamedTuple, TypeVar, Any
+from missing import Unchecked
+
+T = TypeVar('T', Unchecked, List[Unchecked], str)
+[builtins fixtures/list.pyi]
+[out]
+main:5: error: Constraint 1 becomes "Any" due to an unfollowed import
+main:5: error: Constraint 2 becomes List[Any] due to an unfollowed import
+
+[case testDisallowImplicitAnyNewType]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from typing import NewType, List
+from missing import Unchecked
+
+Baz = NewType('Baz', Unchecked)  # E: Argument 2 to NewType(...) must be subclassable (got Any)
+Bar = NewType('Bar', List[Unchecked])  # E: Argument 2 to NewType(...) becomes List[Any] due to an unfollowed import
+
+[builtins fixtures/list.pyi]
+
+[case testDisallowImplicitAnyCallableAndTuple]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from typing import Callable, Tuple
+from missing import Unchecked
+
+def foo(f: Callable[[], Unchecked]) -> Tuple[Unchecked]:
+    return f()
+[builtins fixtures/list.pyi]
+[out]
+main:5: error: Return type becomes "Tuple[Any]" due to an unfollowed import
+main:5: error: Argument 1 to "foo" becomes Callable[[], Any] due to an unfollowed import
+
+[case testDisallowImplicitAnySubclassingExplicitAny]
+# flags: --ignore-missing-imports --disallow-any=unimported --disallow-subclassing-any
+from typing import Any
+
+class C(Any): # E: Class cannot subclass 'Any' (has type 'Any')
+    pass
+
+[case testDisallowImplicitAnyVarDeclaration]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from missing import Unchecked
+
+foo: Unchecked = ""
+foo = ""
+x, y = 1, 2  # type: Unchecked, Unchecked
+[out]
+main:4: error: Type of variable becomes "Any" due to an unfollowed import
+main:6: error: A type on this line becomes "Any" due to an unfollowed import
+
+[case testDisallowUnimportedAnyTypedDictSimple]
+# flags: --ignore-missing-imports --disallow-any=unimported
+from mypy_extensions import TypedDict
+from x import Unchecked
+
+M = TypedDict('M', {'x': str, 'y': Unchecked})  # E: Type of a TypedDict key becomes "Any" due to an unfollowed import
+
+def f(m: M) -> M: pass  # no error
+[builtins fixtures/dict.pyi]
+
+[case testDisallowUnimportedAnyTypedDictGeneric]
+# flags: --ignore-missing-imports --disallow-any=unimported
+
+from mypy_extensions import TypedDict
+from typing import List
+from x import Unchecked
+
+M = TypedDict('M', {'x': str, 'y': List[Unchecked]})  # E: Type of a TypedDict key becomes List[Any] due to an unfollowed import
+
+def f(m: M) -> M: pass  # no error
+[builtins fixtures/dict.pyi]
+
+[case testDisallowAnyDecoratedUnannotatedDecorator]
+# flags: --disallow-any=decorated
+from typing import Any
+
+def d(f):
+    return f
+
+@d
+def f(x: Any) -> Any:  # E: Function is untyped after decorator transformation
+    pass
+@d
+def h(x):  # E: Function is untyped after decorator transformation
+    pass
+[builtins fixtures/list.pyi]
+[case testDisallowAnyDecoratedErrorIsReportedOnlyOnce]
+# flags: --disallow-any=decorated
+
+def d(f):
+    return f
+
+def d2(f):
+    return f
+
+@d
+@d2
+@d
+def f(x: int) -> None: pass  # E: Function is untyped after decorator transformation
+[case testDisallowAnyDecoratedReturnAny]
+# flags: --disallow-any=decorated
+from typing import Any
+
+def d(f) -> Any:
+    return f
+
+@d
+def f() -> None: pass  # E: Function is untyped after decorator transformation
+[builtins fixtures/list.pyi]
+[case testDisallowAnyDecoratedReturnCallable]
+# flags: --disallow-any=decorated
+from typing import Any, Callable
+
+def d(f) -> Callable[..., None]:
+    return f
+
+@d
+def g(i: int, s: str) -> None: pass  # E: Type of decorated function contains type "Any" (Callable[..., None])
+
+[builtins fixtures/list.pyi]
+[case testDisallowAnyDecoratedNonexistentDecorator]
+# flags: --disallow-any=decorated --ignore-missing-imports
+from nonexistent import d
+
+@d
+def f() -> None: pass  # E: Function is untyped after decorator transformation
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyDecoratedPartlyTypedCallable]
+# flags: --disallow-any=decorated --ignore-missing-imports
+from typing import Callable, Any, List
+
+def d(f) -> Callable[[int, Any], Any]: pass
+def d2(f) -> Callable[[int], List[Any]]: pass
+def d3(f) -> Callable[[Any], List[str]]: pass
+
+@d
+def f(i: int, s: str) -> None:  # E: Type of decorated function contains type "Any" (Callable[[int, Any], Any])
+    pass
+@d2
+def g(i: int) -> None:  # E: Type of decorated function contains type "Any" (Callable[[int], List[Any]])
+    pass
+@d3
+def h(i: int) -> None:  # E: Type of decorated function contains type "Any" (Callable[[Any], List[str]])
+    pass
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyDecoratedReturnsCallableNoParams]
+# flags: --disallow-any=decorated
+from typing import Callable
+
+def d(p) -> Callable[[], int]:
+    return p
+
+@d
+def f(i):
+    return i
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyDecoratedDecoratorReturnsNonCallable]
+# flags: --disallow-any=decorated
+def d(p) -> int:
+    return p(0)
+
+@d
+def f(i):
+    return i
+
+[case testDisallowAnyDecoratedUntypedUndecoratedFunction]
+# flags: --disallow-any=decorated
+from typing import Callable
+
+def f(i):  # no error
+    return i
+
+[case testDisallowAnyDecoratedTwoDecorators]
+# flags: --disallow-any=decorated
+from typing import Callable
+
+def typed_dec(f) -> Callable[[], int]: pass
+def untyped_dec(f): pass
+
+@typed_dec
+@untyped_dec
+def f():  # no error
+    return i
+
+@untyped_dec
+@typed_dec
+def g():  # E: Function is untyped after decorator transformation
+    return i
+
+[case testDisallowAnyExprSimple]
+# flags: --disallow-any=expr
+from typing import Any
+def f(s):
+    yield s
+
+x = f(0)  # E: Expression has type "Any"
+for x in f(0):  # E: Expression has type "Any"
+    g(x)  # E: Expression has type "Any"
+
+def g(x) -> Any:
+    yield x  # E: Expression has type "Any"
+
+l = [1, 2, 3]
+l[f(0)]  # E: Expression has type "Any"
+f(l)
+f(f(0))  # E: Expression has type "Any"
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyExprUnannotatedFunction]
+# flags: --disallow-any=expr
+def g(s):
+    return s
+
+g(0)
+w: int = g(1)
+
+[case testDisallowAnyExprExplicitAnyParam]
+# flags: --disallow-any=expr
+from typing import Any, List
+def f(s: Any) -> None:
+    pass
+
+def g(s: List[Any]) -> None:
+    pass
+
+f(0)
+
+# type of list below is inferred with expected type of List[Any], so that becomes its type
+# instead of List[str]
+g([''])  # E: Expression type contains "Any" (has type List[Any])
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyExprAllowsAnyInCast]
+# flags: --disallow-any=expr
+from typing import Any, cast
+class Foo:
+    g: Any = 2
+
+z = cast(int, Foo().g)
+m = cast(Any, Foo().g)  # E: Expression has type "Any"
+k = Foo.g  # E: Expression has type "Any"
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyExprAllowsAnyInVariableAssignmentWithExplicitTypeAnnotation]
+# flags: --disallow-any=expr
+from typing import Any
+class Foo:
+    g: Any = 2
+
+z: int = Foo().g
+x = Foo().g  # type: int
+m: Any = Foo().g  # E: Expression has type "Any"
+n = Foo().g  # type: Any  # E: Expression has type "Any"
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyExprGeneric]
+# flags: --disallow-any=expr
+from typing import List
+
+l: List = []
+l.append(1)  # E: Expression type contains "Any" (has type List[Any])
+k = l[0]  # E: Expression type contains "Any" (has type List[Any])  # E: Expression has type "Any"
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyExprTypeVar]
+# flags: --disallow-any=expr
+from typing import TypeVar
+
+T = TypeVar('T')  # no error
+
+def f(t: T) -> T:
+    return t
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyExprNamedTuple]
+# flags: --disallow-any=expr
+from typing import NamedTuple
+
+Point = NamedTuple('Point', [('x', int), ('y', int)])  # no error
+
+def origin() -> Point:
+    return Point(x=0, y=0)
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyExprNewType]
+# flags: --disallow-any=expr
+from typing import NewType
+
+NT = NewType('NT', int)  # no error
+
+def nt() -> NT:
+    return NT(1)
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyExprEnum]
+# flags: --disallow-any=expr
+from enum import Enum
+E = Enum('E', '1, 2, 3')  # no error
+
+def k(s: E) -> None: pass
+[builtins fixtures/list.pyi]
+
+[case testDisallowAnyExprTypedDict]
+# flags: --disallow-any=expr
+from mypy_extensions import TypedDict
+
+Movie = TypedDict('Movie', {'name': str, 'year': int})
+
+def g(m: Movie) -> Movie:
+    return m
+[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test
new file mode 100644
index 0000000..8f35821
--- /dev/null
+++ b/test-data/unit/check-functions.test
@@ -0,0 +1,2127 @@
+-- Test cases for the type checker related to functions, function types and
+-- calls.
+
+-- See also check-varargs.test.
+
+
+-- Callable type basics
+-- --------------------
+
+
+[case testCallingVariableWithFunctionType]
+from typing import Callable
+f = None # type: Callable[[A], B]
+a, b = None, None # type: (A, B)
+a = f(a)    # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = f(b)    # E: Argument 1 has incompatible type "B"; expected "A"
+b = f()     # E: Too few arguments
+b = f(a, a) # E: Too many arguments
+b = f(a)
+
+class A: pass
+class B: pass
+
+[case testKeywordOnlyArgumentOrderInsensitivity]
+import typing
+
+class A(object):
+    def f(self, *, a: int, b: str) -> None: pass
+
+class B(A):
+    def f(self, *, b: str, a: int) -> None: pass
+
+class C(A):
+    def f(self, *, b: int, a: str) -> None: pass # E: Signature of "f" incompatible with supertype "A"
+
+[case testPositionalOverridingArgumentNameInsensitivity]
+import typing
+
+class A(object):
+    def f(self, a: int, b: str) -> None: pass
+
+class B(A):
+    def f(self, b: str, a: int) -> None: pass # E: Argument 1 of "f" incompatible with supertype "A" # E: Argument 2 of "f" incompatible with supertype "A"
+
+class C(A):
+    def f(self, foo: int, bar: str) -> None: pass
+
+
+[case testPositionalOverridingArgumentNamesCheckedWhenMismatchingPos]
+import typing
+
+class A(object):
+    def f(self, a: int, b: str) -> None: pass
+
+class B(A):
+    def f(self, b: int, a: str) -> None: pass # E: Signature of "f" incompatible with supertype "A"
+
+
+[case testSubtypingFunctionTypes]
+from typing import Callable
+
+class A: pass
+class B(A): pass
+
+f = None # type: Callable[[B], A]
+g = None # type: Callable[[A], A]  # subtype of f
+h = None # type: Callable[[B], B]  # subtype of f
+g = h  # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[A], A])
+h = f  # E: Incompatible types in assignment (expression has type Callable[[B], A], variable has type Callable[[B], B])
+h = g  # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[B], B])
+g = f  # E: Incompatible types in assignment (expression has type Callable[[B], A], variable has type Callable[[A], A])
+f = g
+f = h
+f = f
+g = g
+h = h
+
+[case testSubtypingFunctionsDoubleCorrespondence]
+
+def l(x) -> None: ...
+def r(__, *, x) -> None: ...
+r = l # E: Incompatible types in assignment (expression has type Callable[[Any], None], variable has type Callable[[Any, NamedArg(Any, 'x')], None])
+
+[case testSubtypingFunctionsRequiredLeftArgNotPresent]
+
+def l(x, y) -> None: ...
+def r(x) -> None: ...
+r = l # E: Incompatible types in assignment (expression has type Callable[[Any, Any], None], variable has type Callable[[Any], None])
+
+[case testSubtypingFunctionsImplicitNames]
+from typing import Any
+
+def f(a, b): pass
+def g(c: Any, d: Any) -> Any: pass
+
+ff = f
+gg = g
+
+gg = f
+ff = g
+
+[case testSubtypingFunctionsDefaultsNames]
+from typing import Callable
+
+def f(a: int, b: str) -> None: pass
+f_nonames = None # type: Callable[[int, str], None]
+def g(a: int, b: str = "") -> None: pass
+def h(aa: int, b: str = "") -> None: pass
+
+ff_nonames = f_nonames
+ff = f
+gg = g
+hh = h
+
+ff = gg
+ff_nonames = ff
+ff_nonames = f_nonames # reset
+ff = ff_nonames # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg(int, 'a'), Arg(str, 'b')], None])
+ff = f # reset
+gg = ff # E: Incompatible types in assignment (expression has type Callable[[Arg(int, 'a'), Arg(str, 'b')], None], variable has type Callable[[Arg(int, 'a'), DefaultArg(str, 'b')], None])
+gg = hh # E: Incompatible types in assignment (expression has type Callable[[Arg(int, 'aa'), DefaultArg(str, 'b')], None], variable has type Callable[[Arg(int, 'a'), DefaultArg(str, 'b')], None])
+
+[case testSubtypingFunctionsArgsKwargs]
+from typing import Any, Callable
+
+def everything(*args: Any, **kwargs: Any) -> None: pass
+everywhere = None # type: Callable[..., None]
+
+def specific_1(a: int, b: str) -> None: pass
+def specific_2(a: int, *, b: str) -> None: pass
+
+ss_1 = specific_1
+ss_2 = specific_2
+ee_def = everything
+ee_var = everywhere
+
+ss_1 = ee_def
+ss_1 = specific_1
+ss_2 = ee_def
+ss_2 = specific_2
+ee_def = everywhere
+ee_def = everything
+ee_var = everything
+ee_var = everywhere
+
+ee_var = specific_1 # The difference between Callable[..., blah] and one with a *args: Any, **kwargs: Any is that the ... goes loosely both ways.
+ee_def = specific_1 # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[VarArg(Any), KwArg(Any)], None])
+
+[builtins fixtures/dict.pyi]
+
+[case testSubtypingFunctionsDecorated]
+from typing import Any
+
+# untyped decorator
+def deco(f): pass
+
+class A:
+    @deco
+    def f(self) -> Any:
+        pass
+
+class B(A):
+    @deco
+    def f(self) -> Any:
+        pass
+
+[builtins fixtures/list.pyi]
+
+
+[case testLackOfNames]
+def f(__a: int, __b: str) -> None: pass
+def g(a: int, b: str) -> None: pass
+
+ff = f
+gg = g
+
+ff = g
+gg = f # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg(int, 'a'), Arg(str, 'b')], None])
+
+[case testLackOfNamesFastparse]
+
+
+def f(__a: int, __b: str) -> None: pass
+def g(a: int, b: str) -> None: pass
+
+ff = f
+gg = g
+
+ff = g
+gg = f # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg(int, 'a'), Arg(str, 'b')], None])
+
+[case testFunctionTypeCompatibilityWithOtherTypes]
+from typing import Callable
+f = None # type: Callable[[], None]
+a, o = None, None # type: (A, object)
+a = f   # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type "A")
+f = a   # E: Incompatible types in assignment (expression has type "A", variable has type Callable[[], None])
+f = o   # E: Incompatible types in assignment (expression has type "object", variable has type Callable[[], None])
+f = f() # E: Function does not return a value
+
+f = f
+f = None
+o = f
+
+class A: pass
+
+[case testFunctionSubtypingWithVoid]
+from typing import Callable
+f = None # type: Callable[[], None]
+g = None # type: Callable[[], object]
+f = g  # E: Incompatible types in assignment (expression has type Callable[[], object], variable has type Callable[[], None])
+g = f  # OK
+
+f = f
+g = g
+
+[case testFunctionSubtypingWithMultipleArgs]
+from typing import Callable
+f = None # type: Callable[[A, A], None]
+g = None # type: Callable[[A, B], None]
+h = None # type: Callable[[B, B], None]
+f = g  # E: Incompatible types in assignment (expression has type Callable[[A, B], None], variable has type Callable[[A, A], None])
+f = h  # E: Incompatible types in assignment (expression has type Callable[[B, B], None], variable has type Callable[[A, A], None])
+g = h  # E: Incompatible types in assignment (expression has type Callable[[B, B], None], variable has type Callable[[A, B], None])
+g = f
+h = f
+h = g
+f = f
+g = g
+h = h
+
+class A: pass
+class B(A): pass
+
+[case testFunctionTypesWithDifferentArgumentCounts]
+from typing import Callable
+f = None # type: Callable[[], None]
+g = None # type: Callable[[A], None]
+h = None # type: Callable[[A, A], None]
+
+f = g   # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[], None])
+f = h   # E: Incompatible types in assignment (expression has type Callable[[A, A], None], variable has type Callable[[], None])
+h = f   # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type Callable[[A, A], None])
+h = g   # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[A, A], None])
+
+f = f
+g = g
+h = h
+
+class A: pass
+[out]
+
+[case testCompatibilityOfSimpleTypeObjectWithStdType]
+
+t = None # type: type
+a = None # type: A
+
+a = A # E: Incompatible types in assignment (expression has type Type[A], variable has type "A")
+t = f # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type "type")
+t = A
+
+class A:
+    def __init__(self, a: 'A') -> None: pass
+
+def f() -> None: pass
+
+[case testFunctionTypesWithOverloads]
+from foo import *
+[file foo.pyi]
+from typing import Callable, overload
+f = None # type: Callable[[AA], A]
+g = None # type: Callable[[B], B]
+h = None # type: Callable[[A], AA]
+
+h = i  # E: Incompatible types in assignment (expression has type overloaded function, variable has type Callable[[A], AA])
+f = j
+
+f = i
+g = i
+g = j
+
+class A: pass
+class AA(A): pass
+
+class B: pass
+
+@overload
+def i(x: AA) -> A:
+    pass
+@overload
+def i(x: B) -> B:
+    pass
+
+@overload
+def j(x: B) -> B:
+    pass
+@overload
+def j(x: A) -> AA:
+    pass
+
+[case testOverloadWithThreeItems]
+from foo import *
+[file foo.pyi]
+from typing import Callable, overload
+g1 = None # type: Callable[[A], A]
+g2 = None # type: Callable[[B], B]
+g3 = None # type: Callable[[C], C]
+g4 = None # type: Callable[[A], B]
+a, b, c = None, None, None # type: (A, B, C)
+
+b = f(a)  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(b)  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = f(c)  # E: Incompatible types in assignment (expression has type "C", variable has type "B")
+g4 = f    # E: Incompatible types in assignment (expression has type overloaded function, variable has type Callable[[A], B])
+
+g1 = f
+g2 = f
+g3 = f
+a = f(a)
+b = f(b)
+c = f(c)
+
+class A: pass
+class B: pass
+class C: pass
+
+@overload
+def f(x: A) -> A: pass
+@overload
+def f(x: B) -> B: pass
+@overload
+def f(x: C) -> C: pass
+
+[case testInferConstraintsUnequalLengths]
+from typing import Any, Callable, List
+def f(fields: List[Callable[[Any], Any]]): pass
+class C: pass
+f([C])  # E: List item 0 has incompatible type Type[C]
+class D:
+    def __init__(self, a, b): pass
+f([D])  # E: List item 0 has incompatible type Type[D]
+[builtins fixtures/list.pyi]
+
+[case testSubtypingTypeTypeAsCallable]
+from typing import Callable, Type
+class A: pass
+x = None  # type: Callable[..., A]
+y = None  # type: Type[A]
+x = y
+
+[case testSubtypingCallableAsTypeType]
+from typing import Callable, Type
+class A: pass
+x = None  # type: Callable[..., A]
+y = None  # type: Type[A]
+y = x  # E: Incompatible types in assignment (expression has type Callable[..., A], variable has type Type[A])
+
+-- Default argument values
+-- -----------------------
+
+
+[case testCallingFunctionsWithDefaultArgumentValues]
+
+a, b = None, None # type: (A, B)
+a = f()     # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = f(b)    # E: Argument 1 to "f" has incompatible type "B"; expected "Optional[A]"
+b = f(a, a) # E: Too many arguments for "f"
+
+b = f()
+b = f(a)
+b = f(AA())
+
+def f(x: 'A'  =  None) -> 'B': pass
+
+class A: pass
+class AA(A): pass
+class B: pass
+
+[case testDefaultArgumentExpressions]
+import typing
+def f(x: 'A' = A()) -> None:
+    b = x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    a = x # type: A
+
+class B: pass
+class A: pass
+[out]
+
+[case testDefaultArgumentExpressions2]
+import typing
+def f(x: 'A' = B()) -> None: # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = x # type: B      # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    a = x # type: A
+
+class B: pass
+class A: pass
+[out]
+
+[case testDefaultArgumentsWithSubtypes]
+import typing
+def f(x: 'B' = A()) -> None: # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    pass
+def g(x: 'A' = B()) -> None:
+    pass
+
+class A: pass
+class B(A): pass
+[out]
+
+[case testMultipleDefaultArgumentExpressions]
+import typing
+def f(x: 'A' = B(), y: 'B' = B()) -> None: # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    pass
+def h(x: 'A' = A(), y: 'B' = B()) -> None:
+    pass
+
+class A: pass
+class B: pass
+[out]
+
+[case testMultipleDefaultArgumentExpressions2]
+import typing
+def g(x: 'A' = A(), y: 'B' = A()) -> None: # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    pass
+
+class A: pass
+class B: pass
+[out]
+
+[case testDefaultArgumentsAndSignatureAsComment]
+import typing
+def f(x = 1): # type: (int) -> str
+    pass
+f()
+f(1)
+f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testMethodDefaultArgumentsAndSignatureAsComment]
+import typing
+class A:
+    def f(self, x = 1): # type: (int) -> str
+        pass
+A().f()
+A().f(1)
+A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+
+-- Access to method defined as a data attribute
+-- --------------------------------------------
+
+
+[case testMethodAsDataAttribute]
+from typing import Any, Callable
+class B: pass
+x = None # type: Any
+class A:
+    f = x # type: Callable[[A], None]
+    g = x # type: Callable[[A, B], None]
+a = None # type: A
+a.f()
+a.g(B())
+a.f(a) # E: Too many arguments
+a.g()  # E: Too few arguments
+
+[case testMethodWithInvalidMethodAsDataAttribute]
+from typing import Any, Callable
+class B: pass
+x = None # type: Any
+class A:
+    f = x # type: Callable[[], None]
+    g = x # type: Callable[[B], None]
+a = None # type: A
+a.f() # E: Invalid method type
+a.g() # E: Invalid method type
+
+[case testMethodWithDynamicallyTypedMethodAsDataAttribute]
+from typing import Any, Callable
+class B: pass
+x = None # type: Any
+class A:
+    f = x # type: Callable[[Any], Any]
+a = None # type: A
+a.f()
+a.f(a) # E: Too many arguments
+
+[case testOverloadedMethodAsDataAttribute]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class B: pass
+class A:
+    @overload
+    def f(self) -> None: pass
+    @overload
+    def f(self, b: B) -> None: pass
+    g = f
+a = None # type: A
+a.g()
+a.g(B())
+a.g(a) # E: No overload variant matches argument types [foo.A]
+
+[case testMethodAsDataAttributeInferredFromDynamicallyTypedMethod]
+
+class A:
+    def f(self, x): pass
+    g = f
+a = None # type: A
+a.g(object())
+a.g(a, a) # E: Too many arguments
+a.g()     # E: Too few arguments
+
+[case testMethodAsDataAttributeInGenericClass]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class B: pass
+class A(Generic[t]):
+    def f(self, x: t) -> None: pass
+    g = f
+a = None # type: A[B]
+a.g(B())
+a.g(a)   # E: Argument 1 has incompatible type A[B]; expected "B"
+
+[case testInvalidMethodAsDataAttributeInGenericClass]
+from typing import Any, TypeVar, Generic, Callable
+t = TypeVar('t')
+class B: pass
+class C: pass
+x = None # type: Any
+class A(Generic[t]):
+    f = x # type: Callable[[A[B]], None]
+ab = None # type: A[B]
+ac = None # type: A[C]
+ab.f()
+ac.f()   # E: Invalid method type
+
+[case testPartiallyTypedSelfInMethodDataAttribute]
+from typing import Any, TypeVar, Generic, Callable
+t = TypeVar('t')
+class B: pass
+class C: pass
+x = None # type: Any
+class A(Generic[t]):
+    f = x # type: Callable[[A], None]
+ab = None # type: A[B]
+ac = None # type: A[C]
+ab.f()
+ac.f()
+
+[case testCallableDataAttribute]
+from typing import Callable
+class A:
+    g = None # type: Callable[[A], None]
+    def __init__(self, f: Callable[[], None]) -> None:
+        self.f = f
+a = A(None)
+a.f()
+a.g()
+a.f(a) # E: Too many arguments
+a.g(a) # E: Too many arguments
+
+
+-- Nested functions
+-- ----------------
+
+
+[case testSimpleNestedFunction]
+import typing
+def f(a: 'A') -> None:
+    def g(b: 'B') -> None:
+         b = a # fail
+         aa = a # type: A # ok
+         b = B()
+    g(a) # fail
+    g(B())
+class A: pass
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:7: error: Argument 1 to "g" has incompatible type "A"; expected "B"
+
+[case testReturnAndNestedFunction]
+import typing
+def f() -> 'A':
+    def g() -> 'B':
+        return A() # fail
+        return B()
+    return B() # fail
+    return A()
+class A: pass
+class B: pass
+[out]
+main:4: error: Incompatible return value type (got "A", expected "B")
+main:6: error: Incompatible return value type (got "B", expected "A")
+
+[case testDynamicallyTypedNestedFunction]
+import typing
+def f(x: object) -> None:
+    def g(y):
+        pass
+    g() # E: Too few arguments for "g"
+    g(x)
+[out]
+
+[case testNestedFunctionInMethod]
+import typing
+class A:
+    def f(self) -> None:
+        def g(x: int) -> None:
+            y = x # type: int
+            a = x # type: A # fail
+        g(2)
+        g(A()) # fail
+[out]
+main:6: error: Incompatible types in assignment (expression has type "int", variable has type "A")
+main:8: error: Argument 1 to "g" has incompatible type "A"; expected "int"
+
+[case testMutuallyRecursiveNestedFunctions]
+def f() -> None:
+    def g() -> None:
+        h(1)
+        h('') # E
+    def h(x: int) -> None:
+        g()
+        g(1) # E
+[out]
+main:4: error: Argument 1 to "h" has incompatible type "str"; expected "int"
+main:7: error: Too many arguments for "g"
+
+[case testMutuallyRecursiveDecoratedFunctions]
+from typing import Callable, Any
+def dec(f) -> Callable[..., Any]: pass
+def f() -> None:
+    @dec
+    def g() -> None:
+        h()
+        h.x # E
+    @dec
+    def h(x: int) -> None:
+        g(1)
+        g.x # E
+[out]
+main:7: error: Callable[..., Any] has no attribute "x"
+main:11: error: Callable[..., Any] has no attribute "x"
+
+[case testNestedGenericFunctions]
+from typing import TypeVar
+T = TypeVar('T')
+U = TypeVar('U')
+
+def outer(x: T) -> T:
+    def inner(y: U) -> T: ...
+    return inner(1)
+
+
+-- Casts
+-- -----
+
+
+[case testCastsToAndFromFunctionTypes]
+from typing import TypeVar, Callable, Any, cast
+t = TypeVar('t')
+def f(x: t,
+      f1: Callable[[], None],
+      f2: Callable[[Any], None], o: object) -> None:
+    x = cast(t, f1)
+    f1 = cast(Callable[[], None], x)
+    f1 = cast(Callable[[], None], f2)
+    f1 = cast(Callable[[], None], o)
+
+
+-- Function decorators
+-- -------------------
+
+
+[case testTrivialStaticallyTypedFunctionDecorator]
+from typing import TypeVar
+t = TypeVar('t')
+def dec(f: t) -> t:
+    return f
+@dec
+def f(x: int) -> None: pass
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testTrivialStaticallyTypedMethodDecorator]
+from typing import TypeVar
+t = TypeVar('t')
+def dec(f: t) -> t:
+    return f
+class A:
+    @dec
+    def f(self, x: int) -> None: pass
+A().f(1)
+A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+class B: pass
+
+[case testTrivialDecoratedNestedFunction]
+from typing import TypeVar
+t = TypeVar('t')
+def dec(f: t) -> t:
+    return f
+def g() -> None:
+    @dec
+    def f(x: int) -> None: pass
+    f(1)
+    f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[out]
+
+[case testCheckingDecoratedFunction]
+import typing
+def dec(f): pass
+@dec
+def f(x: 'A') -> None:
+    a = x # type: A
+    x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+class A: pass
+[out]
+
+[case testDecoratorThatSwitchesType]
+from typing import Callable
+def dec(x) -> Callable[[], None]: pass
+@dec
+def f(y): pass
+f()
+f(None) # E: Too many arguments for "f"
+
+[case testDecoratorThatSwitchesTypeWithMethod]
+from typing import Any, Callable
+def dec(x) -> Callable[[Any], None]: pass
+class A:
+    @dec
+    def f(self, a, b, c): pass
+a = None # type: A
+a.f()
+a.f(None) # E: Too many arguments for "f" of "A"
+
+[case testNestedDecorators]
+from typing import Any, Callable
+def dec1(f: Callable[[Any], None]) -> Callable[[], None]: pass
+def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass
+@dec1
+@dec2
+def f(x, y): pass
+f()
+f(None) # E: Too many arguments for "f"
+
+[case testInvalidDecorator1]
+from typing import Any, Callable
+def dec1(f: Callable[[Any], None]) -> Callable[[], None]: pass
+def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass
+@dec1 # E: Argument 1 to "dec2" has incompatible type Callable[[Any], Any]; expected Callable[[Any, Any], None]
+@dec2
+def f(x): pass
+
+[case testInvalidDecorator2]
+from typing import Any, Callable
+def dec1(f: Callable[[Any, Any], None]) -> Callable[[], None]: pass
+def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass
+@dec1 # E: Argument 1 to "dec1" has incompatible type Callable[[Any], None]; expected Callable[[Any, Any], None]
+@dec2
+def f(x, y): pass
+
+[case testNoTypeCheckDecoratorOnMethod1]
+from typing import no_type_check
+
+@no_type_check
+def foo(x: 'bar', y: {'x': 4}) -> 42:
+    1 + 'x'
+
+[case testNoTypeCheckDecoratorOnMethod2]
+import typing
+
+@typing.no_type_check
+def foo(x: 's', y: {'x': 4}) -> 42:
+    1 + 'x'
+
+@typing.no_type_check
+def bar() -> None:
+    1 + 'x'
+
+[case testCallingNoTypeCheckFunction]
+import typing
+
+@typing.no_type_check
+def foo(x: {1:2}) -> [1]:
+    1 + 'x'
+
+foo()
+foo(1, 'b')
+
+[case testCallingNoTypeCheckFunction2]
+import typing
+
+def f() -> None:
+    foo()
+
+@typing.no_type_check
+def foo(x: {1:2}) -> [1]:
+    1 + 'x'
+
+[case testNoTypeCheckDecoratorSemanticError]
+import typing
+
+@typing.no_type_check
+def foo(x: {1:2}) -> [1]:
+    x = y
+
+
+-- Forward references to decorated functions
+-- -----------------------------------------
+
+
+[case testForwardReferenceToDynamicallyTypedDecorator]
+def f(self) -> None:
+    g()
+    g(1)
+
+def dec(f):
+    return f
+
+@dec
+def g():
+    pass
+
+[case testForwardReferenceToDecoratorWithAnyReturn]
+from typing import Any
+
+def f(self) -> None:
+    g()
+    g(1)
+
+def dec(f) -> Any:
+    return f
+
+@dec
+def g():
+    pass
+
+[case testForwardReferenceToDecoratorWithIdentityMapping]
+from typing import TypeVar
+
+def f(self) -> None:
+    g()
+    g(1) # E: Too many arguments for "g"
+    h(1).x # E: "str" has no attribute "x"
+    h('') # E: Argument 1 to "h" has incompatible type "str"; expected "int"
+
+T = TypeVar('T')
+def dec(f: T) -> T:
+    return f
+
+@dec
+def g(): pass
+@dec
+def h(x: int) -> str: pass
+[out]
+
+[case testForwardReferenceToDynamicallyTypedDecoratedMethod]
+def f(self) -> None:
+    A().f(1).y
+    A().f()
+
+class A:
+    @dec
+    def f(self, x): pass
+
+def dec(f): return f
+[builtins fixtures/staticmethod.pyi]
+
+[case testForwardReferenceToStaticallyTypedDecoratedMethod]
+from typing import TypeVar
+
+def f(self) -> None:
+    A().f(1).y # E: "str" has no attribute "y"
+    A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+class A:
+    @dec
+    def f(self, a: int) -> str: return ''
+
+T = TypeVar('T')
+def dec(f: T) -> T: return f
+[builtins fixtures/staticmethod.pyi]
+[out]
+
+[case testForwardReferenceToDynamicallyTypedProperty]
+def f(self) -> None:
+    A().x.y
+
+class A:
+    @property
+    def x(self): pass
+[builtins fixtures/property.pyi]
+
+[case testForwardReferenceToStaticallyTypedProperty]
+def f(self) -> None:
+    A().x.y # E: "int" has no attribute "y"
+
+class A:
+    @property
+    def x(self) -> int: return 1
+[builtins fixtures/property.pyi]
+[out]
+
+[case testForwardReferenceToDynamicallyTypedStaticMethod]
+def f(self) -> None:
+    A.x(1).y
+    A.x() # E: Too few arguments for "x"
+
+class A:
+    @staticmethod
+    def x(x): pass
+[builtins fixtures/staticmethod.pyi]
+[out]
+
+[case testForwardReferenceToStaticallyTypedStaticMethod]
+def f(self) -> None:
+    A.x(1).y # E: "str" has no attribute "y"
+    A.x('') # E: Argument 1 to "x" of "A" has incompatible type "str"; expected "int"
+
+class A:
+    @staticmethod
+    def x(a: int) -> str: return ''
+[builtins fixtures/staticmethod.pyi]
+[out]
+
+[case testForwardReferenceToDynamicallyTypedClassMethod]
+def f(self) -> None:
+    A.x(1).y
+    A.x() # E: Too few arguments for "x"
+
+class A:
+    @classmethod
+    def x(cls, a): pass
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testForwardReferenceToStaticallyTypedClassMethod]
+def f(self) -> None:
+    A.x(1).y # E: "str" has no attribute "y"
+    A.x('') # E: Argument 1 to "x" of "A" has incompatible type "str"; expected "int"
+
+class A:
+    @classmethod
+    def x(cls, x: int) -> str: return ''
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testForwardReferenceToDecoratedFunctionUsingMemberExpr]
+import m
+
+def f(self) -> None:
+    g(1).x # E: "str" has no attribute "x"
+
+@m.dec
+def g(x: int) -> str: pass
+[file m.py]
+from typing import TypeVar
+T = TypeVar('T')
+def dec(f: T) -> T:
+    return f
+[out]
+
+[case testForwardReferenceToFunctionWithMultipleDecorators]
+def f(self) -> None:
+    g()
+    g(1)
+
+def dec(f):
+    return f
+
+@dec
+@dec2
+def g():
+    pass
+
+def dec2(f):
+    return f
+
+[case testForwardReferenceToDynamicallyTypedDecoratedStaticMethod]
+def f(self) -> None:
+    A().f(1).y
+    A().f()
+    A().g(1).y
+    A().g()
+
+class A:
+    @dec
+    @staticmethod
+    def f(self, x): pass
+    @staticmethod
+    @dec
+    def g(self, x): pass
+
+def dec(f): return f
+[builtins fixtures/staticmethod.pyi]
+
+[case testForwardRefereceToDecoratedFunctionWithCallExpressionDecorator]
+def f(self) -> None:
+    g()
+    g(1)
+
+@dec(1)
+def g(): pass
+
+def dec(f): pass
+
+
+-- Decorator functions in import cycles
+-- ------------------------------------
+
+
+[case testDecoratorWithIdentityTypeInImportCycle]
+import a
+
+[file a.py]
+import b
+from d import dec
+@dec
+def f(x: int) -> None: pass
+b.g(1) # E
+
+[file b.py]
+import a
+from d import dec
+@dec
+def g(x: str) -> None: pass
+a.f('')
+
+[file d.py]
+from typing import TypeVar
+T = TypeVar('T')
+def dec(f: T) -> T: return f
+
+[out]
+tmp/b.py:5: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+tmp/a.py:5: error: Argument 1 to "g" has incompatible type "int"; expected "str"
+
+[case testDecoratorWithNoAnnotationInImportCycle]
+import a
+
+[file a.py]
+import b
+from d import dec
+@dec
+def f(x: int) -> None: pass
+b.g(1, z=4)
+
+[file b.py]
+import a
+from d import dec
+@dec
+def g(x: str) -> None: pass
+a.f('', y=2)
+
+[file d.py]
+def dec(f): return f
+
+[case testDecoratorWithFixedReturnTypeInImportCycle]
+import a
+
+[file a.py]
+import b
+from d import dec
+@dec
+def f(x: int) -> str: pass
+b.g(1)()
+
+[file b.py]
+import a
+from d import dec
+@dec
+def g(x: int) -> str: pass
+a.f(1)()
+
+[file d.py]
+from typing import Callable
+def dec(f: Callable[[int], str]) -> Callable[[int], str]: return f
+
+[out]
+tmp/b.py:5: error: "str" not callable
+tmp/a.py:5: error: "str" not callable
+
+[case testDecoratorWithCallAndFixedReturnTypeInImportCycle]
+import a
+
+[file a.py]
+import b
+from d import dec
+@dec()
+def f(x: int) -> str: pass
+b.g(1)()
+
+[file b.py]
+import a
+from d import dec
+@dec()
+def g(x: int) -> str: pass
+a.f(1)()
+
+[file d.py]
+from typing import Callable
+def dec() -> Callable[[Callable[[int], str]], Callable[[int], str]]: pass
+
+[out]
+tmp/b.py:5: error: "str" not callable
+tmp/a.py:5: error: "str" not callable
+
+[case testDecoratorWithCallAndFixedReturnTypeInImportCycleAndDecoratorArgs]
+import a
+
+[file a.py]
+import b
+from d import dec
+@dec(1)
+def f(x: int) -> str: pass
+b.g(1)()
+
+[file b.py]
+import a
+from d import dec
+@dec(1)
+def g(x: int) -> str: pass
+a.f(1)()
+
+[file d.py]
+from typing import Callable
+def dec(x: str) -> Callable[[Callable[[int], str]], Callable[[int], str]]: pass
+
+[out]
+tmp/b.py:3: error: Argument 1 to "dec" has incompatible type "int"; expected "str"
+tmp/b.py:5: error: "str" not callable
+tmp/a.py:3: error: Argument 1 to "dec" has incompatible type "int"; expected "str"
+tmp/a.py:5: error: "str" not callable
+
+[case testUndefinedDecoratorInImportCycle]
+# cmd: mypy -m foo.base
+[file foo/__init__.py]
+import foo.base
+class Derived(foo.base.Base):
+    def method(self) -> None: pass
+[file foo/base.py]
+import foo
+class Base:
+    @decorator
+    def method(self) -> None: pass
+[out]
+tmp/foo/base.py:3: error: Name 'decorator' is not defined
+
+
+-- Conditional function definition
+-- -------------------------------
+
+
+[case testTypeCheckBodyOfConditionalFunction]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None:
+        x = 1
+        x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[out]
+
+[case testCallConditionalFunction]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None:  pass
+    f(1)
+    f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testConditionalFunctionDefinitionWithIfElse]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None:
+        x = 'x'   # fail
+        x = 1
+else:
+    def f(x: int) -> None:
+        x + 'x'   # fail
+        x = 1
+f(1)
+f('x') # fail
+[out]
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:9: error: Unsupported operand types for + ("int" and "str")
+main:12: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testNestedConditionalFunctionDefinitionWithIfElse]
+from typing import Any
+x = None # type: Any
+def top() -> None:
+    if x:
+        def f(x: int) -> None:
+            x = 'x'   # fail
+            x = 1
+    else:
+        def f(x: int) -> None:
+            x + 'x'   # fail
+            x = 1
+    f(1)
+    f('x') # fail
+[out]
+main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:10: error: Unsupported operand types for + ("int" and "str")
+main:13: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testUnconditionalRedefinitionOfConditionalFunction]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(): pass
+def f(): pass # E: Name 'f' already defined on line 4
+
+[case testIncompatibleConditionalFunctionDefinition]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None: pass
+else:
+    def f(x): pass # E: All conditional function variants must have identical signatures
+
+[case testIncompatibleConditionalFunctionDefinition2]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None: pass
+else:
+    def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures
+
+[case testIncompatibleConditionalFunctionDefinition3]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None: pass
+else:
+    def f(x: int = 0) -> None: pass # E: All conditional function variants must have identical signatures
+
+[case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1]
+from typing import Any
+def f(x: str) -> None: pass
+x = None # type: Any
+if x:
+    def f(x: int) -> None: pass # E: All conditional function variants must have identical signatures
+
+[case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1]
+from typing import Any
+def f(x: int) -> None: pass # N: "f" defined here
+x = None # type: Any
+if x:
+    def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures
+f(x=1) # The first definition takes precedence.
+f(y=1) # E: Unexpected keyword argument "y" for "f"
+
+[case testRedefineFunctionDefinedAsVariable]
+def g(): pass
+f = g
+if g():
+    def f(): pass
+f()
+f(1) # E: Too many arguments
+
+[case testRedefineFunctionDefinedAsVariableInitializedToNone]
+def g(): pass
+f = None
+if g():
+    def f(): pass
+f()
+f(1) # E: Too many arguments for "f"
+
+[case testRedefineNestedFunctionDefinedAsVariableInitializedToNone]
+def g() -> None:
+    f = None
+    if object():
+        def f(x: int) -> None: pass
+    f() # E: Too few arguments for "f"
+    f(1)
+    f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[out]
+
+[case testRedefineFunctionDefinedAsVariableWithInvalidSignature]
+def g(): pass
+f = g
+if g():
+    def f(x): pass  # E: Incompatible redefinition (redefinition with type Callable[[Any], Any], original type Callable[[], Any])
+
+[case testRedefineFunctionDefinedAsVariableWithVariance1]
+class B: pass
+class C(B): pass
+def g(x: C) -> B: pass
+f = g
+if g(C()):
+    def f(x: C) -> C: pass
+
+[case testRedefineFunctionDefinedAsVariableWithVariance2]
+class B: pass
+class C(B): pass
+def g(x: C) -> B: pass
+f = g
+if g(C()):
+    def f(x: B) -> B: pass
+
+[case testRedefineFunctionDefinedAsVariableInitializedToEmptyList]
+f = [] # E: Need type annotation for variable
+if object():
+    def f(): pass # E: Incompatible redefinition
+f()
+f(1)
+[builtins fixtures/list.pyi]
+
+
+-- Conditional method definition
+-- -----------------------------
+
+
+[case testTypeCheckBodyOfConditionalMethod]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+        def f(self, x: int) -> None:
+            x = 1
+            x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[out]
+
+[case testCallConditionalMethodInClassBody]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+        def f(self, x: int) -> None:  pass
+        f(x, 1)
+        f(x, 'x') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int"
+    f(x, 1)
+    f(x, 'x') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int"
+[out]
+
+[case testCallConditionalMethodViaInstance]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+         def f(self, x: int) -> None: pass
+A().f(1)
+A().f('x') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testConditionalMethodDefinitionWithIfElse]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+        def f(self, x: int) -> None:
+            x = 'x'   # fail
+            x = 1
+    else:
+        def f(self, x: int) -> None:
+            x + 'x'   # fail
+            x = 1
+A().f(1)
+A().f('x') # fail
+[out]
+main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:10: error: Unsupported operand types for + ("int" and "str")
+main:13: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testUnconditionalRedefinitionOfConditionalMethod]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+        def f(self): pass
+    def f(self): pass # E: Name 'f' already defined
+
+[case testIncompatibleConditionalMethodDefinition]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+        def f(self, x: int) -> None: pass
+    else:
+        def f(self, x): pass # E: All conditional function variants must have identical signatures
+[out]
+
+[case testConditionalFunctionDefinitionInTry]
+import typing
+try:
+    def f(x: int) -> None: pass
+except:
+    def g(x: str) -> None: pass
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+g('x')
+g(1) # E: Argument 1 to "g" has incompatible type "int"; expected "str"
+
+-- Callable with specific arg list
+-- -------------------------------
+
+[case testCallableWithNamedArg]
+from typing import Callable
+from mypy_extensions import Arg
+
+def a(f: Callable[[Arg(int, 'x')], int]):
+    f(x=4)
+    f(5)
+    f(y=3) # E: Unexpected keyword argument "y"
+
+[builtins fixtures/dict.pyi]
+
+[case testCallableWithOptionalArg]
+from typing import Callable
+from mypy_extensions import DefaultArg
+
+def a(f: Callable[[DefaultArg(int, 'x')], int]):
+    f(x=4)
+    f(2)
+    f()
+    f(y=3) # E: Unexpected keyword argument "y"
+    f("foo") # E: Argument 1 has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testCallableWithNamedArgFromExpr]
+from typing import Callable
+from mypy_extensions import Arg
+
+F = Callable[[Arg(int, 'x')], int]
+
+def a(f: F):
+    f(x=4)
+    f(5)
+    f(y=3) # E: Unexpected keyword argument "y"
+
+[builtins fixtures/dict.pyi]
+
+[case testCallableWithOptionalArgFromExpr]
+from typing import Callable
+from mypy_extensions import DefaultArg
+
+F = Callable[[DefaultArg(int, 'x')], int]
+def a(f: F):
+    f(x=4)
+    f(2)
+    f()
+    f(y=3) # E: Unexpected keyword argument "y"
+    f("foo") # E: Argument 1 has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testCallableParsingInInheritence]
+
+from collections import namedtuple
+class C(namedtuple('t', 'x')):
+    pass
+
+[case testCallableParsingSameName]
+from typing import Callable
+
+def Arg(x, y): pass
+
+F = Callable[[Arg(int, 'x')], int]  # E: Invalid argument constructor "__main__.Arg"
+
+[case testCallableParsingFromExpr]
+from typing import Callable, List
+from mypy_extensions import Arg, VarArg, KwArg
+import mypy_extensions
+
+def WrongArg(x, y): return y
+# Note that for this test, the 'Value of type "int" is not indexable' errors are silly,
+# and a consequence of Callable being set to an int in the test stub.  We can't set it to
+# something else sensible, because other tests require that the stub not have anything
+# that looks like a function call.
+F = Callable[[WrongArg(int, 'x')], int] # E: Invalid argument constructor "__main__.WrongArg"
+G = Callable[[Arg(1, 'x')], int] # E: Invalid type alias # E: Value of type "int" is not indexable
+H = Callable[[VarArg(int, 'x')], int] # E: VarArg arguments should not have names
+I = Callable[[VarArg(int)], int] # ok
+J = Callable[[VarArg(), KwArg()], int] # ok
+K = Callable[[VarArg(), int], int] # E: Required positional args may not appear after default, named or var args
+L = Callable[[Arg(name='x', type=int)], int] # ok
+# I have commented out the following test because I don't know how to write an expectation for the "defined here" note part of the error.
+# M = Callable[[Arg(gnome='x', type=int)], int]   E: Invalid type alias   E: Unexpected keyword argument "gnome" for "Arg"
+N = Callable[[Arg(name=None, type=int)], int] # ok
+O = Callable[[List[Arg(int)]], int]  # E: Invalid type alias  # E: Value of type "int" is not indexable  # E: Type expected within [...] # E: The type Type[List[Any]] is not generic and not indexable
+P = Callable[[mypy_extensions.VarArg(int)], int] # ok
+Q = Callable[[Arg(int, type=int)], int]  # E: Invalid type alias  # E: Value of type "int" is not indexable  # E: "Arg" gets multiple values for keyword argument "type"
+R = Callable[[Arg(int, 'x', name='y')], int]  # E: Invalid type alias  # E: Value of type "int" is not indexable  # E: "Arg" gets multiple values for keyword argument "name"
+
+[builtins fixtures/dict.pyi]
+
+[case testCallableParsing]
+from typing import Callable
+from mypy_extensions import Arg, VarArg, KwArg
+
+def WrongArg(x, y): return y
+
+def b(f: Callable[[Arg(1, 'x')], int]): pass # E: invalid type comment or annotation
+def d(f: Callable[[VarArg(int)], int]): pass # ok
+def e(f: Callable[[VarArg(), KwArg()], int]): pass # ok
+def g(f: Callable[[Arg(name='x', type=int)], int]): pass # ok
+def h(f: Callable[[Arg(gnome='x', type=int)], int]): pass  # E: Unexpected argument "gnome" for argument constructor
+def i(f: Callable[[Arg(name=None, type=int)], int]): pass # ok
+def j(f: Callable[[Arg(int, 'x', name='y')], int]): pass # E: "Arg" gets multiple values for keyword argument "name"
+def k(f: Callable[[Arg(int, type=int)], int]): pass # E: "Arg" gets multiple values for keyword argument "type"
+
+[builtins fixtures/dict.pyi]
+
+[case testCallableTypeAnalysis]
+from typing import Callable
+from mypy_extensions import Arg, VarArg as VARG, KwArg
+import mypy_extensions as ext
+
+def WrongArg(x, y): return y
+def a(f: Callable[[WrongArg(int, 'x')], int]): pass # E: Invalid argument constructor "__main__.WrongArg"
+def b(f: Callable[[BadArg(int, 'x')], int]): pass # E: Name 'BadArg' is not defined
+def d(f: Callable[[ext.VarArg(int)], int]): pass # ok
+def e(f: Callable[[VARG(), ext.KwArg()], int]): pass # ok
+def g(f: Callable[[ext.Arg(name='x', type=int)], int]): pass # ok
+def i(f: Callable[[Arg(name=None, type=int)], int]): pass # ok
+
+def f1(*args) -> int: pass
+def f2(*args, **kwargs) -> int: pass
+
+d(f1)
+e(f2)
+d(f2)
+e(f1)  # E: Argument 1 to "e" has incompatible type Callable[[VarArg(Any)], int]; expected Callable[[VarArg(Any), KwArg(Any)], int]
+
+[builtins fixtures/dict.pyi]
+
+[case testCallableWrongTypeType]
+from typing import Callable
+from mypy_extensions import Arg
+def b(f: Callable[[Arg(1, 'x')], int]): pass # E: invalid type comment or annotation
+[builtins fixtures/dict.pyi]
+
+[case testCallableTooManyVarArg]
+from typing import Callable
+from mypy_extensions import VarArg
+def c(f: Callable[[VarArg(int, 'x')], int]): pass # E: VarArg arguments should not have names
+[builtins fixtures/dict.pyi]
+
+[case testCallableFastParseGood]
+from typing import Callable
+from mypy_extensions import VarArg, Arg, KwArg
+def d(f: Callable[[VarArg(int)], int]): pass # ok
+def e(f: Callable[[VarArg(), KwArg()], int]): pass # ok
+def g(f: Callable[[Arg(name='x', type=int)], int]): pass # ok
+def i(f: Callable[[Arg(name=None, type=int)], int]): pass # ok
+[builtins fixtures/dict.pyi]
+
+[case testCallableFastParseBadArgArgName]
+from typing import Callable
+from mypy_extensions import Arg
+def h(f: Callable[[Arg(gnome='x', type=int)], int]): pass # E: Unexpected argument "gnome" for argument constructor
+[builtins fixtures/dict.pyi]
+
+[case testCallableKindsOrdering]
+from typing import Callable, Any
+from mypy_extensions import Arg, VarArg, KwArg, DefaultArg, NamedArg
+
+def f(f: Callable[[VarArg(), int], int]): pass # E: Required positional args may not appear after default, named or var args
+def g(f: Callable[[VarArg(), VarArg()], int]): pass # E: Var args may not appear after named or var args
+def h(f: Callable[[KwArg(), KwArg()], int]): pass # E: You may only have one **kwargs argument
+def i(f: Callable[[DefaultArg(), int], int]): pass # E: Required positional args may not appear after default, named or var args
+def j(f: Callable[[NamedArg(Any, 'x'), DefaultArg(int, 'y')], int]): pass # E: Positional default args may not appear after named or var args
+def k(f: Callable[[KwArg(), NamedArg(Any, 'x')], int]): pass # E: A **kwargs argument must be the last argument
+[builtins fixtures/dict.pyi]
+
+[case testCallableDuplicateNames]
+from typing import Callable
+from mypy_extensions import Arg, VarArg, KwArg, DefaultArg
+
+def f(f: Callable[[Arg(int, 'x'), int, Arg(int, 'x')], int]): pass # E: Duplicate argument 'x' in Callable
+
+[builtins fixtures/dict.pyi]
+
+
+[case testCallableWithKeywordOnlyArg]
+from typing import Callable
+from mypy_extensions import NamedArg
+
+def a(f: Callable[[NamedArg(int, 'x')], int]):
+    f(x=4)
+    f(2) # E: Too many positional arguments
+    f() # E: Missing named argument "x"
+    f(y=3) # E: Unexpected keyword argument "y"
+    f(x="foo") # E: Argument 1 has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testCallableWithKeywordOnlyOptionalArg]
+from typing import Callable
+from mypy_extensions import DefaultNamedArg
+
+def a(f: Callable[[DefaultNamedArg(int, 'x')], int]):
+    f(x=4)
+    f(2) # E: Too many positional arguments
+    f()
+    f(y=3) # E: Unexpected keyword argument "y"
+    f(x="foo") # E: Argument 1 has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testCallableWithKwargs]
+from typing import Callable
+from mypy_extensions import KwArg
+
+def a(f: Callable[[KwArg(int)], int]):
+    f(x=4)
+    f(2) # E: Too many arguments
+    f()
+    f(y=3)
+    f(x=4, y=3, z=10)
+    f(x="foo") # E: Argument 1 has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+
+[case testCallableWithVarArg]
+from typing import Callable
+from mypy_extensions import VarArg
+
+def a(f: Callable[[VarArg(int)], int]):
+    f(x=4)  # E: Unexpected keyword argument "x"
+    f(2)
+    f()
+    f(3, 4, 5)
+    f("a")  # E: Argument 1 has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testCallableArgKindSubtyping]
+from typing import Callable
+from mypy_extensions import Arg, DefaultArg
+
+int_str_fun = None # type: Callable[[int, str], str]
+int_opt_str_fun = None # type: Callable[[int, DefaultArg(str, None)], str]
+int_named_str_fun = None # type: Callable[[int, Arg(str, 's')], str]
+
+def isf(ii: int, ss: str) -> str:
+    return ss
+
+def iosf(i: int, s: str = "bar") -> str:
+    return s
+
+def isf_unnamed(__i: int, __s: str) -> str:
+    return __s
+
+int_str_fun = isf
+int_str_fun = isf_unnamed
+int_named_str_fun = isf_unnamed # E: Incompatible types in assignment (expression has type Callable[[int, str], str], variable has type Callable[[int, Arg(str, 's')], str])
+int_opt_str_fun = iosf
+int_str_fun = iosf
+int_opt_str_fun = isf # E: Incompatible types in assignment (expression has type Callable[[Arg(int, 'ii'), Arg(str, 'ss')], str], variable has type Callable[[int, DefaultArg(str)], str])
+
+int_named_str_fun = isf # E: Incompatible types in assignment (expression has type Callable[[Arg(int, 'ii'), Arg(str, 'ss')], str], variable has type Callable[[int, Arg(str, 's')], str])
+int_named_str_fun = iosf
+
+[builtins fixtures/dict.pyi]
+
+-- Callable[..., T]
+-- ----------------
+
+
+[case testCallableWithArbitraryArgs]
+from typing import Callable
+def f(x: Callable[..., int]) -> None:
+    x()
+    x(1)
+    x(z=1)
+    x() + '' # E: Unsupported operand types for + ("int" and "str")
+[out]
+
+[case testCallableWithArbitraryArgs2]
+from typing import Callable
+def f(x: Callable[..., int]) -> None:
+    x(*[1], **{'x': 2})
+[builtins fixtures/dict.pyi]
+
+[case testCastWithCallableAndArbitraryArgs]
+from typing import Callable, cast
+f = cast(Callable[..., int], None)
+f(x=4) + '' # E: Unsupported operand types for + ("int" and "str")
+
+[case testCallableWithArbitraryArgsInErrorMessage]
+from typing import Callable
+def f(x: Callable[..., int]) -> None:
+    x = 1  # E: Incompatible types in assignment (expression has type "int", variable has type Callable[..., int])
+[out]
+
+[case testCallableWithArbitraryArgsInGenericFunction]
+from typing import Callable, TypeVar
+T = TypeVar('T')
+def f(x: Callable[..., T]) -> T: pass
+def g(*x: int) -> str: pass
+x = f(g)
+x + 1 # E: Unsupported left operand type for + ("str")
+[builtins fixtures/list.pyi]
+
+[case testCallableWithArbitraryArgsSubtyping]
+from typing import Callable
+def f(x: Callable[..., int]) -> None: pass
+def g1(): pass
+def g2(x, y) -> int: pass
+def g3(*, y: str) -> int: pass
+def g4(*, y: int) -> str: pass
+f(g1)
+f(g2)
+f(g3)
+f(g4) # E: Argument 1 to "f" has incompatible type Callable[[NamedArg(int, 'y')], str]; expected Callable[..., int]
+
+[case testCallableWithArbitraryArgsSubtypingWithGenericFunc]
+from typing import Callable, TypeVar
+T = TypeVar('T')
+def f(x: Callable[..., int]) -> None: pass
+def g1(x: T) -> int: pass
+def g2(*x: T) -> int: pass
+def g3(*x: T) -> T: pass
+f(g1)
+f(g2)
+f(g3)
+
+-- (...) -> T
+-- ----------------
+[case testEllipsisWithArbitraryArgsOnBareFunction]
+def f(x, y, z): # type: (...) -> None
+    pass
+f(1, "hello", [])
+f(x=1, y="hello", z=[])
+[builtins fixtures/dict.pyi]
+
+[case testEllipsisWithArbitraryArgsOnBareFunctionWithDefaults]
+def f(x, y=1, z="hey"): # type: (...) -> None
+    pass
+f(1, "hello", [])
+f(x=1, y="hello", z=[])
+[builtins fixtures/dict.pyi]
+
+[case testEllipsisWithArbitraryArgsOnBareFunctionWithKwargs]
+from typing import Dict
+def f(x, **kwargs): # type: (...) -> None
+    success_dict_type = kwargs # type: Dict[str, str]
+    failure_dict_type = kwargs # type: Dict[int, str] # E: Incompatible types in assignment (expression has type Dict[str, Any], variable has type Dict[int, str])
+f(1, thing_in_kwargs=["hey"])
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testEllipsisWithArbitraryArgsOnBareFunctionWithVarargs]
+from typing import Tuple, Any
+def f(x, *args): # type: (...) -> None
+    success_tuple_type = args # type: Tuple[Any, ...]
+    fail_tuple_type = args # type: None # E: Incompatible types in assignment (expression has type Tuple[Any, ...], variable has type None)
+f(1, "hello")
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testEllipsisWithArbitraryArgsOnInstanceMethod]
+class A:
+    def f(self, x, y, z): # type: (...) -> None
+        pass
+
+[case testEllipsisWithArbitraryArgsOnClassMethod]
+class A:
+    @classmethod
+    def f(cls, x, y, z): # type: (...) -> None
+        pass
+[builtins fixtures/classmethod.pyi]
+
+[case testEllipsisWithArbitraryArgsOnStaticMethod]
+class A:
+    @staticmethod
+    def f(x, y, z): # type: (...) -> None
+        pass
+[builtins fixtures/staticmethod.pyi]
+
+[case testEllipsisWithSomethingAfterItFails]
+def f(x, y, z): # type: (..., int) -> None
+    pass
+[out]
+main:1: error: Ellipses cannot accompany other argument types in function type signature.
+
+[case testEllipsisWithSomethingBeforeItFails]
+def f(x, y, z): # type: (int, ...) -> None
+    pass
+[out]
+main:1: error: Ellipses cannot accompany other argument types in function type signature.
+
+[case testRejectCovariantArgument]
+from typing import TypeVar, Generic
+
+t = TypeVar('t', covariant=True)
+class A(Generic[t]):
+    def foo(self, x: t) -> None:
+        return None
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Cannot use a covariant type variable as a parameter
+
+[case testRejectContravariantReturnType]
+from typing import TypeVar, Generic
+
+t = TypeVar('t', contravariant=True)
+class A(Generic[t]):
+    def foo(self) -> t:
+        return None
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Cannot use a contravariant type variable as return type
+
+[case testAcceptCovariantReturnType]
+from typing import TypeVar, Generic
+
+t = TypeVar('t', covariant=True)
+class A(Generic[t]):
+    def foo(self) -> t:
+        return None
+[builtins fixtures/bool.pyi]
+[case testAcceptContravariantArgument]
+from typing import TypeVar, Generic
+
+t = TypeVar('t', contravariant=True)
+class A(Generic[t]):
+    def foo(self, x: t) -> None:
+        return None
+[builtins fixtures/bool.pyi]
+
+
+-- Redefining functions
+-- --------------------
+
+
+[case testRedefineFunction]
+from typing import Any
+def f(x) -> Any: pass
+def g(x, y): pass
+def h(x): pass
+def j(y) -> Any: pass
+f = h
+f = j # E: Incompatible types in assignment (expression has type Callable[[Arg(Any, 'y')], Any], variable has type Callable[[Arg(Any, 'x')], Any])
+f = g # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[Any], Any])
+
+[case testRedefineFunction2]
+def f() -> None: pass
+def f() -> None: pass # E: Name 'f' already defined on line 1
+
+
+-- Special cases
+-- -------------
+
+
+[case testFunctionDefinitionWithForStatement]
+for _ in [1]:
+    def f(): pass
+else:
+    def g(): pass
+f()
+g()
+[builtins fixtures/list.pyi]
+
+[case testFunctionDefinitionWithWhileStatement]
+while bool():
+    def f(): pass
+else:
+    def g(): pass
+f()
+g()
+[builtins fixtures/bool.pyi]
+
+[case testBareCallable]
+from typing import Callable, Any
+
+def foo(f: Callable) -> bool:
+    return f()
+
+def f1() -> bool:
+    return False
+
+foo(f1)
+[builtins fixtures/bool.pyi]
+
+[case testFunctionNestedWithinWith]
+from typing import Any
+a = 1  # type: Any
+with a:
+    def f() -> None:
+        pass
+    f(1) # E: Too many arguments for "f"
+
+
+[case testNameForDecoratorMethod]
+from typing import Callable
+
+class A:
+    def f(self) -> None:
+        # In particular, test that the error message contains "g" of "A".
+        self.g() # E: Too few arguments for "g" of "A"
+        self.g(1)
+    @dec
+    def g(self, x: str) -> None: pass
+
+def dec(f: Callable[[A, str], None]) -> Callable[[A, int], None]: pass
+[out]
+
+[case testUnknownFunctionNotCallable]
+def f() -> None:
+    pass
+def g(x: int) -> None:
+    pass
+h = f if bool() else g
+reveal_type(h) # E: Revealed type is 'builtins.function'
+h(7) # E: Cannot call function of unknown type
+[builtins fixtures/bool.pyi]
+
+-- Positional-only arguments
+-- -------------------------
+
+[case testPositionalOnlyArg]
+def f(__a: int) -> None: pass
+
+f(1)
+f(__a=1) # E: Unexpected keyword argument "__a" for "f"
+
+[builtins fixtures/bool.pyi]
+[out]
+main:1: note: "f" defined here
+
+[case testPositionalOnlyArgFastparse]
+
+
+def f(__a: int) -> None: pass
+
+f(1)
+f(__a=1) # E: Unexpected keyword argument "__a" for "f"
+
+[builtins fixtures/bool.pyi]
+[out]
+main:3: note: "f" defined here
+
+[case testMagicMethodPositionalOnlyArg]
+class A(object):
+    def __eq__(self, other) -> bool: return True # We are all equal.  # N: "__eq__" of "A" defined here
+
+a = A()
+a.__eq__(a)
+a.__eq__(other=a) # E: Unexpected keyword argument "other" for "__eq__" of "A"
+
+[builtins fixtures/bool.pyi]
+
+[case testMagicMethodPositionalOnlyArgFastparse]
+
+
+class A(object):
+    def __eq__(self, other) -> bool: return True # We are all equal.  # N: "__eq__" of "A" defined here
+
+a = A()
+a.__eq__(a)
+a.__eq__(other=a) # E: Unexpected keyword argument "other" for "__eq__" of "A"
+
+[builtins fixtures/bool.pyi]
+
+[case testTupleArguments]
+# flags: --python-version 2.7
+
+def f(a, (b, c), d): pass
+
+[case testTupleArgumentsFastparse]
+# flags: --python-version 2.7
+
+def f(a, (b, c), d): pass
+
+-- Type variable shenanigans
+-- -------------------------
+
+[case testGenericFunctionTypeDecl]
+from typing import Callable, TypeVar
+
+T = TypeVar('T')
+
+f: Callable[[T], T]
+reveal_type(f)  # E: Revealed type is 'def [T] (T`-1) -> T`-1'
+def g(__x: T) -> T: pass
+f = g
+reveal_type(f)  # E: Revealed type is 'def [T] (T`-1) -> T`-1'
+i = f(3)
+reveal_type(i)  # E: Revealed type is 'builtins.int*'
+
+[case testFunctionReturningGenericFunction]
+from typing import Callable, TypeVar
+
+T = TypeVar('T')
+def deco() -> Callable[[T], T]: pass
+reveal_type(deco)  # E: Revealed type is 'def () -> def [T] (T`-1) -> T`-1'
+f = deco()
+reveal_type(f)  # E: Revealed type is 'def [T] (T`-1) -> T`-1'
+i = f(3)
+reveal_type(i)  # E: Revealed type is 'builtins.int*'
+
+[case testFunctionReturningGenericFunctionPartialBinding]
+from typing import Callable, TypeVar
+
+T = TypeVar('T')
+U = TypeVar('U')
+
+def deco(x: U) -> Callable[[T, U], T]: pass
+reveal_type(deco)  # E: Revealed type is 'def [U] (x: U`-1) -> def [T] (T`-2, U`-1) -> T`-2'
+f = deco("foo")
+reveal_type(f)  # E: Revealed type is 'def [T] (T`-2, builtins.str*) -> T`-2'
+i = f(3, "eggs")
+reveal_type(i)  # E: Revealed type is 'builtins.int*'
+
+[case testFunctionReturningGenericFunctionTwoLevelBinding]
+from typing import Callable, TypeVar
+
+T = TypeVar('T')
+R = TypeVar('R')
+def deco() -> Callable[[T], Callable[[T, R], R]]: pass
+f = deco()
+reveal_type(f)  # E: Revealed type is 'def [T] (T`-1) -> def [R] (T`-1, R`-2) -> R`-2'
+g = f(3)
+reveal_type(g)  # E: Revealed type is 'def [R] (builtins.int*, R`-2) -> R`-2'
+s = g(4, "foo")
+reveal_type(s)  # E: Revealed type is 'builtins.str*'
+
+[case testGenericFunctionReturnAsDecorator]
+from typing import Callable, TypeVar
+
+T = TypeVar('T')
+def deco(__i: int) -> Callable[[T], T]: pass
+
+@deco(3)
+def lol(x: int) -> str: ...
+
+reveal_type(lol)  # E: Revealed type is 'def (x: builtins.int) -> builtins.str'
+s = lol(4)
+reveal_type(s)  # E: Revealed type is 'builtins.str'
+
+[case testGenericFunctionOnReturnTypeOnly]
+from typing import TypeVar, List
+
+T = TypeVar('T')
+
+def make_list() -> List[T]: pass
+
+l: List[int] = make_list()
+
+bad = make_list()  # E: Need type annotation for variable
+[builtins fixtures/list.pyi]
+
+[case testAnonymousArgumentError]
+def foo(__b: int, x: int, y: int) -> int: pass
+foo(x=2, y=2)  # E: Missing positional argument
+foo(y=2)  # E: Missing positional arguments
+
+[case testReturnTypeLineNumberWithDecorator]
+def dec(f): pass
+
+@dec
+def test(a: str) -> (str,): # E: Invalid tuple literal type
+    return None
+
+[case testReturnTypeLineNumberNewLine]
+def fn(a: str
+       ) -> badtype: # E: Name 'badtype' is not defined
+    pass
+
+[case testArgumentTypeLineNumberWithDecorator]
+def dec(f): pass
+
+@dec
+def some_method(self: badtype): pass # E: Name 'badtype' is not defined
+
+[case TestArgumentTypeLineNumberNewline]
+def fn(
+        a: badtype) -> None: # E: Name 'badtype' is not defined
+    pass
+
+[case testInferredTypeSubTypeOfReturnType]
+from typing import Union, Dict, List
+def f() -> List[Union[str, int]]:
+    x = ['a']
+    return x # E: Incompatible return value type (got List[str], expected List[Union[str, int]]) \
+# N: Perhaps you need a type annotation for "x"? Suggestion: List[Union[str, int]]
+
+def g() -> Dict[str, Union[str, int]]:
+    x = {'a': 'a'}
+    return x # E: Incompatible return value type (got Dict[str, str], expected Dict[str, Union[str, int]]) \
+# N: Perhaps you need a type annotation for "x"? Suggestion: Dict[str, Union[str, int]]
+
+def h() -> Dict[Union[str, int], str]:
+    x = {'a': 'a'}
+    return x # E: Incompatible return value type (got Dict[str, str], expected Dict[Union[str, int], str]) \
+# N: Perhaps you need a type annotation for "x"? Suggestion: Dict[Union[str, int], str]
+
+def i() -> List[Union[int, float]]:
+    x: List[int] = [1]
+    return x # E: Incompatible return value type (got List[int], expected List[Union[int, float]]) \
+# N: Perhaps you need a type annotation for "x"? Suggestion: List[Union[int, float]]
+
+[builtins fixtures/dict.pyi]
+
+[case testInferredTypeNotSubTypeOfReturnType]
+from typing import Union, List
+def f() -> List[Union[int, float]]:
+    x = ['a']
+    return x # E: Incompatible return value type (got List[str], expected List[Union[int, float]])
+
+def g() -> List[Union[str, int]]:
+    x = ('a', 2)
+    return x # E: Incompatible return value type (got "Tuple[str, int]", expected List[Union[str, int]])
+
+[builtins fixtures/list.pyi]
+
+[case testInferredTypeIsObjectMismatch]
+from typing import Union, Dict, List
+def f() -> Dict[str, Union[str, int]]:
+    x = {'a': 'a', 'b': 2}
+    return x # E: Incompatible return value type (got Dict[str, object], expected Dict[str, Union[str, int]])
+
+def g() -> Dict[str, Union[str, int]]:
+    x: Dict[str, Union[str, int]] = {'a': 'a', 'b': 2}
+    return x
+
+def h() -> List[Union[str, int]]:
+    x = ['a', 2]
+    return x # E: Incompatible return value type (got List[object], expected List[Union[str, int]])
+
+def i() -> List[Union[str, int]]:
+    x: List[Union[str, int]] = ['a', 2]
+    return x
+
+[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/check-generic-subtyping.test b/test-data/unit/check-generic-subtyping.test
new file mode 100644
index 0000000..6cae196
--- /dev/null
+++ b/test-data/unit/check-generic-subtyping.test
@@ -0,0 +1,763 @@
+-- Test cases for the type checker related to subtyping and inheritance with
+-- generics.
+
+
+-- Subtyping + inheritance
+-- -----------------------
+
+
+[case testSubtypingAndInheritingNonGenericTypeFromGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ac = None # type: A[C]
+ad = None # type: A[D]
+b = None # type: B
+
+b = ad # E: Incompatible types in assignment (expression has type A[D], variable has type "B")
+ad = b # E: Incompatible types in assignment (expression has type "B", variable has type A[D])
+b = ac # E: Incompatible types in assignment (expression has type A[C], variable has type "B")
+
+b = b
+ac = b
+
+class C: pass
+class A(Generic[T]): pass
+class B(A[C]): pass
+class D: pass
+
+[case testSubtypingAndInheritingGenericTypeFromNonGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a = None # type: A
+bc = None # type: B[C]
+bd = None # type: B[D]
+
+bc = bd # E: Incompatible types in assignment (expression has type B[D], variable has type B[C])
+bd = bc # E: Incompatible types in assignment (expression has type B[C], variable has type B[D])
+bc = a  # E: Incompatible types in assignment (expression has type "A", variable has type B[C])
+bd = a  # E: Incompatible types in assignment (expression has type "A", variable has type B[D])
+
+a = bc
+a = bd
+
+class A: pass
+class B(A, Generic[T]): pass
+class C: pass
+class D: pass
+
+[case testSubtypingAndInheritingGenericTypeFromGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+ac = None # type: A[C]
+ad = None # type: A[D]
+bcc = None # type: B[C, C]
+bdc = None # type: B[D, C]
+
+ad = bcc # E: Incompatible types in assignment (expression has type B[C, C], variable has type A[D])
+ad = bdc # E: Incompatible types in assignment (expression has type B[D, C], variable has type A[D])
+bcc = ac # E: Incompatible types in assignment (expression has type A[C], variable has type B[C, C])
+bdc = ac # E: Incompatible types in assignment (expression has type A[C], variable has type B[D, C])
+
+bcc = bcc
+bdc = bdc
+ac = bcc
+ac = bdc
+
+class A(Generic[T]): pass
+class B(A[S], Generic[T, S]): pass
+class C: pass
+class D: pass
+
+[case testSubtypingAndInheritingGenericTypeFromGenericTypeAcrossHierarchy]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+X = TypeVar('X')
+Y = TypeVar('Y')
+ae = None # type: A[A[E]]
+af = None # type: A[A[F]]
+
+cef = None # type: C[E, F]
+cff = None # type: C[F, F]
+cfe = None # type: C[F, E]
+
+ae = cef # E: Incompatible types in assignment (expression has type C[E, F], variable has type A[A[E]])
+af = cfe # E: Incompatible types in assignment (expression has type C[F, E], variable has type A[A[F]])
+
+ae = cfe
+af = cef
+af = cff
+
+class A(Generic[T]): pass
+class B(A[S], Generic[T, S]): pass
+class C(B[A[X], A[Y]], Generic[X, Y]): pass
+class E: pass
+class F: pass
+
+[case testIncludingBaseClassTwice]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class I(Generic[t]): pass
+class A(I[C], I[object]): pass # E: Duplicate base class "I"
+class C: pass
+
+
+-- Accessing inherited generic members
+-- -----------------------------------
+
+
+[case testAccessingMethodInheritedFromGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+b = None # type: B[C, D]
+c, d = None, None # type: (C, D)
+
+b.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "D"
+b.f(d)
+
+class A(Generic[T]):
+    def f(self, a: T) -> None:
+        pass
+class B(A[S], Generic[T, S]): pass
+class C: pass
+class D: pass
+
+[case testAccessingMethodInheritedFromGenericTypeInNonGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+b, c, d = None, None, None # type: (B, C, D)
+
+b.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "D"
+b.f(d)
+
+class C: pass
+class D: pass
+class A(Generic[T]):
+    def f(self, a: T) -> None:
+        pass
+class B(A[D]): pass
+
+[case testAccessingMemberVarInheritedFromGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class A(Generic[T]):
+    def __init__(self, a: T) -> None:
+        self.a = a
+
+b = None # type: B[C, D]
+c, d = None, None # type: (C, D)
+
+b.a = c # E: Incompatible types in assignment (expression has type "C", variable has type "D")
+b.a = d
+
+class B(A[S], Generic[T, S]): pass
+class C: pass
+class D: pass
+
+
+-- Overriding with generic types
+-- -----------------------------
+
+
+[case testOverridingMethodInSimpleTypeInheritingGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class B(Generic[T]):
+    def f(self, a: T) -> None: pass
+    def g(self, a: T) -> None: pass
+class C: pass
+class D: pass
+class A(B[C]):
+    def f(self, a: D) -> None: pass \
+        # E: Argument 1 of "f" incompatible with supertype "B"
+    def g(self, a: C) -> None: pass
+[out]
+
+[case testOverridingMethodInGenericTypeInheritingSimpleType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class C: pass
+class B:
+    def f(self, a: C) -> None: pass
+    def g(self, a: C) -> None: pass
+class A(B, Generic[T]):
+    def f(self, a: T) -> None: pass \
+        # E: Argument 1 of "f" incompatible with supertype "B"
+    def g(self, a: 'C') -> None: pass
+[out]
+
+[case testOverridingMethodInGenericTypeInheritingGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class B(Generic[T]):
+    def f(self, a: T) -> None: pass
+    def g(self, a: T) -> None: pass
+class A(B[S], Generic[T, S]):
+    def f(self, a: T) -> None: pass \
+        # E: Argument 1 of "f" incompatible with supertype "B"
+    def g(self, a: S) -> None: pass
+[out]
+
+[case testOverridingMethodInMultilevelHierarchyOfGenericTypes]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+U = TypeVar('U')
+V = TypeVar('V')
+
+class D: pass
+class C(Generic[T, U, V]):
+    def f(self, a: V) -> None: pass
+    def g(self, a: V) -> None: pass
+class B(C[D, D, T], Generic[T]): pass
+class A(B[S], Generic[T, S]):
+    def f(self, a: T) -> None: pass \
+        # E: Argument 1 of "f" incompatible with supertype "C"
+    def g(self, a: S) -> None: pass
+[out]
+
+[case testOverrideGenericMethodInNonGenericClass]
+from typing import TypeVar
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class A:
+    def f(self, x: T, y: S) -> None: pass
+class B(A):
+    def f(self, x: S, y: T) -> None: pass
+class C(A):
+    # Okay, because T = object allows any type for the arguments.
+    def f(self, x: T, y: T) -> None: pass
+
+[case testOverrideGenericMethodInNonGenericClassLists]
+from typing import TypeVar, List
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class A:
+    def f(self, x: List[T], y: List[S]) -> None: pass
+class B(A):
+    def f(self, x: List[S], y: List[T]) -> None: pass
+class C(A):
+    def f(self, x: List[T], y: List[T]) -> None: pass # E: Signature of "f" incompatible with supertype "A"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testOverrideGenericMethodInNonGenericClassGeneralize]
+from typing import TypeVar
+
+T = TypeVar('T')
+T1 = TypeVar('T1', bound=str)
+S = TypeVar('S')
+
+class A:
+    def f(self, x: int, y: S) -> None: pass
+class B(A):
+    def f(self, x: T, y: S) -> None: pass
+class C(A):
+    def f(self, x: T, y: str) -> None: pass
+class D(A):
+    def f(self, x: T1, y: S) -> None: pass # TODO: This error could be more specific.
+[out]
+main:12: error: Argument 2 of "f" incompatible with supertype "A"
+main:14: error: Signature of "f" incompatible with supertype "A"
+
+
+-- Inheritance from generic types with implicit dynamic supertype
+-- --------------------------------------------------------------
+
+
+[case testInheritanceFromGenericWithImplicitDynamicAndSubtyping]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a = None # type: A
+bc = None # type: B[C]
+bd = None # type: B[D]
+
+a = bc # E: Incompatible types in assignment (expression has type B[C], variable has type "A")
+bc = a
+bd = a
+
+class B(Generic[T]): pass
+class A(B): pass
+class C: pass
+class D: pass
+[out]
+
+[case testInheritanceFromGenericWithImplicitDynamicAndExternalAccess]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+
+class B(Generic[T]):
+    def f(self, a: 'B[T]') -> None: pass
+    def __init__(self, x: 'B[T]') -> None:
+        self.x = x
+class A(B): pass
+class C: pass
+
+a = None # type: A
+c = None # type: C
+bc = None # type: B[C]
+
+a.x = c # E: Incompatible types in assignment (expression has type "C", variable has type B[Any])
+a.f(c)  # E: Argument 1 to "f" of "B" has incompatible type "C"; expected B[Any]
+a.x = bc
+a.f(bc)
+[out]
+
+[case testInheritanceFromGenericWithImplicitDynamic]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a = None # type: A
+c = None # type: C
+bc = None # type: B[C]
+
+class B(Generic[T]):
+  def f(self, a: 'B[T]') -> None: pass
+  def __init__(self, x: 'B[T]') -> None:
+    self.x = x
+
+class A(B):
+  def g(self) -> None:
+    self.x = c # E: Incompatible types in assignment (expression has type "C", variable has type B[Any])
+    self.f(c)  # E: Argument 1 to "f" of "B" has incompatible type "C"; expected B[Any]
+    self.x = bc
+    self.f(bc)
+
+class C: pass
+[out]
+
+[case testInheritanceFromGenericWithImplicitDynamicAndOverriding]
+from typing import TypeVar, Generic, Tuple
+T = TypeVar('T')
+class B(Generic[T]):
+    def f(self, a: T, b: 'Tuple[T, B[T]]') -> None:
+        pass
+class A(B):
+    def f(self, a, b): pass
+[builtins fixtures/tuple.pyi]
+[out]
+
+
+-- Inheritance from generic types and super expressions
+-- ----------------------------------------------------
+
+
+[case testSuperExpressionsWhenInheritingFromGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class B(Generic[T]):
+    def f(self, a: T) -> None: pass
+class A(B[S], Generic[T, S]):
+    def g(self, t: T, s: S) -> None:
+        super().f(t)   # E: Argument 1 to "f" of "B" has incompatible type "T"; expected "S"
+        super().f(s)
+[out]
+
+[case testSuperExpressionsWhenInheritingFromGenericTypeAndDeepHierarchy]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+U = TypeVar('U')
+V = TypeVar('V')
+class C(Generic[T, U, V]):
+    def f(self, a: V) -> None: pass
+class D: pass
+class B(C[D, D, T], Generic[T]): pass
+class A(B[S], Generic[T, S]):
+    def g(self, t: T, s: S) -> None:
+        super().f(t)   # E: Argument 1 to "f" of "C" has incompatible type "T"; expected "S"
+        super().f(s)
+[out]
+
+
+-- Type of inherited constructor
+-- -----------------------------
+
+
+[case testInheritedConstructor]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def __init__(self, x: T) -> None: pass
+class B(A[T], Generic[T]): pass
+class C(A[int]): pass
+class D(A[A[T]], Generic[T]): pass
+B(1)
+C(1)
+C('a')  # E: Argument 1 to "C" has incompatible type "str"; expected "int"
+D(A(1))
+D(1)  # E: Argument 1 to "D" has incompatible type "int"; expected A[<nothing>]
+
+
+[case testInheritedConstructor2]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+U = TypeVar('U')
+Z = TypeVar('Z')
+class A(Generic[T, U]):
+    def __init__(self, x: T, y: U, z: Z) -> None: pass
+class B(A[int, T], Generic[T]): pass
+class C(B[A[T, str]], Generic[T, U]): pass
+# C[T, U] <: B[A[T, str]] <: A[int, A[T, str]]
+C(1, A(1, 'a', 0), 'z')
+C(1, A('1', 'a', 0), 'z')
+C('1', A(1, 'a', 0), 'z')  # E: Argument 1 to "C" has incompatible type "str"; expected "int"
+C(1, A(1, 1, 0), 'z')  # E: Argument 2 to "A" has incompatible type "int"; expected "str"
+
+
+-- Subtyping with a generic abstract base class
+-- --------------------------------------------
+
+
+[case testSubtypingWithGenericTypeSubclassingGenericAbstractClass]
+from typing import TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+S = TypeVar('S')
+acd = None # type: A[C, D]
+adc = None # type: A[D, C]
+ic = None # type: I[C]
+id = None # type: I[D]
+
+ic = acd # E: Incompatible types in assignment (expression has type A[C, D], variable has type I[C])
+id = adc # E: Incompatible types in assignment (expression has type A[D, C], variable has type I[D])
+adc = ic # E: Incompatible types in assignment (expression has type I[C], variable has type A[D, C])
+
+ic = adc
+id = acd
+
+class I(Generic[T]):
+    @abstractmethod
+    def f(self): pass
+class A(I[S], Generic[T, S]): pass
+class C: pass
+class D: pass
+
+[case testSubtypingWithTypeImplementingGenericABCViaInheritance]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+a, b = None, None # type: (A, B)
+ic, id, ie = None, None, None # type: (I[C], I[D], I[E])
+
+class I(Generic[S]): pass
+class B(I[C]): pass
+class A(B): pass
+
+ie = a # E: Incompatible types in assignment (expression has type "A", variable has type I[E])
+a = ic # E: Incompatible types in assignment (expression has type I[C], variable has type "A")
+a = id # E: Incompatible types in assignment (expression has type I[D], variable has type "A")
+a = b  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+id = a # E: Incompatible types in assignment (expression has type "A", variable has type I[D])
+
+ic = a
+b = a
+
+class C: pass
+class D: pass
+class E: pass
+[out]
+
+[case testSubtypingWithTypeImplementingGenericABCViaInheritance2-skip]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class I(Generic[T]): pass
+class A(I[C]): pass
+class B(A, I[D]): pass # Fail
+
+class C: pass
+class D: pass
+[out]
+main:5: error: Class "B" has base "I" duplicated inconsistently
+
+[case testSubtypingAndABCExtension]
+from typing import TypeVar, Generic
+from abc import abstractmethod, ABCMeta
+t = TypeVar('t')
+a, i, j = None, None, None # type: (A[object], I[object], J[object])
+(ii, jj) = (i, j)
+ii = a
+jj = a
+jj = i
+a = i # E: Incompatible types in assignment (expression has type I[object], variable has type A[object])
+a = j # E: Incompatible types in assignment (expression has type J[object], variable has type A[object])
+
+class J(Generic[t]): pass
+class X(metaclass=ABCMeta): pass
+class I(X, J[t], Generic[t]): pass
+class A(I[t], Generic[t]): pass
+
+
+-- Subclassing a generic ABC
+-- -------------------------
+
+
+[case testSubclassingGenericABC1]
+from typing import TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T) -> None: pass
+    @abstractmethod
+    def g(self, a: T) -> None: pass
+class A(I[C]):
+    def f(self, a: 'D') -> None: pass \
+        # E: Argument 1 of "f" incompatible with supertype "I"
+    def g(self, a: 'C') -> None: pass
+class C: pass
+class D: pass
+[out]
+
+
+-- Extending a generic ABC with deep type hierarchy
+-- ------------------------------------------------
+
+
+[case testSubclassingGenericABCWithDeepHierarchy]
+from typing import Any, TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+a = None # type: A
+ic, id = None, None # type: (I[C], I[D])
+
+id = a # Fail
+ic = a
+
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T, b: T) -> None: pass
+    @abstractmethod
+    def g(self, a: T, b: 'D') -> None: pass
+class B(I[C]):
+    def f(self, a: 'C', b: 'C') -> None: pass
+    def g(self, a: 'C', b: Any) -> None: pass
+class A(B):
+    def g(self, a: 'C', b: 'C') -> None: pass \
+        # E: Argument 2 of "g" incompatible with supertype "I"
+    def f(self, a: 'C', b: 'C') -> None: pass
+class C: pass
+class D: pass
+[out]
+main:7: error: Incompatible types in assignment (expression has type "A", variable has type I[D])
+
+[case testSubclassingGenericABCWithDeepHierarchy2]
+from typing import Any, TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T, b: T) -> None: pass
+class B(I[C]):
+    def f(self, a: 'C', b: Any) -> None: pass
+class A(B):
+    def f(self, a: 'C', b: 'D') -> None: pass \
+        # E: Argument 2 of "f" incompatible with supertype "I"
+class C: pass
+class D: pass
+[out]
+
+
+-- Implicit Any types and subclassing generic ABC
+-- ----------------------------------------------
+
+
+[case testSubclassingGenericABCWithImplicitAny]
+from typing import Any, TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+a = None # type: Any
+ic = None # type: I[C]
+id = None # type: I[D]
+
+ic = a
+id = a
+
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T) -> None: pass
+class A(I):
+    def f(self, a): pass
+
+class C: pass
+class D: pass
+
+[case testSubclassingGenericABCWithImplicitAnyAndDeepHierarchy]
+from typing import Any, TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+a = None # type: Any
+ic = None # type: I[C]
+id = None # type: I[D]
+
+ic = a
+id = a
+
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T, b: T) -> None: pass
+class B(I):
+    def f(self, a, b): pass
+class A(B):
+    def f(self, a: 'C', b: 'D') -> None: pass
+class C: pass
+class D: pass
+
+[case testImplementingGenericABCWithImplicitAnyAndDeepHierarchy2]
+from typing import Any, TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+a = None # type: Any
+jc = None # type: J[C]
+jd = None # type: J[D]
+
+jc = a
+jd = a
+
+class J(Generic[T]):
+    @abstractmethod
+    def f(self, a: T, b: T) -> None: pass
+class I(J):
+    @abstractmethod
+    def f(self, a, b): pass
+class A(I):
+    def f(self, a: 'C', b: 'D') -> None: pass
+
+class C: pass
+class D: pass
+
+
+-- Accessing generic ABC members
+-- -----------------------------
+
+
+[case testAccessingGenericABCMembers]
+from typing import TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T) -> None: pass
+class A: pass
+class B: pass
+
+a, b = None, None # type: (A, B)
+ia = None # type: I[A]
+
+ia.f(b)  # E: Argument 1 to "f" of "I" has incompatible type "B"; expected "A"
+ia.f(a)
+
+[case testAccessingInheritedGenericABCMembers]
+from typing import TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+class J(Generic[T]):
+    @abstractmethod
+    def f(self, a: T) -> None: pass
+class I(J[T], Generic[T]): pass
+class A: pass
+class B: pass
+a, b = None, None # type: (A, B)
+ia = None # type: I[A]
+
+ia.f(b)  # E: Argument 1 to "f" of "J" has incompatible type "B"; expected "A"
+ia.f(a)
+
+
+-- Misc
+-- ----
+
+
+[case testMultipleAssignmentAndGenericSubtyping]
+from typing import Iterable
+n, s = None, None # type: int, str
+class Nums(Iterable[int]):
+    def __iter__(self): pass
+    def __next__(self): pass
+n, n = Nums()
+s, s = Nums() # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Variance
+-- --------
+
+
+[case testCovariant]
+from typing import TypeVar, Generic
+T = TypeVar('T', covariant=True)
+
+class G(Generic[T]): pass
+class A: pass
+class B(A): pass
+class C(B): pass
+
+a = None  # type: G[A]
+b = None  # type: G[B]
+c = None  # type: G[C]
+
+b = a  # E: Incompatible types in assignment (expression has type G[A], variable has type G[B])
+b = c
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testContravariant]
+from typing import TypeVar, Generic
+T = TypeVar('T', contravariant=True)
+
+class G(Generic[T]): pass
+class A: pass
+class B(A): pass
+class C(B): pass
+
+a = None  # type: G[A]
+b = None  # type: G[B]
+c = None  # type: G[C]
+
+b = a
+b = c  # E: Incompatible types in assignment (expression has type G[C], variable has type G[B])
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testInvariant]
+from typing import TypeVar, Generic
+T = TypeVar('T')  # invariant (default)
+
+class G(Generic[T]): pass
+class A: pass
+class B(A): pass
+class C(B): pass
+
+a = None  # type: G[A]
+b = None  # type: G[B]
+c = None  # type: G[C]
+
+b = a  # E: Incompatible types in assignment (expression has type G[A], variable has type G[B])
+b = c  # E: Incompatible types in assignment (expression has type G[C], variable has type G[B])
+[builtins fixtures/bool.pyi]
+[out]
+
+
+[case testTypeVarSubtypeUnion]
+from typing import Union, TypeVar, Generic
+
+class U: pass
+class W: pass
+
+T = TypeVar('T', bound=Union[U, W])
+
+class Y(Generic[T]):
+    def __init__(self) -> None:
+        pass
+    def f(self) -> T:
+        return U()  # E: Incompatible return value type (got "U", expected "T")
+
+[out]
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
new file mode 100644
index 0000000..bb05e03
--- /dev/null
+++ b/test-data/unit/check-generics.test
@@ -0,0 +1,1749 @@
+-- Simple generic types
+-- --------------------
+
+
+[case testGenericMethodReturnType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a, b, c = None, None, None # type: (A[B], B, C)
+c = a.f() # Fail
+b = a.f()
+
+class A(Generic[T]):
+    def f(self) -> T: pass
+
+class B: pass
+class C: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "C")
+
+[case testGenericMethodArgument]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a.f(c) # Fail
+a.f(b)
+
+a = None # type: A[B]
+b = None # type: B
+c = None # type: C
+
+class A(Generic[T]):
+    def f(self, a: T) -> None: pass
+
+class B: pass
+class C: pass
+[out]
+main:3: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B"
+
+[case testGenericMemberVariable]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def __init__(self, v: T) -> None:
+        self.v = v
+
+a, b, c = None, None, None # type: (A[B], B, C)
+a.v = c # Fail
+a.v = b
+
+class B: pass
+class C: pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+[case testGenericMemberVariable]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a, b, c = None, None, None # type: (A[B], B, C)
+a.v = c # Fail
+a.v = b
+
+class A(Generic[T]):
+    v = None # type: T
+class B: pass
+class C: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+[case testSimpleGenericSubtyping]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+b, bb, c = None, None, None # type: (A[B], A[B], A[C])
+c = b # Fail
+b = c # Fail
+
+b = b
+b = bb
+
+class A(Generic[T]): pass
+class B: pass
+class C(B): pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
+main:5: error: Incompatible types in assignment (expression has type A[C], variable has type A[B])
+
+[case testGenericTypeCompatibilityWithAny]
+from typing import Any, TypeVar, Generic
+T = TypeVar('T')
+b, c, d = None, None, None # type: (A[B], A[C], A[Any])
+
+b = d
+c = d
+d = b
+d = c
+
+class A(Generic[T]): pass
+class B: pass
+class C(B): pass
+[out]
+
+[case testTypeVariableAsTypeArgument]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a = None # type: A[B]
+b = None # type: A[B]
+c = None # type: A[C]
+
+a.v = c # Fail
+c = a.v # Fail
+a.v = b
+b = a.v
+
+class A(Generic[T]):
+    v = None # type: A[T]
+
+class B: pass
+class C: pass
+[out]
+main:7: error: Incompatible types in assignment (expression has type A[C], variable has type A[B])
+main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
+
+[case testMultipleGenericTypeParametersWithMemberVars]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+a = None # type: A[B, C]
+s = None # type: B
+t = None # type: C
+
+t = a.s # Fail
+s = a.t # Fail
+
+s = a.s
+t = a.t
+
+class A(Generic[S, T]):
+    s = None # type: S
+    t = None # type: T
+class B: pass
+class C: pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "B", variable has type "C")
+main:9: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+[case testMultipleGenericTypeParametersWithMethods]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+a = None # type: A[B, C]
+s = None # type: B
+t = None # type: C
+
+a.f(s, s) # Fail
+a.f(t, t) # Fail
+a.f(s, t)
+
+class A(Generic[S, T]):
+    def f(self, s: S, t: T) -> None: pass
+class B: pass
+class C: pass
+[out]
+main:8: error: Argument 2 to "f" of "A" has incompatible type "B"; expected "C"
+main:9: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B"
+
+[case testMultipleGenericTypeParametersAndSubtyping]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+bc = None # type: A[B, C]
+bb = None # type: A[B, B]
+cb = None # type: A[C, B]
+
+bb = bc # Fail
+bb = cb # Fail
+bc = bb # Fail
+
+bb = bb
+bc = bc
+
+class A(Generic[S, T]):
+    s = None # type: S
+    t = None # type: T
+
+class B: pass
+class C(B):pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type A[B, C], variable has type A[B, B])
+main:9: error: Incompatible types in assignment (expression has type A[C, B], variable has type A[B, B])
+main:10: error: Incompatible types in assignment (expression has type A[B, B], variable has type A[B, C])
+
+
+-- Simple generic type bodies
+-- --------------------------
+
+
+[case testGenericTypeBody1]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    a = None # type: T
+
+    def f(self, b: T) -> T:
+        self.f(x)     # Fail
+        d = self # type: A[B] # Fail
+        self.a = self.f(self.a)
+        return self.a
+        c = self # type: A[T]
+x = None # type: B
+class B: pass
+[out]
+main:7: error: Argument 1 to "f" of "A" has incompatible type "B"; expected "T"
+main:8: error: Incompatible types in assignment (expression has type A[T], variable has type A[B])
+
+[case testGenericTypeBodyWithMultipleVariables]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+class A(Generic[S, T]):
+    def f(self) -> None:
+        s = None # type: S
+        t = None # type: T
+        s = t # Fail
+        t = s # Fail
+        a = self # type: A[S, B] # Fail
+        b = self # type: A[T, T] # Fail
+        c = self # type: A[S, T]
+        t = t
+
+class B: pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "T", variable has type "S")
+main:9: error: Incompatible types in assignment (expression has type "S", variable has type "T")
+main:10: error: Incompatible types in assignment (expression has type A[S, T], variable has type A[S, B])
+main:11: error: Incompatible types in assignment (expression has type A[S, T], variable has type A[T, T])
+
+[case testCompatibilityOfNoneWithTypeVar]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def f(self) -> None:
+        a = None # type: T
+        a = None
+[out]
+
+[case testCompatibilityOfTypeVarWithObject]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def f(self) -> T:
+        a = object() # type: T  # Fail
+        a = object()    # Fail
+        b = self.f() # type: object
+        b = self.f()
+        return None
+[out]
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "T")
+main:6: error: Incompatible types in assignment (expression has type "object", variable has type "T")
+
+
+-- Operations with generic types
+-- -----------------------------
+
+
+[case testGenericOperations]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+a = None # type: A[B, C]
+b = None # type: B
+c = None # type: C
+
+b = a + b # Fail
+c = a + c # Fail
+c = a[c]  # Fail
+b = a[b]  # Fail
+
+c = a + b
+b = a[c]
+
+class A(Generic[S, T]):
+    def __add__(self, a: S) -> T: pass
+    def __getitem__(self, i: T) -> S: pass
+
+class B: pass
+class C: pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+main:9: error: Unsupported operand types for + (A[B, C] and "C")
+main:10: error: Incompatible types in assignment (expression has type "B", variable has type "C")
+main:11: error: Invalid index type "B" for A[B, C]; expected type "C"
+
+[case testOperatorAssignmentWithIndexLvalue1]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+b = None # type: B
+c = None # type: C
+ac = None # type: A[C]
+
+ac[b] += b # Fail
+ac[c] += c # Fail
+ac[b] += c
+ac[b] = ac[b] + c
+
+class A(Generic[T]):
+    def __getitem__(self, i: 'B') -> T: pass
+    def __setitem__(self, i: 'B', v: T) -> None: pass
+
+class B: pass
+class C:
+    def __add__(self, o: 'C') -> 'C': pass
+[out]
+main:7: error: Unsupported operand types for + ("C" and "B")
+main:8: error: Invalid index type "C" for A[C]; expected type "B"
+
+[case testOperatorAssignmentWithIndexLvalue2]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+b = None # type: B
+c = None # type: C
+ac = None # type: A[C]
+
+ac[b] += c        # Fail
+ac[c] += c        # Fail
+ac[b] = ac[b] + c # Fail
+
+class A(Generic[T]):
+    def __getitem__(self, i: 'B') -> T: pass
+    def __setitem__(self, i: 'C', v: T) -> None: pass
+
+class B: pass
+class C:
+    def __add__(self, o: 'C') -> 'C': pass
+[out]
+main:7: error: Invalid index type "B" for A[C]; expected type "C"
+main:8: error: Invalid index type "C" for A[C]; expected type "B"
+main:9: error: Invalid index type "B" for A[C]; expected type "C"
+
+
+-- Nested generic types
+-- --------------------
+
+
+[case testNestedGenericTypes]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+aab = None # type: A[A[B]]
+aac = None # type: A[A[C]]
+ab = None # type: A[B]
+ac = None # type: A[C]
+
+ac = aab.x # Fail
+ac.y = aab # Fail
+
+ab = aab.x
+ac = aac.x
+ab.y = aab
+ac.y = aac
+
+class A(Generic[T]):
+    x = None # type: T
+    y = None # type: A[A[T]]
+
+class B:
+    pass
+class C:
+    pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
+main:9: error: Incompatible types in assignment (expression has type A[A[B]], variable has type A[A[C]])
+
+
+-- Generic functions
+-- -----------------
+
+
+[case testTypeCheckingGenericFunctionBody]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+class A: pass
+class p(Generic[T, S]):
+    def __init__(self, t: T, a: S) -> None: pass
+def f(s: S, t: T) -> p[T, A]:
+    a = t # type: S # E: Incompatible types in assignment (expression has type "T", variable has type "S")
+    s = t           # E: Incompatible types in assignment (expression has type "T", variable has type "S")
+    p_s_a = None  # type: p[S, A]
+    if s:
+        return p_s_a # E: Incompatible return value type (got p[S, A], expected p[T, A])
+    b = t # type: T
+    c = s # type: S
+    p_t_a = None  # type: p[T, A]
+    return p_t_a
+[out]
+
+[case testTypeCheckingGenericMethodBody]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class p(Generic[T, S]):
+    def __init__(self, t: T, a: S) -> None: pass
+class A(Generic[T]):
+    def f(self, s: S, t: T) -> p[S, T]:
+        s = t # E: Incompatible types in assignment (expression has type "T", variable has type "S")
+        p_s_s = None  # type: p[S, S]
+        if s:
+            return p_s_s # E: Incompatible return value type (got p[S, S], expected p[S, T])
+        p_t_t = None  # type: p[T, T]
+        if t:
+            return p_t_t # E: Incompatible return value type (got p[T, T], expected p[S, T])
+        t = t
+        s = s
+        p_s_t = None  # type: p[S, T]
+        return p_s_t
+[out]
+
+[case testProhibitTypeApplicationToGenericFunctions]
+from typing import TypeVar
+T = TypeVar('T')
+def f(x: T) -> T: pass
+
+y = f[int]  # E: Type application is only supported for generic classes
+[out]
+
+
+-- Generic types in expressions
+-- ----------------------------
+
+
+[case testTypeApplicationArgs]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+Node[int]()  # E: Too few arguments for "Node"
+Node[int](1, 1, 1)  # E: Too many arguments for "Node"
+[out]
+
+[case testTypeApplicationTvars]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class A(Generic[T, S]): pass
+A[int]()  # E: Type application has too few types (2 expected)
+A[int, str, int]() # E: Type application has too many types (2 expected)
+[out]
+
+[case testInvalidTypeApplicationType]
+a = None # type: A
+class A: pass
+a[A]()  # E: Value of type "A" is not indexable
+A[A]()  # E: The type Type[A] is not generic and not indexable
+[out]
+
+[case testTypeApplicationArgTypes]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+
+Node[int](1)
+Node[int]('a')  # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
+
+class Dummy(Generic[T]):
+    def meth(self, x: T) -> None:
+        ...
+    def methout(self) -> T:
+        ...
+
+Dummy[int]().meth(1)
+Dummy[int]().meth('a')  # E: Argument 1 to "meth" of "Dummy" has incompatible type "str"; expected "int"
+reveal_type(Dummy[int]())  # E: Revealed type is '__main__.Dummy[builtins.int*]'
+reveal_type(Dummy[int]().methout())  # E: Revealed type is 'builtins.int*'
+[out]
+
+[case testTypeApplicationArgTypesSubclasses]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class C(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        ...
+
+class D(C[int, T]): ...
+
+D[str](1, 'a')
+D[str](1, 1)  # E: Argument 2 to "D" has incompatible type "int"; expected "str"
+
+class E(D[str]): ...
+E(1, 'a')
+E(1, 1)  # E: Argument 2 to "E" has incompatible type "int"; expected "str"
+[out]
+
+[case testTypeApplicationAlias]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+
+Alias = Node
+Alias[int](1)
+Alias[int]("a")  # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
+[out]
+
+[case testTypeApplicationCrash]
+type[int] # this was crashing, see #2302 (comment)  # E: The type Type[type] is not generic and not indexable
+[out]
+
+
+-- Generic type aliases
+-- --------------------
+
+[case testGenericTypeAliasesBasic]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        ...
+
+IntNode = Node[int, S]
+IntIntNode = Node[int, int]
+SameNode = Node[T, T]
+
+n = Node(1, 1) # type: IntIntNode
+n1 = Node(1, 'a') # type: IntIntNode # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
+
+m = Node(1, 1) # type: IntNode
+m1 = Node('x', 1) # type: IntNode # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
+m2 = Node(1, 1) # type: IntNode[str] # E: Argument 2 to "Node" has incompatible type "int"; expected "str"
+
+s = Node(1, 1) # type: SameNode[int]
+reveal_type(s) # E: Revealed type is '__main__.Node[builtins.int, builtins.int]'
+s1 = Node(1, 'x') # type: SameNode[int] # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
+
+[out]
+
+[case testGenericTypeAliasesBasic2]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        ...
+
+IntNode = Node[int, S]
+IntIntNode = Node[int, int]
+SameNode = Node[T, T]
+
+def output_bad() -> IntNode[str]:
+    return Node(1, 1) # Error - bad return type, see out
+
+def input(x: IntNode[str]) -> None:
+    pass
+input(Node(1, 's'))
+input(Node(1, 1)) # E: Argument 2 to "Node" has incompatible type "int"; expected "str"
+
+def output() -> IntNode[str]:
+    return Node(1, 'x')
+reveal_type(output()) # E: Revealed type is '__main__.Node[builtins.int, builtins.str]'
+
+def func(x: IntNode[T]) -> IntNode[T]:
+    return x
+reveal_type(func) # E: Revealed type is 'def [T] (x: __main__.Node[builtins.int, T`-1]) -> __main__.Node[builtins.int, T`-1]'
+
+func(1) # E: Argument 1 to "func" has incompatible type "int"; expected Node[int, <nothing>]
+func(Node('x', 1)) # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
+reveal_type(func(Node(1, 'x'))) # E: Revealed type is '__main__.Node[builtins.int, builtins.str*]'
+
+def func2(x: SameNode[T]) -> SameNode[T]:
+    return x
+reveal_type(func2) # E: Revealed type is 'def [T] (x: __main__.Node[T`-1, T`-1]) -> __main__.Node[T`-1, T`-1]'
+
+func2(Node(1, 'x')) # E: Cannot infer type argument 1 of "func2"
+y = func2(Node('x', 'x'))
+reveal_type(y) # E: Revealed type is '__main__.Node[builtins.str*, builtins.str*]'
+
+def wrap(x: T) -> IntNode[T]:
+    return Node(1, x)
+
+z = None # type: str
+reveal_type(wrap(z)) # E: Revealed type is '__main__.Node[builtins.int, builtins.str*]'
+
+[out]
+main:13: error: Argument 2 to "Node" has incompatible type "int"; expected "str"
+
+[case testGenericTypeAliasesWrongAliases]
+# flags: --show-column-numbers --python-version 3.6
+from typing import TypeVar, Generic, List, Callable, Tuple, Union
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        ...
+
+A = Node[T] # Error
+B = Node[T, T]
+C = Node[T, T, T] # Error
+D = Node[T, S]
+E = Node[Node[T, T], List[T]]
+
+F = Node[List[T, T], S] # Error
+G = Callable[..., List[T, T]] # Error
+H = Union[int, Tuple[T, Node[T]]] # Error
+h: H # Error
+h1: H[int, str] # Two errors here, wrong number of args for H, and for Node
+
+x = None # type: D[int, str]
+reveal_type(x)
+y = None # type: E[int]
+reveal_type(y)
+
+X = T # Error
+
+[builtins fixtures/list.pyi]
+[out]
+main:9:4: error: "Node" expects 2 type arguments, but 1 given
+main:11:4: error: "Node" expects 2 type arguments, but 3 given
+main:15:9: error: "list" expects 1 type argument, but 2 given
+main:16:18: error: "list" expects 1 type argument, but 2 given
+main:17:24: error: "Node" expects 2 type arguments, but 1 given
+main:18:3: error: "Node" expects 2 type arguments, but 1 given
+main:19:4: error: Bad number of arguments for type alias, expected: 1, given: 2
+main:19:4: error: "Node" expects 2 type arguments, but 1 given
+main:22:0: error: Revealed type is '__main__.Node[builtins.int, builtins.str]'
+main:24:0: error: Revealed type is '__main__.Node[__main__.Node[builtins.int, builtins.int], builtins.list[builtins.int]]'
+main:26:4: error: Type variable "__main__.T" is invalid as target for type alias
+
+[case testGenericTypeAliasesForAliases]
+from typing import TypeVar, Generic, List, Union
+T = TypeVar('T')
+S = TypeVar('S')
+
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        pass
+
+ListedNode = Node[List[T], List[S]]
+Second = ListedNode[int, T]
+Third = Union[int, Second[str]]
+
+def f2(x: T) -> Second[T]:
+    return Node([1], [x])
+reveal_type(f2('a')) # E: Revealed type is '__main__.Node[builtins.list[builtins.int], builtins.list[builtins.str*]]'
+
+def f3() -> Third:
+    return Node([1], ['x'])
+reveal_type(f3()) # E: Revealed type is 'Union[builtins.int, __main__.Node[builtins.list[builtins.int], builtins.list[builtins.str]]]'
+
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesAny]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        self.x = x
+        self.y = y
+
+IntNode = Node[int, S]
+AnyNode = Node[S, T]
+
+def output() -> IntNode[str]:
+    return Node(1, 'x')
+x = output() # type: IntNode # This is OK (implicit Any)
+
+y = None # type: IntNode
+y.x = 1
+y.x = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+y.y = 1 # Both are OK (implicit Any)
+y.y = 'x'
+
+z = Node(1, 'x') # type: AnyNode
+reveal_type(z) # E: Revealed type is '__main__.Node[Any, Any]'
+
+[out]
+
+[case testGenericTypeAliasesAcessingMethods]
+from typing import TypeVar, Generic, List
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        self.x = x
+    def meth(self) -> T:
+        return self.x
+
+ListedNode = Node[List[T]]
+l = None # type: ListedNode[int]
+l.x.append(1)
+l.meth().append(1)
+reveal_type(l.meth()) # E: Revealed type is 'builtins.list*[builtins.int]'
+l.meth().append('x') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+
+ListedNode[str]([]).x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type List[str])
+
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesSubclassing]
+from typing import TypeVar, Generic, Tuple, List
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+
+TupledNode = Node[Tuple[T, T]]
+
+class D(TupledNode[T]):
+    ...
+class L(List[TupledNode[T]]):
+    ...
+
+def f_bad(x: T) -> D[T]:
+    return D(1)  # Error, see out
+
+L[int]().append(Node((1, 1)))
+L[int]().append(5) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected Node[Tuple[int, int]]
+
+x = D((1, 1)) # type: D[int]
+y = D(5) # type: D[int] # E: Argument 1 to "D" has incompatible type "int"; expected "Tuple[int, int]"
+
+def f(x: T) -> D[T]:
+    return D((x, x))
+reveal_type(f('a'))  # E: Revealed type is '__main__.D[builtins.str*]'
+
+[builtins fixtures/list.pyi]
+[out]
+main:15: error: Argument 1 to "D" has incompatible type "int"; expected "Tuple[T, T]"
+
+[case testGenericTypeAliasesSubclassingBad]
+from typing import TypeVar, Generic, Tuple, Union
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+
+TupledNode = Node[Tuple[T, T]]
+UNode = Union[int, Node[T]]
+
+class C(TupledNode): ... # Same as TupledNode[Any]
+class D(TupledNode[T]): ...
+class E(Generic[T], UNode[T]): ... # E: Invalid base class
+
+reveal_type(D((1, 1))) # E: Revealed type is '__main__.D[builtins.int*]'
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesUnion]
+from typing import TypeVar, Generic, Union, Any
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        self.x = x
+
+UNode = Union[int, Node[T]]
+x = 1 # type: UNode[int]
+
+x + 1 # E: Unsupported left operand type for + (some union)
+if not isinstance(x, Node):
+    x + 1
+
+if not isinstance(x, int):
+   x.x = 1
+   x.x = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+def f(x: T) -> UNode[T]:
+    if 1:
+        return Node(x)
+    else:
+        return 1
+
+reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int, __main__.Node[builtins.int*]]'
+
+TNode = Union[T, Node[int]]
+s = 1 # type: TNode[str] # E: Incompatible types in assignment (expression has type "int", variable has type "Union[str, Node[int]]")
+
+if not isinstance(s, str):
+    s.x = 1
+
+z = None # type: TNode # Same as TNode[Any]
+z.x
+z.foo() # E: Item Node[int] of "Union[Any, Node[int]]" has no attribute "foo"
+
+[builtins fixtures/isinstance.pyi]
+
+[case testGenericTypeAliasesTuple]
+from typing import TypeVar, Tuple
+T = TypeVar('T')
+
+SameTP = Tuple[T, T]
+IntTP = Tuple[int, T]
+
+def f1(x: T) -> SameTP[T]:
+    return x, x
+
+a, b, c = f1(1) # E: Need more than 2 values to unpack (3 expected)
+x, y = f1(1)
+reveal_type(x) # E: Revealed type is 'builtins.int'
+
+def f2(x: IntTP[T]) -> IntTP[T]:
+    return x
+
+f2((1, 2, 3)) # E: Argument 1 to "f2" has incompatible type "Tuple[int, int, int]"; expected "Tuple[int, <nothing>]"
+reveal_type(f2((1, 'x'))) # E: Revealed type is 'Tuple[builtins.int, builtins.str*]'
+
+[builtins fixtures/for.pyi]
+
+[case testGenericTypeAliasesCallable]
+from typing import TypeVar, Generic, Callable
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+
+BadC = Callable[T] # E: Invalid function type
+
+C = Callable[..., T]
+C2 = Callable[[T, T], Node[T]]
+
+def make_cb(x: T) -> C[T]:
+    return lambda *args: x
+
+reveal_type(make_cb(1)) # E: Revealed type is 'def (*Any, **Any) -> builtins.int*'
+
+def use_cb(arg: T, cb: C2[T]) -> Node[T]:
+    return cb(arg, arg)
+
+use_cb(1, 1) # E: Argument 2 to "use_cb" has incompatible type "int"; expected Callable[[int, int], Node[int]]
+my_cb = None # type: C2[int]
+use_cb('x', my_cb) # E: Argument 2 to "use_cb" has incompatible type Callable[[int, int], Node[int]]; expected Callable[[str, str], Node[str]]
+reveal_type(use_cb(1, my_cb)) # E: Revealed type is '__main__.Node[builtins.int]'
+
+[out]
+
+[case testGenericTypeAliasesPEPBasedExample]
+from typing import TypeVar, List, Tuple
+T = TypeVar('T', int, bool)
+
+Vec = List[Tuple[T, T]]
+
+vec = []  # type: Vec[bool]
+vec.append('x') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "Tuple[bool, bool]"
+reveal_type(vec[0]) # E: Revealed type is 'Tuple[builtins.bool, builtins.bool]'
+
+def fun1(v: Vec[T]) -> T:
+    return v[0][0]
+def fun2(v: Vec[T], scale: T) -> Vec[T]:
+    return v
+
+reveal_type(fun1([(1, 1)])) # E: Revealed type is 'builtins.int*'
+fun1(1) # E: Argument 1 to "fun1" has incompatible type "int"; expected List[Tuple[int, int]]
+fun1([(1, 'x')]) # E: Cannot infer type argument 1 of "fun1"
+
+reveal_type(fun2([(1, 1)], 1)) # E: Revealed type is 'builtins.list[Tuple[builtins.int*, builtins.int*]]'
+fun2([('x', 'x')], 'x') # E: Type argument 1 of "fun2" has incompatible value "str"
+
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesImporting]
+from typing import TypeVar
+from a import Node, TupledNode
+T = TypeVar('T')
+
+n = None # type: TupledNode[int]
+n.x = 1
+n.y = (1, 1)
+n.y = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "Tuple[int, int]")
+
+def f(x: Node[T, T]) -> TupledNode[T]:
+    return Node(x.x, (x.x, x.x))
+
+f(1) # E: Argument 1 to "f" has incompatible type "int"; expected Node[<nothing>, <nothing>]
+f(Node(1, 'x')) # E: Cannot infer type argument 1 of "f"
+reveal_type(Node('x', 'x')) # E: Revealed type is 'a.Node[builtins.str*, builtins.str*]'
+
+[file a.py]
+from typing import TypeVar, Generic, Tuple
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        self.x = x
+        self.y = y
+
+TupledNode = Node[T, Tuple[T, T]]
+
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesImportingWithoutTypeVar]
+from typing import Tuple
+from lib import Transform
+
+def int_tf(m: int) -> Transform[int, str]:
+    def transform(i: int, pos: int) -> Tuple[int, str]:
+        pass
+    return transform
+
+var: Transform[int, str]
+reveal_type(var)  # E: Revealed type is 'def (builtins.int, builtins.int) -> Tuple[builtins.int, builtins.str]'
+[file lib.py]
+from typing import Callable, TypeVar, Tuple
+
+T = TypeVar('T')
+R = TypeVar('R')
+
+Transform = Callable[[T, int], Tuple[T, R]]
+[out]
+
+[case testGenericTypeAliasesImportingWithoutTypeVarError]
+from a import Alias
+x: Alias[int, str]  # E: Bad number of arguments for type alias, expected: 1, given: 2
+reveal_type(x)  # E: Revealed type is 'builtins.list[builtins.list[Any]]'
+
+[file a.py]
+from typing import TypeVar, List
+T = TypeVar('T')
+
+Alias = List[List[T]]
+[builtins fixtures/list.pyi]
+[out]
+
+[case testGenericAliasWithTypeVarsFromDifferentModules]
+from mod import Alias, TypeVar
+
+S = TypeVar('S')
+NewAlias = Alias[int, int, S, S]
+class C: pass
+
+x: NewAlias[str]
+reveal_type(x)  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.int, builtins.str, builtins.str]]'
+y: Alias[int, str, C, C]
+reveal_type(y)  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str, __main__.C, __main__.C]]'
+
+[file mod.py]
+from typing import TypeVar, List, Tuple
+import a
+import b
+T = TypeVar('T')
+Alias = List[Tuple[T, a.T, b.T, b.B.T]]  # alias_tvars here will be ['T', 'a.T', 'b.T', 'b.B.T']
+
+[file a.py]
+from typing import TypeVar
+T = TypeVar('T')
+
+[file b.py]
+from typing import TypeVar
+
+T = TypeVar('T')
+class B:
+    T = TypeVar('T')
+[builtins fixtures/list.pyi]
+[out]
+
+[case testTypeAliasesResultingInPlainInstance]
+from typing import Optional, Union
+
+O = Optional[int]
+U = Union[int]
+
+x: O
+y: U
+
+reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+reveal_type(y)  # E: Revealed type is 'builtins.int'
+
+U[int]  # E: Bad number of arguments for type alias, expected: 0, given: 1
+O[int]  # E: Bad number of arguments for type alias, expected: 0, given: 1
+[out]
+
+[case testAliasesInClassBodyNormalVsSubscripted]
+from typing import Union, Type, Iterable
+
+class A: pass
+class B(A): pass
+class C:
+    a = A  # This is a variable
+    b = Union[int, str]  # This is an alias
+    c: Type[object] = Iterable[int]  # This is however also a variable
+    a = B
+    b = int  # E: Cannot assign multiple types to name "b" without an explicit "Type[...]" annotation \
+             # E: Incompatible types in assignment (expression has type Type[int], variable has type "Type alias to Union")
+    c = int
+    def f(self, x: a) -> None: pass  # E: Invalid type "__main__.C.a"
+    def g(self, x: b) -> None: pass
+    def h(self, x: c) -> None: pass  # E: Invalid type "__main__.C.c"
+    x: b
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+[out]
+
+[case testGenericTypeAliasesRuntimeExpressionsInstance]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        ...
+
+IntNode = Node[int, T]
+IntNode[int](1, 1)
+IntNode[int](1, 'a')  # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
+
+SameNode = Node[T, T]
+ff = SameNode[T](1, 1)  # E: Need type annotation for variable
+a = SameNode(1, 'x')
+reveal_type(a) # E: Revealed type is '__main__.Node[Any, Any]'
+b = SameNode[int](1, 1)
+reveal_type(b) # E: Revealed type is '__main__.Node[builtins.int*, builtins.int*]'
+SameNode[int](1, 'x') # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
+
+[out]
+
+[case testGenericTypeAliasesRuntimeExpressionsOther]
+from typing import TypeVar, Union, Tuple, Callable, Any
+T = TypeVar('T')
+
+CA = Callable[[T], int]
+TA = Tuple[T, int]
+UA = Union[T, int]
+
+cs = CA[str] + 1 # E: Unsupported left operand type for + ("Type alias to Callable")
+reveal_type(cs) # E: Revealed type is 'Any'
+
+ts = TA[str]() # E: "Type alias to Tuple" not callable
+reveal_type(ts) # E: Revealed type is 'Any'
+
+us = UA[str].x # E: "Type alias to Union" has no attribute "x"
+reveal_type(us) # E: Revealed type is 'Any'
+
+[out]
+
+[case testGenericTypeAliasesTypeVarBinding]
+from typing import TypeVar, Generic, List
+T = TypeVar('T')
+S = TypeVar('S')
+
+class A(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None: ...
+
+class B(Generic[T, S]):
+    def __init__(self, x: List[T], y: List[S]) -> None: ...
+
+SameA = A[T, T]
+SameB = B[T, T]
+
+class C(Generic[T]):
+    a = None # type: SameA[T]
+    b = SameB[T]([], [])
+
+reveal_type(C[int]().a) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]'
+reveal_type(C[str]().b) # E: Revealed type is '__main__.B[builtins.str*, builtins.str*]'
+
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesTypeVarConstraints]
+# flags: --show-column-numbers
+from typing import TypeVar, Generic
+T = TypeVar('T', int, list)
+S = TypeVar('S', int, list)
+
+class A(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None: ...
+
+BadA = A[str, T]  # One error here
+SameA = A[T, T]
+
+x = None # type: SameA[int]
+y = None # type: SameA[str] # Two errors here, for both args of A
+
+[builtins fixtures/list.pyi]
+[out]
+main:9:7: error: Type argument 1 of "A" has incompatible value "str"
+main:13: error: Type argument 1 of "A" has incompatible value "str"
+main:13: error: Type argument 2 of "A" has incompatible value "str"
+
+[case testGenericTypeAliasesIgnoredPotentialAlias]
+class A: ...
+Bad = A[int] # type: ignore
+
+reveal_type(Bad) # E: Revealed type is 'Any'
+[out]
+
+[case testNoSubscriptionOfBuiltinAliases]
+from typing import List, TypeVar
+
+list[int]() # E: "list" is not subscriptable
+
+ListAlias = List
+def fun() -> ListAlias[int]:
+    pass
+
+reveal_type(fun())  # E: Revealed type is 'builtins.list[builtins.int]'
+
+BuiltinAlias = list
+BuiltinAlias[int]() # E: "list" is not subscriptable
+
+# Check that the error is reported only once and that the type is still stored
+T = TypeVar('T')
+BadGenList = list[T] # E: "list" is not subscriptable
+
+reveal_type(BadGenList[int]()) # E: Revealed type is 'builtins.list[builtins.int*]'
+reveal_type(BadGenList()) # E: Revealed type is 'builtins.list[Any]'
+
+[builtins fixtures/list.pyi]
+[out]
+
+[case testImportedTypeAliasInRuntimeContext]
+from m import Alias
+
+n = Alias[int]([1])
+reveal_type(n)  # E: Revealed type is 'm.Node[builtins.list*[builtins.int]]'
+bad = Alias[str]([1])  # E: List item 0 has incompatible type "int"
+
+n2 = Alias([1]) # Same as Node[List[Any]]
+reveal_type(n2)  # E: Revealed type is 'm.Node[builtins.list*[Any]]'
+[file m.py]
+from typing import TypeVar, Generic, List
+T = TypeVar('T')
+
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        self.x = x
+
+Alias = Node[List[T]]
+[builtins fixtures/list.pyi]
+[out]
+
+-- Simplified declaration of generics
+-- ----------------------------------
+
+[case testSimplifiedGenericSimple]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class B(Generic[T]):
+    def b(self) -> T: ...
+
+class C(Generic[T]):
+    def c(self) -> T: ...
+
+class D(B[T], C[S]): ...
+
+reveal_type(D[str, int]().b()) # E: Revealed type is 'builtins.str*'
+reveal_type(D[str, int]().c()) # E: Revealed type is 'builtins.int*'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testSimplifiedGenericCallable]
+from typing import TypeVar, Generic, Callable
+T = TypeVar('T')
+S = TypeVar('S')
+class B(Generic[T]):
+    def b(self) -> T: ...
+
+class D(B[Callable[[T], S]]): ...
+
+reveal_type(D[str, int]().b()) # E: Revealed type is 'def (builtins.str*) -> builtins.int*'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testSimplifiedGenericComplex]
+from typing import TypeVar, Generic, Tuple
+T = TypeVar('T')
+S = TypeVar('S')
+U = TypeVar('U')
+
+class A(Generic[T, S]):
+    pass
+
+class B(Generic[T, S]):
+    def m(self) -> Tuple[T, S]:
+        pass
+
+class C(A[S, B[T, int]], B[U, A[int, T]]):
+    pass
+
+c = C[object, int, str]()
+reveal_type(c.m()) # E: Revealed type is 'Tuple[builtins.str*, __main__.A*[builtins.int, builtins.int*]]'
+[builtins fixtures/tuple.pyi]
+[out]
+
+
+[case testSimplifiedGenericOrder]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+
+class B(Generic[T]):
+    def b(self) -> T: ...
+
+class C(Generic[T]):
+    def c(self) -> T: ...
+
+class D(B[T], C[S], Generic[S, T]): ...
+
+reveal_type(D[str, int]().b()) # E: Revealed type is 'builtins.int*'
+reveal_type(D[str, int]().c()) # E: Revealed type is 'builtins.str*'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testSimplifiedGenericDuplicate]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+
+class A(Generic[T, T]): # E: Duplicate type variables in Generic[...]
+    pass
+
+a = A[int]()
+[builtins fixtures/list.pyi]
+[out]
+
+[case testSimplifiedGenericNotAll]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+
+class A(Generic[T]):
+    pass
+class B(Generic[T]):
+    pass
+
+class C(A[T], B[S], Generic[T]): # E: If Generic[...] is present it should list all type variables
+    pass
+
+c = C[int, str]()
+[builtins fixtures/list.pyi]
+[out]
+
+[case testSimplifiedGenericInvalid]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+
+class A(Generic[T]):
+    pass
+
+class B(A[S]): # E: Name 'S' is not defined
+    pass
+[builtins fixtures/list.pyi]
+[out]
+
+
+-- Multiple assignment with lists
+-- ------------------------------
+
+
+[case testMultipleAssignmentWithLists]
+from typing import List
+class A: pass
+class B: pass
+class B2(B): pass
+a = None # type: A
+b = None # type: B
+b2 = None # type: B2
+
+list_a = [a]
+list_b = [b]
+list_b2 = [b2]
+
+a, b = list_a   # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b, a = list_a   # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b2, b2 = list_b # E: Incompatible types in assignment (expression has type "B", variable has type "B2")
+
+a, a = list_a
+b, b2, b = list_b2
+[builtins fixtures/for.pyi]
+
+[case testMultipleAssignmentWithListsInInitialization]
+from typing import List
+class A: pass
+list_object = [object()]
+list_a = [A()]
+a, b = list_object # type: (A, object) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+c, d = list_object # type: (object, A) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+e, f = list_a # type: (A, object)
+[builtins fixtures/for.pyi]
+
+[case testMultipleAssignmentWithListAndIndexing]
+from typing import List
+a = None # type: List[A]
+b = None # type: List[int]
+
+a[1], b[1] = a # E: Incompatible types in assignment (expression has type "A", target has type "int")
+a[1], a[2] = a
+
+class A: pass
+[file builtins.py]
+from typing import TypeVar, Generic, Iterable
+T = TypeVar('T')
+class object: pass
+class list(Iterable[T]):
+  def __setitem__(self, x: int, v: T) -> None: pass
+class int: pass
+class type: pass
+class tuple: pass
+class function: pass
+class str: pass
+
+[case testMultipleAssignmentWithIterable]
+from typing import Iterable, TypeVar
+a = None  # type: int
+b = None  # type: str
+T = TypeVar('T')
+
+def f(x: T) -> Iterable[T]: pass
+
+a, b = f(a)   # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+b, b = f(a)   # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+a, a = f(a)
+b, b = f(b)
+[builtins fixtures/for.pyi]
+
+
+-- Error messages
+-- --------------
+
+
+[case testErrorWithLongGenericTypeName]
+from typing import TypeVar, Generic
+B = TypeVar('B')
+C = TypeVar('C')
+D = TypeVar('D')
+E = TypeVar('E')
+F = TypeVar('F')
+G = TypeVar('G')
+H = TypeVar('H')
+I = TypeVar('I')
+J = TypeVar('J')
+K = TypeVar('K')
+L = TypeVar('L')
+M = TypeVar('M')
+N = TypeVar('N')
+O = TypeVar('O')
+P = TypeVar('P')
+Q = TypeVar('Q')
+R = TypeVar('R')
+S = TypeVar('S')
+T = TypeVar('T')
+U = TypeVar('U')
+V = TypeVar('V')
+W = TypeVar('W')
+X = TypeVar('X')
+Y = TypeVar('Y')
+Z = TypeVar('Z')
+class OO: pass
+a = None # type: A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object]
+
+f(a) # E: Argument 1 to "f" has incompatible type A[...]; expected "OO"
+
+def f(a: OO) -> None:
+    pass
+class A(Generic[B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z]): pass
+
+[case testErrorWithShorterGenericTypeName]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+a = None # type: A[object, B]
+f(a) # E: Argument 1 to "f" has incompatible type A[object, B]; expected "B"
+
+def f(a: 'B') -> None: pass
+class A(Generic[S, T]): pass
+class B: pass
+
+[case testErrorWithShorterGenericTypeName2]
+from typing import Callable, TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+a = None # type: A[object, Callable[[], None]]
+f(a) # E: Argument 1 to "f" has incompatible type A[object, Callable[[], None]]; expected "B"
+
+def f(a: 'B') -> None: pass
+class A(Generic[S, T]): pass
+class B: pass
+
+
+-- Overloads + generics
+-- --------------------
+
+
+[case testGenericArgumentInOverload]
+from foo import *
+[file foo.pyi]
+from typing import overload, List
+class A: pass
+class B: pass
+a, b = None, None # type: (A, B)
+
+@overload
+def f(a: List[A]) -> A: pass
+@overload
+def f(a: B) -> B: pass
+
+b = f([a]) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f([b]) # E: List item 0 has incompatible type "B"
+a = f(b)   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+a = f([a])
+b = f(b)
+[builtins fixtures/list.pyi]
+
+[case testGenericFunctionAsOverloadItem]
+from foo import *
+[file foo.pyi]
+from typing import overload, TypeVar, List
+T = TypeVar('T')
+class A: pass
+class B: pass
+
+@overload
+def f(a: B) -> B: pass
+@overload
+def f(a: List[T]) -> T: pass
+
+a, b = None, None # type: (A, B)
+
+b = f([a]) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f([b]) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = f(b)   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+a = f([a])
+b = f([b])
+b = f(b)
+[builtins fixtures/list.pyi]
+
+
+-- Type variable scoping
+-- ---------------------
+
+
+[case testLocalTypeVariable]
+from typing import TypeVar
+def f() -> None:
+    T = TypeVar('T')
+    def g(x: T) -> T: pass
+    a = g(1)
+    a = 1
+    a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[out]
+
+[case testClassLevelTypeVariable]
+from typing import TypeVar
+class A:
+    T = TypeVar('T')
+    def g(self, x: T) -> T: pass
+a = A().g(1)
+a = 1
+a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testGenericInnerClass]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A:
+    class B(Generic[T]):
+        def meth(self) -> T:  ...
+    B[int]()
+    reveal_type(B[int]().meth) # E: Revealed type is 'def () -> builtins.int*'
+
+A.B[int]()
+reveal_type(A.B[int]().meth) # E: Revealed type is 'def () -> builtins.int*'
+
+[case testGenericClassInnerFunctionTypeVariable]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def __init__(self, a: T) -> None:
+        self.a = a
+    def f(self, n: int) -> None:
+        def g(a: T):
+            self.a = a
+        g(self.a)
+        g(n) # E: Argument 1 to "g" has incompatible type "int"; expected "T"
+
+[case testFunctionInGenericInnerClassTypeVariable]
+from typing import TypeVar, Generic
+
+T = TypeVar('T')
+class Outer(Generic[T]):
+    class Inner:
+        x: T  # E: Invalid type "__main__.T"
+        def f(self, x: T) -> T: ...  # E: Type variable 'T' is bound by an outer class
+        def g(self) -> None:
+            y: T  # E: Invalid type "__main__.T"
+
+
+-- Callable subtyping with generic functions
+-- -----------------------------------------
+
+
+[case testSubtypingWithGenericFunctions]
+from typing import TypeVar
+A = TypeVar('A')
+B = TypeVar('B')
+
+def f1(x: A) -> A: ...
+def f2(x: A) -> B: ...
+def f3(x: B) -> B: ...
+def f4(x: int) -> A: ...
+
+y1 = f1
+y1 = f1
+y1 = f2
+y1 = f3
+y1 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[A], A])
+
+y2 = f2
+y2 = f2
+y2 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], B])
+y2 = f3 # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[A], B])
+y2 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[A], B])
+
+y3 = f3
+y3 = f3
+y3 = f1
+y3 = f2
+y3 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[B], B])
+
+y4 = f4
+y4 = f4
+y4 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[int], A])
+y4 = f2
+y4 = f3 # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[int], A])
+
+[case testSubtypingWithGenericInnerFunctions]
+from typing import TypeVar
+A = TypeVar('A')
+B = TypeVar('B')
+T = TypeVar('T')
+def outer(t: T) -> None:
+    def f1(x: A) -> A: ...
+    def f2(x: A) -> B: ...
+    def f3(x: T) -> A: ...
+    def f4(x: A) -> T: ...
+    def f5(x: T) -> T: ...
+
+    y1 = f1
+    y1 = f2
+    y1 = f3 # E: Incompatible types in assignment (expression has type Callable[[T], A], variable has type Callable[[A], A])
+    y1 = f4 # E: Incompatible types in assignment (expression has type Callable[[A], T], variable has type Callable[[A], A])
+    y1 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[A], A])
+
+    y2 = f2
+    y2 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], B])
+
+    y3 = f3
+    y3 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[T], A])
+    y3 = f2
+    y3 = f4 # E: Incompatible types in assignment (expression has type Callable[[A], T], variable has type Callable[[T], A])
+    y3 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], A])
+
+    y4 = f4
+    y4 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], T])
+    y4 = f2
+    y4 = f3 # E: Incompatible types in assignment (expression has type Callable[[T], A], variable has type Callable[[A], T])
+    y4 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[A], T])
+
+    y5 = f5
+    y5 = f1
+    y5 = f2
+    y5 = f3
+    y5 = f4
+[out]
+
+[case testSubtypingWithGenericFunctionUsingTypevarWithValues]
+from typing import TypeVar, Callable
+T = TypeVar('T', int, str)
+def f(x: T) -> T: pass
+def g1(f: Callable[[str], str]) -> None: pass
+g1(f)
+def g2(f: Callable[[int], int]) -> None: pass
+g2(f)
+def g3(f: Callable[[object], object]) -> None: pass
+g3(f) # E: Argument 1 to "g3" has incompatible type Callable[[T], T]; \
+           expected Callable[[object], object]
+
+[case testSubtypingWithGenericFunctionUsingTypevarWithValues2-skip]
+from typing import TypeVar, Callable
+T = TypeVar('T', int, str)
+def f(x: T) -> T: pass
+g = f
+g = f
+
+
+-- Operations on type variable types
+-- ---------------------------------
+
+
+[case testTypeVariableTypeEquality]
+from typing import TypeVar
+T = TypeVar('T')
+def f(a: T, b: T) -> T:
+    a.__ne__(b)
+    if a == b:
+        return a
+    else:
+        return b
+[builtins fixtures/ops.pyi]
+
+[case testTypeVariableTypeIs]
+from typing import TypeVar
+T = TypeVar('T')
+def f(a: T, b: T) -> T:
+    if a is b or a is 1:
+        return a
+    else:
+        return b
+[builtins fixtures/ops.pyi]
+
+[case testTypeVariableTypeLessThan]
+from typing import TypeVar
+T = TypeVar('T')
+def f(a: T, b: T) -> T:
+    if a < b:
+        return a
+    else:
+        return b
+[builtins fixtures/ops.pyi]
+[out]
+main:4: error: Unsupported left operand type for < ("T")
+
+
+-- Subtyping generic callables
+-- ---------------------------
+
+[case testSubtypingGenericTypeObject]
+from typing import Callable, Generic, TypeVar
+T = TypeVar('T')
+class C(Generic[T]):
+    def __init__(self) -> None: pass
+x = C # type: Callable[[], C[int]]
+y = C # type: Callable[[], int] # E: Incompatible types in assignment (expression has type Type[C[Any]], variable has type Callable[[], int])
+
+
+-- Special cases
+-- -------------
+
+
+[case testIdentityHigherOrderFunction]
+from typing import Callable, TypeVar
+A = TypeVar('A')
+B = TypeVar('B')
+def square(n: int) -> int:
+    return n
+def id(f: Callable[[A], B]) -> Callable[[A], B]:
+    return f
+g = id(square)
+g(1)
+g('x')  # E: Argument 1 has incompatible type "str"; expected "int"
+
+
+[case testIdentityHigherOrderFunction2]
+from typing import Callable, TypeVar
+A = TypeVar('A')
+def voidify(n: int) -> None: pass
+def identity(f: Callable[[A], None]) -> Callable[[A], None]:
+    return f
+identity(voidify)(3)
+
+[case testIdentityHigherOrderFunction3]
+from typing import Callable, TypeVar
+A = TypeVar('A')
+B = TypeVar('B')
+def fn(n: B) -> None: pass
+def identity(f: A) -> A:
+    return f
+identity(fn)
+identity(fn)('x')
+
+[case testTypeVariableUnionAndCallableInTypeInference]
+from typing import Union, Callable, TypeVar
+T = TypeVar('T')
+def f(x: T, y: Union[T, Callable[[T], None]]) -> None: pass
+f('', '')
+
+[case testGenericFunctionsWithUnalignedIds]
+from typing import TypeVar
+A = TypeVar('A')
+B = TypeVar('B')
+def f1(x: int, y: A) -> A: ...
+def f2(x: int, y: A) -> B: ...
+def f3(x: A, y: B) -> B: ...
+g = f1
+g = f2
+g = f3
+
+[case testTypeVariableWithContainerAndTuple]
+from typing import TypeVar, Container
+T = TypeVar('T')
+def f(x: Container[T]) -> T: ...
+reveal_type(f((1, 2))) # E: Revealed type is 'builtins.int*'
+
+[case testClassMethodInGenericClassWithGenericConstructorArg]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def __init__(self, a: T) -> None: pass
+    @classmethod
+    def f(cls) -> None: pass
+[builtins fixtures/classmethod.pyi]
+
+[case testClassMethodInClassWithGenericConstructor]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A:
+    def __init__(self, a: T) -> None: pass
+    @classmethod
+    def f(cls) -> None: pass
+[builtins fixtures/classmethod.pyi]
+
+[case testGenericOperatorMethodOverlapping]
+from typing import TypeVar, Generic, Tuple
+T = TypeVar('T')
+T2 = TypeVar('T2')
+S = TypeVar('S', bound=str)
+S2 = TypeVar('S2', bound=str)
+class G(Generic[T]):
+    pass
+class A:
+    def __or__(self, x: G[T]) -> G[T]: pass
+    def __ior__(self, x: G[T2]) -> G[T2]: pass
+class B:
+    def __or__(self, x: G[T]) -> G[T]: pass
+    def __ior__(self, x: G[S]) -> G[S]: pass \
+        # E: Signatures of "__ior__" and "__or__" are incompatible
+class C:
+    def __or__(self, x: G[S]) -> G[S]: pass
+    def __ior__(self, x: G[S2]) -> G[S2]: pass
+
+[case testGenericOperatorMethodOverlapping2]
+from typing import TypeVar, Generic, Tuple
+X = TypeVar('X')
+T = TypeVar('T', int, str)
+T2 = TypeVar('T2', int, str)
+S = TypeVar('S', float, str)
+S2 = TypeVar('S2', float, str)
+class G(Generic[X]):
+    pass
+class A:
+    def __or__(self, x: G[T]) -> G[T]: pass
+    def __ior__(self, x: G[T2]) -> G[T2]: pass
+class B:
+    def __or__(self, x: G[T]) -> G[T]: pass
+    def __ior__(self, x: G[S]) -> G[S]: pass \
+        # E: Signatures of "__ior__" and "__or__" are incompatible
+class C:
+    def __or__(self, x: G[S]) -> G[S]: pass
+    def __ior__(self, x: G[S2]) -> G[S2]: pass
+class D:
+    def __or__(self, x: G[X]) -> G[X]: pass
+    def __ior__(self, x: G[S2]) -> G[S2]: pass \
+        # E: Signatures of "__ior__" and "__or__" are incompatible
diff --git a/test-data/unit/check-ignore.test b/test-data/unit/check-ignore.test
new file mode 100644
index 0000000..3c81bd0
--- /dev/null
+++ b/test-data/unit/check-ignore.test
@@ -0,0 +1,218 @@
+[case testIgnoreTypeError]
+x = 1
+x() # type: ignore
+x() # E: "int" not callable
+
+[case testIgnoreUndefinedName]
+x = 1
+y # type: ignore
+z # E: Name 'z' is not defined
+
+[case testIgnoreImportError]
+import xyz_m # type: ignore
+xyz_m.foo
+1() # E: "int" not callable
+
+[case testIgnoreImportFromError]
+from xyz_m import a, b # type: ignore
+a.foo
+b()
+1() # E: "int" not callable
+
+[case testIgnoreImportFromErrorMultiline]
+from xyz_m import ( # type: ignore
+    a, b
+)
+a.foo
+b()
+1() # E: "int" not callable
+
+[case testIgnoreImportAllError]
+from xyz_m import * # type: ignore
+x   # E: Name 'x' is not defined
+1() # E: "int" not callable
+
+[case testIgnoreImportBadModule]
+import m # type: ignore
+from m import a # type: ignore
+[file m.py]
++
+[out]
+tmp/m.py:1: error: invalid syntax
+
+[case testIgnoreAppliesOnlyToMissing]
+import a # type: ignore
+import b # type: ignore
+reveal_type(a.foo) # E: Revealed type is 'Any'
+reveal_type(b.foo) # E: Revealed type is 'builtins.int'
+a.bar()
+b.bar() # E: Module has no attribute "bar"
+
+[file b.py]
+foo = 3
+
+[builtins fixtures/module_all.pyi]
+[out]
+
+[case testIgnoreImportStarFromBadModule]
+from m import * # type: ignore
+[file m.py]
++
+[out]
+tmp/m.py:1: error: invalid syntax
+
+[case testIgnoreAssignmentTypeError]
+x = 1
+x = '' # type: ignore
+x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testIgnoreInvalidOverride]
+class A:
+    def f(self) -> int: pass
+class B(A):
+    def f(self) -> str: pass # type: ignore
+
+[case testIgnoreMissingModuleAttribute]
+import m
+m.x = object # type: ignore
+m.f() # type: ignore
+m.y # E: Module has no attribute "y"
+[file m.py]
+[builtins fixtures/module.pyi]
+
+[case testIgnoreTypeInferenceError]
+x = [] # type: ignore
+y = x
+x.append(1)
+[builtins fixtures/list.pyi]
+
+[case testIgnoreTypeInferenceError2]
+def f() -> None: pass
+x = f() # type: ignore
+y = x
+x = 1
+[builtins fixtures/list.pyi]
+
+[case testIgnoreTypeInferenceErrorAndMultipleAssignment]
+x, y = [], [] # type: ignore
+z = x
+z = y
+[builtins fixtures/list.pyi]
+
+[case testIgnoreSomeStarImportErrors]
+from m1 import *
+from m2 import * # type: ignore
+# We should still import things that don't conflict.
+y() # E: "str" not callable
+z() # E: "int" not callable
+x() # E: "int" not callable
+[file m1.py]
+x = 1
+y = ''
+[file m2.py]
+x = ''
+z = 1
+
+[case testIgnoredModuleDefinesBaseClass1]
+from m import B # type: ignore
+
+class C(B):
+    def f(self) -> None:
+        self.f(1) # E: Too many arguments for "f" of "C"
+        self.g(1)
+[out]
+
+[case testIgnoredModuleDefinesBaseClass2]
+import m # type: ignore
+
+class C(m.B):
+    def f(self) -> None: ...
+
+c = C()
+c.f(1) # E: Too many arguments for "f" of "C"
+c.g(1)
+c.x = 1
+[out]
+
+[case testIgnoredModuleDefinesBaseClassAndClassAttribute]
+import m # type: ignore
+
+class C(m.B):
+    @staticmethod
+    def f() -> None: pass
+
+C.f(1) # E: Too many arguments for "f" of "C"
+C.g(1)
+C.x = 1
+[builtins fixtures/staticmethod.pyi]
+[out]
+
+[case testIgnoredModuleDefinesBaseClassWithInheritance1]
+from m import B # type: ignore
+
+class C: pass
+class D(C, B):
+    def f(self) -> None:
+        self.f(1) # E: Too many arguments for "f" of "D"
+        self.g(1)
+[out]
+
+[case testIgnoredModuleDefinesBaseClassWithInheritance2]
+from m import B # type: ignore
+
+class C(B): pass
+class D(C):
+    def f(self) -> None:
+        self.f(1) # E: Too many arguments for "f" of "D"
+        self.g(1)
+[out]
+
+[case testIgnoreWithFollowingIndentedComment]
+if 1:  # type: ignore
+    # blah
+    pass
+[out]
+
+[case testIgnoreTooManyTypeArguments]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+U = TypeVar('U')
+
+class Base(Generic[T, U]):
+  pass
+
+class PartialBase(Base[T, int], Generic[T]):
+  pass
+
+class Child(PartialBase[str, int]):  # type: ignore
+  pass
+
+
+def foo(x: Base[str, int]) -> None: pass
+foo(Child())
+
+def bar(x: Base[str, str]) -> None: pass
+bar(Child())
+[out]
+main:19: error: Argument 1 to "bar" has incompatible type "Child"; expected Base[str, str]
+
+[case testTypeIgnoreLineNumberWithinFile]
+import m
+pass # type: ignore
+m.f(kw=1)
+[file m.py]
+pass
+def f() -> None: pass
+[out]
+main:3: error: Unexpected keyword argument "kw" for "f"
+tmp/m.py:2: note: "f" defined here
+
+[case testIgnoreUnexpectedKeywordArgument]
+import m
+m.f(kw=1)  # type: ignore
+[file m.py]
+def f() -> None: pass
+[out]
+
+[case testCannotIgnoreBlockingError]
+yield  # type: ignore  # E: 'yield' outside function
diff --git a/test-data/unit/check-incomplete-fixture.test b/test-data/unit/check-incomplete-fixture.test
new file mode 100644
index 0000000..68c7c6c
--- /dev/null
+++ b/test-data/unit/check-incomplete-fixture.test
@@ -0,0 +1,98 @@
+-- Test cases for reporting errors when a test case uses a fixture with
+-- missing definitions.  At least in the most common cases this should not
+-- result in an uncaught exception.  These tests make sure that this behavior
+-- does not regress.
+--
+-- NOTE: These tests do NOT test behavior of mypy outside tests.
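+--
+-- As an illustrative sketch (taken from the cases below), a test that uses List
+-- without the matching fixture is expected to report an error plus notes of this
+-- shape rather than crash:
+--
+--   main:1: error: Name '__builtins__.list' is not defined
+--   main:1: note: Maybe your test fixture does not define "typing.List"?
+--   main:1: note: Consider adding [builtins fixtures/list.pyi] to your test description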
+
+[case testVariableUndefinedUsingDefaultFixture]
+import m
+# This used to cause a crash since types.ModuleType is not available
+# by default. We fall back to 'object' now.
+m.x # E: "object" has no attribute "x"
+[file m.py]
+
+[case testListMissingFromStubs]
+from typing import List
+def f(x: List[int]) -> None: pass
+[out]
+main:1: error: Name '__builtins__.list' is not defined
+main:1: note: Maybe your test fixture does not define "typing.List"?
+main:1: note: Consider adding [builtins fixtures/list.pyi] to your test description
+
+[case testDictMissingFromStubs]
+from typing import Dict
+def f(x: Dict[int]) -> None: pass
+[out]
+main:1: error: Name '__builtins__.dict' is not defined
+main:1: note: Maybe your test fixture does not define "typing.Dict"?
+main:1: note: Consider adding [builtins fixtures/dict.pyi] to your test description
+
+[case testSetMissingFromStubs]
+from typing import Set
+def f(x: Set[int]) -> None: pass
+[out]
+main:1: error: Name '__builtins__.set' is not defined
+main:1: note: Maybe your test fixture does not define "typing.Set"?
+main:1: note: Consider adding [builtins fixtures/set.pyi] to your test description
+
+[case testBoolMissingFromStubs]
+x: bool
+[out]
+main:1: error: Name 'bool' is not defined
+main:1: note: Maybe your test fixture does not define "builtins.bool"?
+main:1: note: Consider adding [builtins fixtures/bool.pyi] to your test description
+
+[case testBaseExceptionMissingFromStubs]
+e: BaseException
+[out]
+main:1: error: Name 'BaseException' is not defined
+main:1: note: Maybe your test fixture does not define "builtins.BaseException"?
+main:1: note: Consider adding [builtins fixtures/exception.pyi] to your test description
+
+[case testExceptionMissingFromStubs]
+e: Exception
+[out]
+main:1: error: Name 'Exception' is not defined
+main:1: note: Maybe your test fixture does not define "builtins.Exception"?
+main:1: note: Consider adding [builtins fixtures/exception.pyi] to your test description
+
+[case testIsinstanceMissingFromStubs]
+if isinstance(1, int):
+    pass
+[out]
+main:1: error: Name 'isinstance' is not defined
+main:1: note: Maybe your test fixture does not define "builtins.isinstance"?
+main:1: note: Consider adding [builtins fixtures/isinstancelist.pyi] to your test description
+
+[case testInvalidTupleDefinitionFromStubs]
+from typing import Tuple
+x: Tuple[int, ...]
+x[0]
+for y in x:
+    pass
+[out]
+-- These errors are pretty bad, but keeping this test anyway to
+-- avoid things getting worse.
+main:2: error: "tuple" expects no type arguments, but 1 given
+main:3: error: Value of type "tuple" is not indexable
+main:4: error: Iterable expected
+main:4: error: "tuple" has no attribute "__iter__"
+
+[case testClassmethodMissingFromStubs]
+class A:
+    @classmethod
+    def f(cls): pass
+[out]
+main:2: error: Name 'classmethod' is not defined
+main:2: note: Maybe your test fixture does not define "builtins.classmethod"?
+main:2: note: Consider adding [builtins fixtures/classmethod.pyi] to your test description
+
+[case testPropertyMissingFromStubs]
+class A:
+    @property
+    def f(self): pass
+[out]
+main:2: error: Name 'property' is not defined
+main:2: note: Maybe your test fixture does not define "builtins.property"?
+main:2: note: Consider adding [builtins fixtures/property.pyi] to your test description
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
new file mode 100644
index 0000000..557d42a
--- /dev/null
+++ b/test-data/unit/check-incremental.test
@@ -0,0 +1,2842 @@
+-- Checks for incremental mode (see testcheck.py).
+-- Each test is run at least twice: first with a cold cache, then with a warm cache.
+-- Before run N, any *.py.N files are copied over the corresponding *.py files, so
+-- later runs see the modified sources.
+--
+-- You can add an empty section like `[delete mod.py.2]` to delete `mod.py`
+-- before the second run.
+--
+-- Errors expected in the first run should be in the `[out1]` section, and
+-- errors expected in the second run should be in the `[out2]` section, and so on.
+-- If an [outN] section is omitted, that run is expected to produce no errors.
+-- The number of runs is determined by the highest N in all [outN] sections, but
+-- there are always at least two runs. (Note that [out] is equivalent to [out1].)
+--
+-- The list of modules to be checked can be specified using
+-- # cmd: mypy -m mod1 mod2 mod3
+-- To check a different list on the second run, use
+-- # cmd2: mypy -m mod1 mod3
+-- (and cmd3 for the third run, and so on).
+--
+-- Extra command line flags may be specified using
+-- # flags: --some-flag
+-- If the second run requires different flags, those can be specified using
+-- # flags2: --another-flag
+-- (and flags3 for the third run, and so on).
+--
+-- Any files that we expect to be rechecked should be listed in the [rechecked]
+-- annotation, and any files expected to be stale (i.e. to have a modified interface)
+-- should be listed in the [stale] annotation. Note that a file that ends up
+-- producing an error does not create a new cache file and so is not considered stale.
+--
+-- The test suite automatically assumes that __main__ is stale and rechecked in
+-- all cases, so it never needs to be annotated explicitly. The list of
+-- rechecked/stale files can be given in any order, or left empty if no files
+-- should be rechecked/stale.
+--
+-- There are additional incremental mode test cases in check-serialize.test.
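+--
+-- As an illustrative sketch only (not one of the cases below; the module name
+-- `mod` is a placeholder), the pieces described above combine roughly like this:
+--
+--   [case testExampleSketch]
+--   import mod
+--   mod.f('x')
+--   [file mod.py]
+--   def f(x: str) -> None: pass
+--   [file mod.py.2]
+--   def f(x: int) -> None: pass
+--   [rechecked mod]
+--   [stale mod]
+--   [out2]
+--   main:2: error: Argument 1 to "f" has incompatible type "str"; expected "int"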
+
+[case testIncrementalEmpty]
+[rechecked]
+[stale]
+
+[case testIncrementalBasics]
+import m
+[file m.py]
+def foo():
+    pass
+[file m.py.2]
+def foo() -> None:
+    pass
+[rechecked m]
+[stale m]
+
+[case testIncrementalError]
+import m
+[file m.py]
+def foo() -> None:
+    pass
+[file m.py.2]
+def foo() -> None:
+    bar()
+[rechecked m]
+[stale]
+[out2]
+tmp/m.py:2: error: Name 'bar' is not defined
+
+[case testIncrementalSimpleImportSequence]
+import mod1
+mod1.func1()
+
+[file mod1.py]
+import mod2
+def func1() -> None: mod2.func2()
+
+[file mod2.py]
+import mod3
+def func2() -> None: mod3.func3()
+
+[file mod3.py]
+def func3() -> None: pass
+
+[rechecked]
+[stale]
+
+
+[case testIncrementalInternalChangeOnly]
+import mod1
+mod1.func1()
+
+[file mod1.py]
+import mod2
+def func1() -> None: mod2.func2()
+
+[file mod2.py]
+import mod3
+def func2() -> None: mod3.func3()
+
+[file mod3.py]
+def func3() -> None: pass
+
+[file mod3.py.2]
+def func3() -> None: 3 + 2
+
+[rechecked mod3]
+[stale]
+
+
+[case testIncrementalImportGone]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+def func1() -> A: pass
+
+[file mod2.py]
+class A: pass
+
+[file mod1.py.2]
+def func1() -> A: pass
+
+[rechecked mod1]
+[stale]
+[out2]
+tmp/mod1.py:1: error: Name 'A' is not defined
+
+[case testIncrementalCallable]
+import mod1
+
+[file mod1.py]
+from typing import Callable
+from mypy_extensions import Arg
+def func1() -> Callable[[Arg(int, 'x')], int]: pass
+
+[file mod1.py.2]
+from typing import Callable
+from mypy_extensions import Arg
+def func1() -> Callable[[Arg(int, 'x')], int]: ...
+
+
+[rechecked mod1]
+[stale]
+
+[builtins fixtures/dict.pyi]
+
+[case testIncrementalSameNameChange]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+def func1() -> A: pass
+
+[file mod2.py]
+class A: pass
+
+[file mod2.py.2]
+class Parent: pass
+class A(Parent): pass
+
+[rechecked mod1, mod2]
+[stale mod2]
+
+[case testIncrementalPartialInterfaceChange]
+import mod1
+mod1.func1()
+
+[file mod1.py]
+import mod2
+def func1() -> None: mod2.func2()
+
+[file mod2.py]
+import mod3
+def func2() -> None: mod3.func3()
+
+[file mod3.py]
+def func3() -> None: pass
+
+[file mod3.py.2]
+def func3() -> int: return 2
+
+[rechecked mod2, mod3]
+[stale mod3]
+
+[case testIncrementalInternalFunctionDefinitionChange]
+import mod1
+
+[file mod1.py]
+import mod2
+def accepts_int(a: int) -> int: return a
+accepts_int(mod2.foo())
+
+[file mod2.py]
+def foo() -> int:
+    def inner() -> int:
+        return 42
+    return inner()
+
+[file mod2.py.2]
+def foo() -> int:
+    def inner2() -> str:
+        return "foo"
+    return inner2()
+
+[rechecked mod2]
+[stale]
+[out2]
+tmp/mod2.py:4: error: Incompatible return value type (got "str", expected "int")
+
+[case testIncrementalInternalScramble]
+import mod1
+
+[file mod1.py]
+import mod2
+mod2.foo()
+
+[file mod2.py]
+def baz() -> int:
+    return 3
+
+def bar() -> int:
+    return baz()
+
+def foo() -> int:
+    return bar()
+
+[file mod2.py.2]
+def foo() -> int:
+    return baz()
+
+def bar() -> int:
+    return bar()
+
+def baz() -> int:
+    return 42
+[rechecked mod2]
+[stale]
+
+[case testIncrementalMethodInterfaceChange]
+import mod1
+
+[file mod1.py]
+import mod2
+
+[file mod2.py]
+class Foo:
+    def bar(self, a: str) -> str:
+        return "a"
+
+[file mod2.py.2]
+class Foo:
+    def bar(self, a: float) -> str:
+        return "a"
+
+[rechecked mod1, mod2]
+[stale mod2]
+
+[case testIncrementalBaseClassChange]
+import mod1
+
+[file mod1.py]
+from mod2 import Child
+Child().good_method()
+
+[file mod2.py]
+class Good:
+    def good_method(self) -> int: return 1
+class Bad: pass
+class Child(Good): pass
+
+[file mod2.py.2]
+class Good:
+    def good_method(self) -> int: return 1
+class Bad: pass
+class Child(Bad): pass
+
+[rechecked mod1, mod2]
+[stale mod2]
+[out2]
+tmp/mod1.py:2: error: "Child" has no attribute "good_method"
+
+[case testIncrementalCascadingChange]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+def accepts_int(a: int) -> None: pass
+accepts_int(A)
+
+[file mod2.py]
+from mod3 import B
+A = B
+
+[file mod3.py]
+from mod4 import C
+B = C
+
+[file mod4.py]
+C = 3
+
+[file mod4.py.2]
+C = "A"
+
+[rechecked mod1, mod2, mod3, mod4]
+[stale mod2, mod3, mod4]
+[out2]
+tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalBrokenCascade]
+import mod1
+
+[file mod1.py]
+import mod2
+def accept_int(a: int) -> int: return a
+accept_int(mod2.mod3.mod4.const)
+
+[file mod2.py]
+import mod3
+
+[file mod3.py]
+import mod4
+
+[file mod4.py]
+const = 3
+
+[file mod3.py.2]
+# Import of mod4 is gone!
+
+[rechecked mod1, mod2, mod3]
+[stale mod3]
+[builtins fixtures/module.pyi]
+[out2]
+tmp/mod1.py:3: error: Module has no attribute "mod4"
+
+[case testIncrementalLongBrokenCascade]
+import mod1
+
+[file mod1.py]
+import mod2
+def accept_int(a: int) -> int: return a
+accept_int(mod2.mod3.mod4.mod5.mod6.mod7.const)
+
+[file mod2.py]
+import mod3
+
+[file mod3.py]
+import mod4
+
+[file mod4.py]
+import mod5
+
+[file mod5.py]
+import mod6
+
+[file mod6.py]
+import mod7
+
+[file mod7.py]
+const = 3
+
+[file mod6.py.2]
+# Import of mod7 is gone!
+
+[rechecked mod1, mod5, mod6]
+[stale mod6]
+[builtins fixtures/module.pyi]
+[out2]
+tmp/mod1.py:3: error: Module has no attribute "mod7"
+
+[case testIncrementalNestedBrokenCascade]
+import mod1
+
+[file mod1.py]
+import mod2
+def accept_int(a: int) -> int: return a
+accept_int(mod2.mod3.mod4.const)
+
+[file mod2/__init__.py]
+import mod2.mod3 as mod3
+
+[file mod2/mod3/__init__.py]
+import mod2.mod3.mod4 as mod4
+
+[file mod2/mod3/__init__.py.2]
+# Import is gone!
+
+[file mod2/mod3/mod4.py]
+const = 3
+
+[rechecked mod1, mod2, mod2.mod3]
+[stale mod2.mod3]
+[builtins fixtures/module.pyi]
+[out2]
+tmp/mod1.py:3: error: Module has no attribute "mod4"
+
+[case testIncrementalNestedBrokenCascadeWithType1]
+import mod1, mod2.mod3.mod5
+
+[file mod1.py]
+import mod2
+def accept_int(x: int) -> None: pass
+def produce() -> mod2.CustomType:
+    return mod2.CustomType()
+a = produce()
+accept_int(a.foo())
+
+[file mod2/__init__.py]
+from mod2.mod3 import CustomType
+
+[file mod2/mod3/__init__.py]
+from mod2.mod3.mod4 import CustomType
+
+[file mod2/mod3/__init__.py.2]
+# Import a different class that also happens to be called 'CustomType'
+from mod2.mod3.mod5 import CustomType
+def produce() -> CustomType:
+    return CustomType()
+
+[file mod2/mod3/mod4.py]
+class CustomType:
+    def foo(self) -> int: return 1
+
+[file mod2/mod3/mod5.py]
+class CustomType:
+    def foo(self) -> str: return "a"
+
+[rechecked mod1, mod2, mod2.mod3]
+[stale mod2, mod2.mod3]
+[builtins fixtures/module.pyi]
+[out1]
+[out2]
+tmp/mod1.py:6: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalNestedBrokenCascadeWithType2]
+import mod1, mod2.mod3.mod5
+
+[file mod1.py]
+from mod2 import produce
+def accept_int(x: int) -> None: pass
+a = produce()
+accept_int(a.foo())
+
+[file mod2/__init__.py]
+from mod2.mod3 import produce
+
+[file mod2/mod3/__init__.py]
+from mod2.mod3.mod4 import CustomType
+def produce() -> CustomType:
+    return CustomType()
+
+[file mod2/mod3/__init__.py.2]
+# Import a different class that also happens to be called 'CustomType'
+from mod2.mod3.mod5 import CustomType
+def produce() -> CustomType:
+    return CustomType()
+
+[file mod2/mod3/mod4.py]
+class CustomType:
+    def foo(self) -> int: return 1
+
+[file mod2/mod3/mod5.py]
+class CustomType:
+    def foo(self) -> str: return "a"
+
+[rechecked mod1, mod2, mod2.mod3]
+[stale mod2.mod3]
+[builtins fixtures/module.pyi]
+[out1]
+[out2]
+tmp/mod1.py:4: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalRemoteChange]
+import mod1
+
+[file mod1.py]
+import mod2
+def accepts_int(a: int) -> None: pass
+accepts_int(mod2.mod3.mod4.const)
+
+[file mod2.py]
+import mod3
+
+[file mod3.py]
+import mod4
+
+[file mod4.py]
+const = 3
+
+[file mod4.py.2]
+const = "foo"
+
+[rechecked mod1, mod3, mod4]
+[stale mod4]
+[out2]
+tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalBadChange]
+import mod1
+
+[file mod1.py]
+from mod2 import func2
+
+def func1() -> int:
+    return func2()
+
+[file mod2.py]
+def func2() -> int:
+    return 1
+
+[file mod2.py.2]
+def func2() -> str:
+    return "foo"
+
+[rechecked mod1, mod2]
+[stale mod2]
+[out2]
+tmp/mod1.py:4: error: Incompatible return value type (got "str", expected "int")
+
+[case testIncrementalBadChangeWithSave]
+import mod0
+
+[file mod0.py]
+import mod1
+A = mod1.func2()
+
+[file mod1.py]
+from mod2 import func2
+
+def func1() -> int:
+    return func2()
+
+[file mod2.py]
+def func2() -> int:
+    return 1
+
+[file mod2.py.2]
+def func2() -> str:
+    return "foo"
+
+[rechecked mod0, mod1, mod2]
+[stale mod2]
+[out2]
+tmp/mod1.py:4: error: Incompatible return value type (got "str", expected "int")
+
+[case testIncrementalOkChangeWithSave]
+import mod0
+
+[file mod0.py]
+import mod1
+A = mod1.func2()
+
+[file mod1.py]
+from mod2 import func2
+
+def func1() -> int:
+    func2()
+    return 1
+
+[file mod2.py]
+def func2() -> int:
+    return 1
+
+[file mod2.py.2]
+def func2() -> str:
+    return "foo"
+
+[rechecked mod0, mod1, mod2]
+[stale mod0, mod2]
+[out2]
+
+[case testIncrementalWithComplexDictExpression]
+import mod1
+
+[file mod1.py]
+import mod1_private
+
+[file mod1_private.py]
+my_dict = {
+    'a': [1, 2, 3],
+    'b': [4, 5, 6]
+}
+
+[file mod1_private.py.2]
+my_dict = {
+    'a': [1, 2, 3],
+    'b': [4, 5, 'a']
+}
+
+[rechecked mod1, mod1_private]
+[stale mod1_private]
+[builtins fixtures/dict.pyi]
+
+[case testIncrementalWithComplexConstantExpressionNoAnnotation]
+import mod1
+
+[file mod1.py]
+import mod1_private
+
+[file mod1_private.py]
+def foobar() -> int: return 1
+def baz() -> int: return 2
+const = 1 + foobar()
+
+[file mod1_private.py.2]
+def foobar() -> int: return 1
+def baz() -> int: return 2
+const = 1 + baz()
+
+[rechecked mod1_private]
+[stale]
+
+[case testIncrementalWithComplexConstantExpressionWithAnnotation]
+import mod1
+
+[file mod1.py]
+import mod1_private
+
+[file mod1_private.py]
+def foobar() -> int: return 1
+def baz() -> int: return 2
+const = 1 + foobar()  # type: int
+
+[file mod1_private.py.2]
+def foobar() -> int: return 1
+def baz() -> int: return 2
+const = 1 + baz()  # type: int
+
+[rechecked mod1_private]
+[stale]
+
+[case testIncrementalSmall]
+import mod1
+
+[file mod1.py]
+import mod1_private
+def accepts_int(a: int) -> None: pass
+accepts_int(mod1_private.some_func(12))
+
+[file mod1_private.py]
+def some_func(a: int) -> int:
+    return 1
+
+[file mod1_private.py.2]
+def some_func(a: int) -> str:
+    return "a"
+
+[rechecked mod1, mod1_private]
+[stale mod1_private]
+[builtins fixtures/ops.pyi]
+[out2]
+tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalWithDecorators]
+import mod1
+
+[file mod1.py]
+import mod1_private
+def accepts_int(a: int) -> None: pass
+accepts_int(mod1_private.some_func(12))
+
+[file mod1_private.py]
+from typing import Callable
+def multiply(f: Callable[[int], int]) -> Callable[[int], int]:
+    return lambda a: f(a) * 10
+
+def stringify(f: Callable[[int], int]) -> Callable[[int], str]:
+    return lambda a: str(f(a))
+
+@multiply
+def some_func(a: int) -> int:
+    return a + 2
+
+[file mod1_private.py.2]
+from typing import Callable
+def multiply(f: Callable[[int], int]) -> Callable[[int], int]:
+    return lambda a: f(a) * 10
+
+def stringify(f: Callable[[int], int]) -> Callable[[int], str]:
+    return lambda a: str(f(a))
+
+@stringify
+def some_func(a: int) -> int:
+    return a + 2
+[rechecked mod1, mod1_private]
+[stale mod1_private]
+[builtins fixtures/ops.pyi]
+[out2]
+tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalChangingClassAttributes]
+import mod1
+
+[file mod1.py]
+import mod2
+mod2.Foo.A
+
+[file mod2.py]
+class Foo:
+    A = 3
+
+[file mod2.py.2]
+class Foo:
+    A = "hello"
+
+[rechecked mod1, mod2]
+[stale mod2]
+
+[case testIncrementalChangingFields]
+import mod1
+
+[file mod1.py]
+import mod2
+f = mod2.Foo()
+f.A
+
+[file mod2.py]
+class Foo:
+    def __init__(self) -> None:
+        self.A = 3
+
+[file mod2.py.2]
+class Foo:
+    def __init__(self) -> None:
+        self.A = "hello"
+
+[rechecked mod1, mod2]
+[stale mod2]
+[out2]
+
+[case testIncrementalChangingFieldsWithAssignment]
+import mod1
+
+[file mod1.py]
+import mod2
+f = mod2.Foo()
+B = f.A
+
+[file mod2.py]
+class Foo:
+    def __init__(self) -> None:
+        self.A = 3
+
+[file mod2.py.2]
+class Foo:
+    def __init__(self) -> None:
+        self.A = "hello"
+
+[rechecked mod1, mod2]
+[stale mod1, mod2]
+
+[case testIncrementalCheckingChangingFields]
+import mod1
+
+[file mod1.py]
+import mod2
+def accept_int(a: int) -> int: return a
+f = mod2.Foo()
+accept_int(f.A)
+
+[file mod2.py]
+class Foo:
+    def __init__(self) -> None:
+        self.A = 3
+
+[file mod2.py.2]
+class Foo:
+    def __init__(self) -> None:
+        self.A = "hello"
+
+[rechecked mod1, mod2]
+[stale mod2]
+[out2]
+tmp/mod1.py:4: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalNestedClassDefinition]
+import mod1
+
+[file mod1.py]
+import mod2
+b = mod2.Foo.Bar()
+b.attr
+
+[file mod2.py]
+class Foo:
+    class Bar:
+        attr = 3
+
+[file mod2.py.2]
+class Foo:
+    class Bar:
+        attr = "foo"
+
+[rechecked mod1, mod2]
+[stale mod2]
+
+[case testIncrementalSimpleBranchingModules]
+import mod1
+import mod2
+
+[file mod1.py]
+def func() -> None: pass
+
+[file mod2.py]
+def func() -> None: pass
+
+[file mod1.py.2]
+def func() -> int: return 1
+
+[rechecked mod1]
+[stale mod1]
+
+[case testIncrementalSubmoduleImport]
+from parent.childA import Foo
+
+def func1() -> Foo:
+    return Foo()
+
+[file parent/__init__.py]
+from parent.childA import Foo
+from parent.childB import Bar
+
+__all__ = ['Foo', 'Bar']
+
+[file parent/childA.py]
+import parent
+
+class Foo:
+    def test(self) -> int:
+        return parent.Bar().test()
+
+[file parent/childB.py]
+class Bar:
+    def test(self) -> int: return 3
+
+[builtins fixtures/module_all.pyi]
+[rechecked]
+[stale]
+
+[case testIncrementalSubmoduleWithAttr]
+import mod.child
+x = mod.child.Foo()
+x.bar()
+
+[file mod/__init__.py]
+
+[file mod/child.py]
+class Foo:
+    def bar(self) -> None: pass
+[builtins fixtures/module.pyi]
+[rechecked]
+[stale]
+
+[case testIncrementalNestedSubmoduleImportFromWithAttr]
+from mod1.mod2 import mod3
+def accept_int(a: int) -> None: pass
+
+accept_int(mod3.val3)
+
+[file mod1/__init__.py]
+val1 = 1
+
+[file mod1/mod2/__init__.py]
+val2 = 1
+
+[file mod1/mod2/mod3.py]
+val3 = 1
+
+[builtins fixtures/module.pyi]
+[rechecked]
+[stale]
+
+[case testIncrementalNestedSubmoduleWithAttr]
+import mod1.mod2.mod3
+def accept_int(a: int) -> None: pass
+
+accept_int(mod1.mod2.mod3.val3)
+accept_int(mod1.mod2.val2)
+accept_int(mod1.val1)
+
+[file mod1/__init__.py]
+val1 = 1
+
+[file mod1/mod2/__init__.py]
+val2 = 1
+
+[file mod1/mod2/mod3.py]
+val3 = 1
+
+[builtins fixtures/module.pyi]
+[rechecked]
+[stale]
+
+[case testIncrementalSubmoduleParentWithImportFrom]
+import parent
+
+[file parent/__init__.py]
+from parent import a
+
+[file parent/a.py]
+val = 3
+
+[builtins fixtures/args.pyi]
+[stale]
+
+[case testIncrementalSubmoduleParentBackreference]
+import parent
+
+[file parent/__init__.py]
+from parent import a
+
+[file parent/a.py]
+import parent.b
+
+[file parent/b.py]
+
+[builtins fixtures/args.pyi]
+[stale]
+
+[case testIncrementalSubmoduleParentBackreferenceComplex]
+import parent
+
+[file parent/__init__.py]
+import parent.a
+
+[file parent/a.py]
+import parent.b
+import parent.c
+
+[file parent/b.py]
+import parent.a
+
+[file parent/c.py]
+import parent.a
+
+[builtins fixtures/args.pyi]
+[stale]
+
+[case testIncrementalReferenceNewFileWithImportFrom]
+from parent import a
+
+[file parent/__init__.py]
+
+[file parent/a.py]
+
+[file parent/a.py.2]
+from parent import b
+
+[file parent/b.py.2]
+
+[stale parent, parent.a, parent.b]
+
+[case testIncrementalReferenceExistingFileWithImportFrom]
+from parent import a, b
+
+[file parent/__init__.py]
+
+[file parent/a.py]
+
+[file parent/b.py]
+
+[file parent/a.py.2]
+from parent import b
+
+[stale parent.a]
+
+[case testIncrementalWithTypeIgnoreOnDirectImport]
+import a, b
+
+[file a.py]
+import b  # type: ignore
+
+[file b.py]
+import c
+
+[file c.py]
+
+[stale]
+
+[case testIncrementalWithTypeIgnoreOnImportFrom]
+import a, b
+
+[file a.py]
+from b import something # type: ignore
+
+[file b.py]
+import c
+something = 3
+
+[file c.py]
+
+[stale]
+
+[case testIncrementalWithPartialTypeIgnore]
+import a  # type: ignore
+import a.b
+
+[file a/__init__.py]
+
+[file a/b.py]
+
+[stale]
+
+[case testIncrementalAnyIsDifferentFromIgnore]
+import b
+
+[file b.py]
+from typing import Any
+import a.b
+
+[file b.py.2]
+from typing import Any
+
+a = 3  # type: Any
+import a.b
+
+[file a/__init__.py]
+
+[file a/b.py]
+
+[rechecked b]
+[stale]
+[out2]
+tmp/b.py:4: error: Name 'a' already defined
+
+[case testIncrementalSilentImportsAndImportsInClass]
+# flags: --ignore-missing-imports
+class MyObject(object):
+    from bar import FooBar
+[stale]
+
+[case testIncrementalSameFileSize]
+import m
+
+[file m.py]
+def foo(a: int) -> None: pass
+def bar(a: str) -> None: pass
+
+foo(3)
+
+[file m.py.2]
+def foo(a: int) -> None: pass
+def bar(a: str) -> None: pass
+
+bar(3)
+
+[rechecked m]
+[stale]
+[out2]
+tmp/m.py:4: error: Argument 1 to "bar" has incompatible type "int"; expected "str"
+
+[case testIncrementalUnsilencingModule]
+# cmd: mypy -m main package.subpackage.mod2
+# cmd2: mypy -m main package.subpackage.mod1
+# flags: --follow-imports=skip
+
+[file main.py]
+from package.subpackage.mod1 import Class
+
+def handle(c: Class) -> None:
+    c.some_attribute
+
+[file package/__init__.py]
+# empty
+
+[file package/subpackage/__init__.py]
+# empty
+
+[file package/subpackage/mod1.py]
+import collections # Any previously unloaded package works here
+
+class Class: pass
+
+[file package/subpackage/mod2.py]
+# empty
+
+[builtins fixtures/args.pyi]
+[rechecked collections, main, package.subpackage.mod1]
+[stale collections, package.subpackage.mod1]
+[out2]
+tmp/main.py:4: error: "Class" has no attribute "some_attribute"
+
+[case testIncrementalWithIgnores]
+import foo # type: ignore
+
+[builtins fixtures/module.pyi]
+[stale]
+
+[case testIncrementalWithSilentImportsAndIgnore]
+# cmd: mypy -m main b
+# cmd2: mypy -m main c c.submodule
+# flags: --follow-imports=skip
+
+[file main.py]
+import a  # type: ignore
+import b
+import c
+
+a.A().foo()
+b.B().foo()
+c.C().foo()
+
+[file b.py]
+class B:
+    def foo(self) -> None: pass
+
+[file b.py.2]
+
+[file c/__init__.py]
+class C: pass
+
+[file c/submodule.py]
+val = 3  # type: int
+val = "foo"
+
+[builtins fixtures/module_all.pyi]
+[rechecked main, c, c.submodule]
+[stale]
+[out2]
+tmp/c/submodule.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/main.py:7: error: "C" has no attribute "foo"
+
+[case testIncrementalRemoteError]
+import m
+m.C().foo().bar()
+[file m.py]
+import n
+class C:
+  def foo(self) -> n.A: pass
+[file n.py]
+class A:
+  def bar(self): pass
+[file n.py.2]
+class A:
+  pass
+[rechecked m, n]
+[stale n]
+[out2]
+main:2: error: "A" has no attribute "bar"
+
+[case testIncrementalRemoteErrorFixed]
+import m
+m.C().foo().bar()
+[file m.py]
+import n
+class C:
+  def foo(self) -> n.A: pass
+[file n.py]
+class A:
+  pass
+[file n.py.2]
+class A:
+  def bar(self): pass
+[rechecked m, n]
+[stale n]
+[out1]
+main:2: error: "A" has no attribute "bar"
+
+[case testIncrementalChangedError]
+import m
+[file m.py]
+import n
+def accept_int(x: int) -> None: pass
+accept_int(n.foo)
+[file n.py]
+foo = "hello"
+reveal_type(foo)
+[file n.py.2]
+foo = 3.14
+reveal_type(foo)
+[rechecked m, n]
+[stale]
+[out1]
+tmp/n.py:2: error: Revealed type is 'builtins.str'
+tmp/m.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+[out2]
+tmp/n.py:2: error: Revealed type is 'builtins.float'
+tmp/m.py:3: error: Argument 1 to "accept_int" has incompatible type "float"; expected "int"
+
+[case testIncrementalReplacingImports]
+import good, bad, client
+
+[file good.py]
+def foo(a: int) -> None: pass
+
+[file bad.py]
+def foo(a: str) -> None: pass
+
+[file client.py]
+import good
+import bad
+from good import foo
+foo(3)
+
+[file client.py.2]
+import good
+import bad
+from bad import foo
+foo(3)
+
+[rechecked client]
+[stale]
+[out2]
+tmp/client.py:4: error: Argument 1 to "foo" has incompatible type "int"; expected "str"
+
+[case testIncrementalChangingAlias]
+import m1, m2, m3, m4, m5
+
+[file m1.py]
+from m2 import A
+def accepts_int(x: int) -> None: pass
+accepts_int(A())
+
+[file m2.py]
+from m3 import A
+
+[file m3.py]
+from m4 import B
+A = B
+
+[file m3.py.2]
+from m5 import C
+A = C
+
+[file m4.py]
+def B() -> int:
+    return 42
+
+[file m5.py]
+def C() -> str:
+    return "hello"
+
+[rechecked m1, m2, m3]
+[stale m3]
+[out2]
+tmp/m1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalStoresAliasTypeVars]
+import a
+
+[file mod.py]
+from typing import TypeVar, Union
+T = TypeVar('T')
+Alias = Union[int, T]
+x: Alias[str]
+
+[file a.py]
+from mod import Alias, x
+
+[file a.py.2]
+from mod import Alias, x
+
+reveal_type(x)
+y: Alias[int]
+reveal_type(y)
+[out2]
+tmp/a.py:3: error: Revealed type is 'Union[builtins.int, builtins.str]'
+tmp/a.py:5: error: Revealed type is 'Union[builtins.int, builtins.int]'
+
+[case testIncrementalSilentImportsWithBlatantError]
+# cmd: mypy -m main
+# flags: --follow-imports=skip
+
+[file main.py]
+from evil import Hello
+
+[file main.py.2]
+from evil import Hello
+reveal_type(Hello())
+
+[file evil.py]
+def accept_int(x: int) -> None: pass
+accept_int("not an int")
+
+[rechecked main]
+[stale]
+[out2]
+tmp/main.py:2: error: Revealed type is 'Any'
+
+[case testIncrementalImportIsNewlySilenced]
+# cmd: mypy -m main foo
+# cmd2: mypy -m main
+# flags: --follow-imports=skip
+
+[file main.py]
+from foo import bar
+def accept_int(x: int) -> None: pass
+accept_int(bar)
+
+[file foo.py]
+bar = 3
+
+[file foo.py.2]
+# Empty!
+
+[rechecked main]
+[stale main]
+
+[case testIncrementalSilencedModuleNoLongerCausesError]
+# cmd: mypy -m main evil
+# cmd2: mypy -m main
+# flags: --follow-imports=skip
+
+[file main.py]
+from evil import bar
+def accept_int(x: int) -> None: pass
+accept_int(bar)
+reveal_type(bar)
+
+[file evil.py]
+bar = "str"
+
+[rechecked main]
+[stale]
+[out1]
+tmp/main.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+tmp/main.py:4: error: Revealed type is 'builtins.str'
+[out2]
+tmp/main.py:4: error: Revealed type is 'Any'
+
+[case testIncrementalFixedBugCausesPropagation]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+val = A().makeB().makeC().foo()
+reveal_type(val)
+
+[file mod2.py]
+from mod3 import B
+class A:
+    def makeB(self) -> B: return B()
+
+[file mod3.py]
+from mod4 import C
+class B:
+    def makeC(self) -> C:
+        val = 3  # type: int
+        val = "str"   # deliberately triggering error
+        return C()
+
+[file mod3.py.2]
+from mod4 import C
+class B:
+    def makeC(self) -> C: return C()
+
+[file mod4.py]
+class C:
+    def foo(self) -> int: return 1
+
+[rechecked mod3, mod2, mod1]
+[stale mod3, mod2]
+[out1]
+tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/mod1.py:3: error: Revealed type is 'builtins.int'
+
+[out2]
+tmp/mod1.py:3: error: Revealed type is 'builtins.int'
+
+[case testIncrementalIncidentalChangeWithBugCausesPropagation]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+val = A().makeB().makeC().foo()
+reveal_type(val)
+
+[file mod2.py]
+from mod3 import B
+class A:
+    def makeB(self) -> B: return B()
+
+[file mod3.py]
+from mod4 import C
+class B:
+    def makeC(self) -> C:
+        val = 3  # type: int
+        val = "str"   # deliberately triggering error
+        return C()
+
+[file mod4.py]
+class C:
+    def foo(self) -> int: return 1
+
+[file mod4.py.2]
+class C:
+    def foo(self) -> str: return 'a'
+
+[rechecked mod4, mod3, mod2, mod1]
+[stale mod4]
+[out1]
+tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/mod1.py:3: error: Revealed type is 'builtins.int'
+
+[out2]
+tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/mod1.py:3: error: Revealed type is 'builtins.str'
+
+[case testIncrementalIncidentalChangeWithBugFixCausesPropagation]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+val = A().makeB().makeC().foo()
+reveal_type(val)
+
+[file mod2.py]
+from mod3 import B
+class A:
+    def makeB(self) -> B: return B()
+
+[file mod3.py]
+from mod4 import C
+class B:
+    def makeC(self) -> C:
+        val = 3  # type: int
+        val = "str"   # deliberately triggering error
+        return C()
+
+[file mod3.py.2]
+from mod4 import C
+class B:
+    def makeC(self) -> C: return C()
+
+[file mod4.py]
+class C:
+    def foo(self) -> int: return 1
+
+[file mod4.py.2]
+class C:
+    def foo(self) -> str: return 'a'
+
+[rechecked mod4, mod3, mod2, mod1]
+[stale mod4, mod3, mod2]
+[out1]
+tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/mod1.py:3: error: Revealed type is 'builtins.int'
+
+[out2]
+tmp/mod1.py:3: error: Revealed type is 'builtins.str'
+
+[case testIncrementalSilentImportsWithInnerImports]
+# cmd: mypy -m main foo
+# flags: --ignore-missing-imports
+
+[file main.py]
+from foo import MyClass
+m = MyClass()
+
+[file main.py.2]
+from foo import MyClass
+m = MyClass()
+reveal_type(m.val)
+
+[file foo.py]
+class MyClass:
+    def __init__(self) -> None:
+        import unrelated
+        self.val = unrelated.test()
+
+[rechecked main]
+[stale]
+[out2]
+tmp/main.py:3: error: Revealed type is 'Any'
+
+[case testIncrementalSilentImportsWithInnerImportsAndNewFile]
+# cmd: mypy -m main foo
+# cmd2: mypy -m main foo unrelated
+# flags: --follow-imports=skip
+
+[file main.py]
+from foo import MyClass
+m = MyClass()
+
+[file main.py.2]
+from foo import MyClass
+m = MyClass()
+reveal_type(m.val)
+
+[file foo.py]
+class MyClass:
+    def __init__(self) -> None:
+        import unrelated
+        self.val = unrelated.test()
+
+[file unrelated.py]
+def test() -> str: return "foo"
+
+[rechecked main, foo, unrelated]
+[stale foo, unrelated]
+[out2]
+tmp/main.py:3: error: Revealed type is 'builtins.str'
+
+[case testIncrementalWorksWithNestedClasses]
+import foo
+
+[file foo.py]
+class MyClass:
+    class NestedClass:
+        pass
+
+    class_attr = NestedClass()
+
+[rechecked]
+[stale]
+
+[case testIncrementalWorksWithNamedTuple]
+import foo
+
+[file foo.py]
+from mid import MyTuple
+def accept_int(x: int) -> None: pass
+accept_int(MyTuple(1, "b", "c").a)
+
+[file mid.py]
+from bar import MyTuple
+
+[file bar.py]
+from typing import NamedTuple
+MyTuple = NamedTuple('MyTuple', [
+    ('a', int),
+    ('b', str),
+    ('c', str)
+])
+
+[file bar.py.2]
+from typing import NamedTuple
+MyTuple = NamedTuple('MyTuple', [
+    ('b', int),  # a and b are swapped
+    ('a', str),
+    ('c', str)
+])
+
+[rechecked bar, mid, foo]
+[stale bar]
+[out2]
+tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalWorksWithNestedNamedTuple]
+import foo
+
+[file foo.py]
+from mid import Outer
+def accept_int(x: int) -> None: pass
+accept_int(Outer.MyTuple(1, "b", "c").a)
+
+[file mid.py]
+from bar import Outer
+
+[file bar.py]
+from typing import NamedTuple
+class Outer:
+    MyTuple = NamedTuple('MyTuple', [
+        ('a', int),
+        ('b', str),
+        ('c', str)
+    ])
+
+[file bar.py.2]
+from typing import NamedTuple
+class Outer:
+    MyTuple = NamedTuple('MyTuple', [
+        ('b', int),  # a and b are swapped
+        ('a', str),
+        ('c', str)
+    ])
+
+[rechecked bar, mid, foo]
+[stale bar]
+[out2]
+tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalPartialSubmoduleUpdate]
+# cmd: mypy -m a
+# cmd2: mypy -m a a.c
+# flags: --follow-imports=skip
+
+[file a/__init__.py]
+from .b import B
+from .c import C
+
+[file a/b.py]
+class B: pass
+
+[file a/c.py]
+class C: pass
+
+[file a/c.py.2]
+class C: pass
+pass
+
+[rechecked a, a.c]
+[stale a, a.c]
+[out]
+
+[case testIncrementalNestedClassRef]
+import top
+
+[file top.py]
+from funcs import callee
+from classes import Outer
+def caller(a: Outer.Inner) -> None:
+    callee(a)
+
+[file funcs.py]
+from classes import Outer
+def callee(a: Outer.Inner) -> None:
+    pass
+
+[file classes.py]
+class Outer:
+    class Inner:
+        pass
+
+[file top.py.2]
+from funcs import callee
+from classes import Outer
+def caller(a: Outer.Inner) -> int:
+    callee(a)
+    return 0
+
+[case testIncrementalLoadsParentAfterChild]
+# cmd: mypy -m r.s
+
+[file r/__init__.py]
+from . import s
+
+[file r/m.py]
+class R: pass
+
+[file r/s.py]
+from . import m
+R = m.R
+a = None  # type: R
+
+[file r/s.py.2]
+from . import m
+R = m.R
+a = None  # type: R
+
+[case testIncrementalBaseClassAttributeConflict]
+class A: pass
+class B: pass
+
+class X:
+    attr = None  # type: A
+class Y:
+    attr = None  # type: B
+class Z(X, Y): pass
+[stale]
+[out]
+main:8: error: Definition of "attr" in base class "X" is incompatible with definition in base class "Y"
+[out2]
+main:8: error: Definition of "attr" in base class "X" is incompatible with definition in base class "Y"
+
+[case testIncrementalFollowImportsSilent]
+# flags: --follow-imports=silent
+import a
+[file a.py]
+x = 0
+[file a.py.2]
+x = 0
+x + ''
+
+[case testIncrementalFollowImportsSkip]
+# flags: --follow-imports=skip
+import a
+reveal_type(a.x)
+[file a.py]
+/
+[file a.py.2]
+//
+[out]
+main:3: error: Revealed type is 'Any'
+[out2]
+main:3: error: Revealed type is 'Any'
+
+[case testIncrementalFollowImportsError]
+# flags: --follow-imports=error
+import a
+[file a.py]
+/
+[file a.py.2]
+//
+[out1]
+main:2: note: Import of 'a' ignored
+main:2: note: (Using --follow-imports=error, module not passed on command line)
+[out2]
+main:2: note: Import of 'a' ignored
+main:2: note: (Using --follow-imports=error, module not passed on command line)
+
+[case testIncrementalFollowImportsVariable]
+# flags: --config-file tmp/mypy.ini
+import a
+reveal_type(a.x)
+[file a.py]
+x = 0
+[file mypy.ini]
+[[mypy]
+follow_imports = normal
+[file mypy.ini.2]
+[[mypy]
+follow_imports = skip
+[out1]
+main:3: error: Revealed type is 'builtins.int'
+[out2]
+main:3: error: Revealed type is 'Any'
+
+[case testIncrementalNamedTupleInMethod]
+from ntcrash import nope
+[file ntcrash.py]
+from typing import NamedTuple
+class C:
+    def f(self) -> None:
+        A = NamedTuple('A', [('x', int), ('y', int)])
+[out1]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+
+[case testIncrementalNamedTupleInMethod2]
+from ntcrash import nope
+[file ntcrash.py]
+from typing import NamedTuple
+class C:
+    class D:
+        def f(self) -> None:
+            A = NamedTuple('A', [('x', int), ('y', int)])
+[out1]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+
+[case testIncrementalNamedTupleInMethod3]
+from ntcrash import nope
+[file ntcrash.py]
+from typing import NamedTuple
+class C:
+    def a(self):
+        class D:
+            def f(self) -> None:
+                A = NamedTuple('A', [('x', int), ('y', int)])
+[out1]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+
+[case testIncrementalTypedDictInMethod]
+from tdcrash import nope
+[file tdcrash.py]
+from mypy_extensions import TypedDict
+class C:
+    def f(self) -> None:
+        A = TypedDict('A', {'x': int, 'y': int})
+[builtins fixtures/dict.pyi]
+[out1]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+
+[case testIncrementalTypedDictInMethod2]
+from tdcrash import nope
+[file tdcrash.py]
+from mypy_extensions import TypedDict
+class C:
+    class D:
+        def f(self) -> None:
+            A = TypedDict('A', {'x': int, 'y': int})
+[builtins fixtures/dict.pyi]
+[out1]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+
+[case testIncrementalTypedDictInMethod3]
+from tdcrash import nope
+[file tdcrash.py]
+from mypy_extensions import TypedDict
+class C:
+    def a(self):
+        class D:
+            def f(self) -> None:
+                A = TypedDict('A', {'x': int, 'y': int})
+[builtins fixtures/dict.pyi]
+[out1]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+
+[case testIncrementalInnerClassAttrInMethod]
+import crash
+nonexisting
+[file crash.py]
+class C:
+    def f(self) -> None:
+        class A:
+            pass
+        self.a = A()
+[out1]
+main:2: error: Name 'nonexisting' is not defined
+[out2]
+main:2: error: Name 'nonexisting' is not defined
+
+[case testIncrementalInnerClassAttrInMethodReveal]
+import crash
+reveal_type(crash.C().a)
+reveal_type(crash.D().a)
+[file crash.py]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class C:
+    def f(self) -> None:
+        class A:
+            pass
+        self.a = A()
+reveal_type(C().a)
+class D:
+    def f(self) -> None:
+        class A:
+            def g(self) -> None:
+                class B(Generic[T]):
+                    pass
+                self.b = B[int]()
+        self.a = A().b
+reveal_type(D().a)
+[out1]
+tmp/crash.py:8: error: Revealed type is 'crash.A@5'
+tmp/crash.py:17: error: Revealed type is 'crash.B@13[builtins.int*]'
+main:2: error: Revealed type is 'crash.A@5'
+main:3: error: Revealed type is 'crash.B@13[builtins.int*]'
+[out2]
+tmp/crash.py:8: error: Revealed type is 'crash.A@5'
+tmp/crash.py:17: error: Revealed type is 'crash.B@13[builtins.int*]'
+main:2: error: Revealed type is 'crash.A@5'
+main:3: error: Revealed type is 'crash.B@13[builtins.int*]'
+
+[case testGenericMethodRestoreMetaLevel]
+from typing import Dict
+
+d = {}  # type: Dict[str, int]
+g = d.get  # This should not crash: see https://github.com/python/mypy/issues/2804
+[builtins fixtures/dict.pyi]
+
+[case testGenericMethodRestoreMetaLevel2]
+from typing import TypeVar
+
+T = TypeVar('T')
+
+class D:
+    def m(self, x: T) -> T:
+        return x
+
+g = D().m  # This should not crash: see https://github.com/python/mypy/issues/2804
+[builtins fixtures/dict.pyi]
+
+[case testGenericMethodRestoreMetaLevel3]
+from typing import TypeVar
+T = TypeVar('T')
+
+class C:
+    def m(self, x: T) -> T:
+        return x
+
+class D(C):
+    def __init__(self) -> None:
+        self.d = super().m # This should not crash: see https://github.com/python/mypy/issues/2804
+[builtins fixtures/dict.pyi]
+
+[case testIncrementalPerFileFlags]
+# flags: --config-file tmp/mypy.ini
+import a
+[file a.py]
+pass
+[file mypy.ini]
+[[mypy]
+warn_no_return = False
+[[mypy-a]
+warn_no_return = True
+[rechecked]
+
+[case testIncrementalClassVar]
+from typing import ClassVar
+class A:
+    x = None  # type: ClassVar
+A().x = 0
+[out1]
+main:4: error: Cannot assign to class variable "x" via instance
+[out2]
+main:4: error: Cannot assign to class variable "x" via instance
+
+[case testIncrementalClassVarGone]
+import m
+m.A().x = 0
+[file m.py]
+from typing import ClassVar
+class A:
+    x = None  # type: ClassVar[int]
+[file m.py.2]
+class A:
+    x = None  # type: int
+[out1]
+main:2: error: Cannot assign to class variable "x" via instance
+
+[case testCachingClassVar]
+import b
+[file a.py]
+from typing import ClassVar
+class A:
+    x = None  # type: ClassVar[int]
+[file b.py]
+import a
+[file b.py.2]
+import a
+a.A().x = 0
+[out2]
+tmp/b.py:2: error: Cannot assign to class variable "x" via instance
+
+[case testSerializeTypedDict]
+import b
+reveal_type(b.x)
+y: b.A
+reveal_type(y)
+[file b.py]
+from mypy_extensions import TypedDict
+A = TypedDict('A', {'x': int, 'y': str})
+x: A
+[builtins fixtures/dict.pyi]
+[out1]
+main:2: error: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})'
+main:4: error: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})'
+[out2]
+main:2: error: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})'
+main:4: error: Revealed type is 'TypedDict('b.A', {'x': builtins.int, 'y': builtins.str})'
+
+[case testSerializeMetaclass]
+import b
+reveal_type(b.A.f())
+m: b.M = b.A
+reveal_type(b.a.f())
+[file b.py]
+from typing import Type
+
+class M(type):
+    def f(cls) -> int: return 0
+class A(metaclass=M): pass
+a: Type[A]
+[out]
+main:2: error: Revealed type is 'builtins.int'
+main:4: error: Revealed type is 'builtins.int'
+[out2]
+main:2: error: Revealed type is 'builtins.int'
+main:4: error: Revealed type is 'builtins.int'
+
+[case testSerializeMetaclassInImportCycle1]
+import b
+import c
+reveal_type(b.A.f())
+m: c.M = b.A
+reveal_type(b.a.f())
+[file b.py]
+from typing import Type
+from c import M
+class A(metaclass=M): pass
+a: Type[A]
+[file c.py]
+class M(type):
+    def f(cls) -> int: return 0
+[out]
+main:3: error: Revealed type is 'builtins.int'
+main:5: error: Revealed type is 'builtins.int'
+[out2]
+main:3: error: Revealed type is 'builtins.int'
+main:5: error: Revealed type is 'builtins.int'
+
+-- TODO: Add another test for metaclass in import cycle (reversed from the above test).
+--       This currently does not work.
+
+[case testDeleteFile]
+import n
+[file n.py]
+import m
+[file m.py]
+x = 1
+[delete m.py.2]
+[rechecked n]
+[stale]
+[out2]
+tmp/n.py:1: error: Cannot find module named 'm'
+tmp/n.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testDeleteFileWithinCycle]
+import a
+[file a.py]
+import b
+[file b.py]
+import c
+[file c.py]
+import a
+[file a.py.2]
+import c
+[delete b.py.2]
+[rechecked a, c]
+[stale a]
+[out2]
+
+[case testThreePassesBasic]
+import m
+[file m.py]
+def foo():
+    pass
+[file m.py.2]
+def foo() -> None:
+    pass
+[file m.py.3]
+def foo():
+    pass
+[rechecked m]
+[stale m]
+[rechecked2 m]
+[stale2 m]
+[out3]
+
+[case testThreePassesErrorInThirdPass]
+import m
+[file m.py]
+def foo():
+    pass
+[file m.py.2]
+def foo() -> None:
+    pass
+[file m.py.3]
+def foo() -> int:
+    return ''
+[rechecked m]
+[stale m]
+[rechecked2 m]
+[stale2]
+[out3]
+tmp/m.py:2: error: Incompatible return value type (got "str", expected "int")
+
+[case testThreePassesThirdPassFixesError]
+import n
+[file n.py]
+import m
+x = m.foo(1)
+[file m.py]
+def foo(x):
+    pass
+[file m.py.2]
+def foo() -> str:
+    pass
+[file m.py.3]
+def foo(x) -> int:
+    pass
+[rechecked m, n]
+[stale m]
+[rechecked2 m, n]
+[stale2 m, n]
+[out2]
+tmp/n.py:2: error: Too many arguments for "foo"
+[out3]
+
+--
+-- Quick mode
+--
+
+[case testQuickAndDirtyInterfaceChangeDoesNotPropagate]
+# flags: --quick-and-dirty
+import b, c
+[file a.py]
+def a(): pass
+[file b.py]
+import a
+import c
+[file c.py]
+import a
+import b
+[file a.py.2]
+def a(x): pass
+[rechecked a]
+[stale a]
+
+[case testQuickAndDirtyDoesNotInvalidateImportCycle]
+# flags: --quick-and-dirty
+import b, c
+[file a.py]
+def a(): pass
+[file b.py]
+import a
+import c
+[file c.py]
+import a
+import b
+[file b.py.2]
+import a
+import c
+x = 0
+[rechecked b]
+[stale b]
+
+[case testQuickAndDirtySwitchToIncrementalMode]
+# flags: --quick-and-dirty
+# flags2: --incremental
+import a, b
+[file a.py]
+import b
+[file b.py]
+import a
+[rechecked a, b, builtins]
+[stale a, b, builtins]
+
+[case testQuickAndDirtyFixErrorInExistingFunction]
+# flags: --quick-and-dirty
+import a, b
+[file a.py]
+import b
+def foo() -> int: return ''
+[file b.py]
+import a
+[file a.py.2]
+def foo() -> int: return 0
+[out1]
+tmp/a.py:2: error: Incompatible return value type (got "str", expected "int")
+[out2]
+[rechecked a]
+[stale a]
+
+[case testQuickAndDirtyIntroduceErrorInNewFunction]
+# flags: --quick-and-dirty
+import a, b
+[file a.py]
+import b
+[file b.py]
+import a
+[file a.py.2]
+import b
+def foo() -> int: return ''
+[out1]
+[out2]
+tmp/a.py:2: error: Incompatible return value type (got "str", expected "int")
+[rechecked a]
+[stale]
+
+[case testQuickAndDirtyPersistingError]
+# flags: --quick-and-dirty
+import a, b
+[file a.py]
+import b
+def foo() -> int: return ''
+[file b.py]
+import a
+[file a.py.2]
+import b
+def foo() -> int: return 0.5
+[out1]
+tmp/a.py:2: error: Incompatible return value type (got "str", expected "int")
+[out2]
+tmp/a.py:2: error: Incompatible return value type (got "float", expected "int")
+[rechecked a]
+[stale]
+
+[case testQuickAndDirtyIntroduceReferencesWithinCycle]
+# flags: --quick-and-dirty
+import a, b
+[file a.py]
+import b
+[file b.py]
+import a
+class C: pass
+def f() -> int: pass
+[file a.py.2]
+import b
+reveal_type(b.C)
+reveal_type(b.f)
+[out1]
+[out2]
+tmp/a.py:2: error: Revealed type is 'def () -> b.C'
+tmp/a.py:3: error: Revealed type is 'def () -> builtins.int'
+[rechecked a]
+[stale]
+
+[case testQuickAndDirtyIntroduceReferencesWithinCycle2]
+# flags: --quick-and-dirty
+import a, b
+[file a.py]
+import b
+class C: pass
+def f() -> int: pass
+[file b.py]
+import a
+[file b.py.2]
+import a
+reveal_type(a.C)
+reveal_type(a.f)
+[out1]
+[out2]
+tmp/b.py:2: error: Revealed type is 'def () -> a.C'
+tmp/b.py:3: error: Revealed type is 'def () -> builtins.int'
+[rechecked b]
+[stale]
+
+[case testQuickAndDirtyIntroduceReferencesWithinCycleNoError]
+# flags: --quick-and-dirty
+import a, b, c
+[file a.py]
+import b
+[file b.py]
+import a
+class C: pass
+def f() -> int: pass
+[file c.py]
+[file a.py.2]
+import b
+def g() -> b.C: pass
+h = b.f
+[file c.py.3]
+import a
+reveal_type(a.g)
+reveal_type(a.h)
+[out1]
+[out2]
+[out3]
+tmp/c.py:2: error: Revealed type is 'def () -> b.C'
+tmp/c.py:3: error: Revealed type is 'def () -> builtins.int'
+[rechecked a]
+[stale a]
+[rechecked2 c]
+[stale2]
+
+[case testQuickAndDirtyIntroduceReferencesWithinCycleNoError2]
+# flags: --quick-and-dirty
+import a, b, c
+[file a.py]
+import b
+class C: pass
+def f() -> int: pass
+[file b.py]
+import a
+[file c.py]
+[file b.py.2]
+import a
+def g() -> a.C: pass
+h = a.f
+[file c.py.3]
+import b
+reveal_type(b.g)
+reveal_type(b.h)
+[out1]
+[out2]
+[out3]
+tmp/c.py:2: error: Revealed type is 'def () -> a.C'
+tmp/c.py:3: error: Revealed type is 'def () -> builtins.int'
+[rechecked b]
+[stale b]
+[rechecked2 c]
+[stale2]
+
+-- (The behavior for blockers is no different from regular incremental mode.)
+[case testQuickAndDirtyBlockerOnFirstRound]
+# flags: --quick-and-dirty
+import a, b
+[file a.py]
+import b
+class B(C): pass
+class C(B): pass  # blocker
+[file b.py]
+import a
+[file a.py.2]
+import b
+class B: pass
+class C(B): pass
+[out1]
+tmp/a.py:3: error: Cycle in inheritance hierarchy
+[out2]
+[rechecked a, b]
+[stale a, b]
+
+[case testQuickAndDirtyBlockerOnSecondRound]
+# flags: --quick-and-dirty
+import a, b
+[file a.py]
+import b
+class B: pass
+class C(B): pass
+[file b.py]
+import a
+[file a.py.2]
+import b
+class B(C): pass
+class C(B): pass  # blocker
+[out1]
+[out2]
+tmp/a.py:3: error: Cycle in inheritance hierarchy
+[rechecked a, b]
+[stale a, b]
+
+[case testQuickAndDirtyRenameFunctionInTwoModules]
+# flags: --quick-and-dirty
+import a, b, c, d
+[file a.py]
+import d
+def f(): pass
+[file b.py]
+from a import f
+[file c.py]
+from b import f
+[file d.py]
+from c import f
+[file a.py.2]
+import d
+def g(): pass  # renamed f to g
+[file c.py.2]
+from a import g
+
+[case testQuickAndDirtyUnmodifiedModuleDoesNotGenerateError]
+# flags: --quick-and-dirty
+import a, b, c, d
+[file a.py]
+import d
+class C:
+    def f(self): pass
+[file b.py]
+from a import C
+[file c.py]
+from b import C
+[file d.py]
+from c import C
+C().f()  # no error because unmodified
+[file a.py.2]
+import d
+class C:
+    def g(self): pass  # renamed f to g
+[file c.py.2]
+from a import C
+[out1]
+[out2]
+
+[case testQuickAndDirtyUnmodifiedModuleDoesNotGenerateError2]
+# flags: --quick-and-dirty
+import a, b, c
+[file a.py]
+import c
+class C:
+    x = 0
+[file b.py]
+import a
+x = a.C.x  # type: int
+[file c.py]
+import b
+x = b.x
+[file a.py.2]
+import c
+class C:
+    pass  # Removed x
+[out1]
+[out2]
+[rechecked a]
+[stale a]
+
+[case testQuickAndDirtyTypeAliasReference]
+# flags: --quick-and-dirty
+import a, b
+[file a.py]
+import b
+def f(x: b.S) -> b.S: return x
+[file b.py]
+import a
+S = str
+[file a.py.2]
+import b
+def f(x: b.S) -> int: return 0
+
+[case testQuickAndDirtyNamedTupleReference]
+# flags: --quick-and-dirty
+import a, b
+[file a.py]
+import b
+def f(x: b.P) -> b.P: return x
+[file b.py]
+from typing import NamedTuple
+import a
+P = NamedTuple('P', (('x', int),))
+[file a.py.2]
+import b
+def f(x: b.P) -> int: return 0
+
+[case testQuickAndDirtyTypeVarReference]
+# flags: --quick-and-dirty
+import a, b
+[file a.py]
+import b
+def f(x: b.T) -> b.T: return x
+[file b.py]
+from typing import TypeVar
+import a
+T = TypeVar('T')
+[file a.py.2]
+import b
+def f(x: b.T) -> int: return 0
+
+[case testQuickAndDirtyDeleteFunctionUsedByOtherModule]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+from b import f
+[file b.py]
+import a
+def f() -> int: pass
+a.f()
+[file b.py.2]
+import a
+reveal_type(a.f)
+[out2]
+tmp/b.py:2: error: Revealed type is 'Any'
+
+[case testQuickAndDirtyDeleteClassUsedInAnnotation]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+def f() -> b.C: pass
+[file b.py]
+import a
+class C: pass
+[file b.py.2]
+import a
+reveal_type(a.f)
+a.f().x
+[out2]
+tmp/b.py:2: error: Revealed type is 'def () -> <stale cache: consider running mypy without --quick>'
+tmp/b.py:3: error: "<stale cache: consider running mypy without --quick>" has no attribute "x"
+
+[case testQuickAndDirtyDeleteClassUsedAsBase]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+class D(b.C): pass
+[file b.py]
+import a
+class C: pass
+[file b.py.2]
+import a
+reveal_type(a.D)
+a.D().x
+[out2]
+tmp/b.py:2: error: Revealed type is 'Any'
+
+[case testQuickAndDirtyDeleteNestedClassUsedInAnnotation]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+def f() -> b.C.D: pass
+[file b.py]
+import a
+class C:
+    class D: pass
+[file b.py.2]
+import a
+class C:
+    pass
+reveal_type(a.f)
+a.f().x
+[out2]
+tmp/b.py:4: error: Revealed type is 'def () -> <stale cache: consider running mypy without --quick>'
+tmp/b.py:5: error: "<stale cache: consider running mypy without --quick>" has no attribute "x"
+
+[case testQuickAndDirtyTurnGenericClassIntoNonGeneric-skip]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+def f() -> b.C[int]: pass
+[file b.py]
+from typing import TypeVar, Generic
+import a
+T = TypeVar('T')
+class C(Generic[T]): pass
+[file b.py.2]
+import a
+class C: pass
+reveal_type(a.f)
+c: C
+d = a.f()
+c = d
+d = c
+[out2]
+# TODO: Crashes (https://github.com/python/mypy/issues/3279)
+
+[case testQuickAndDirtyTurnClassIntoGenericOne-skip]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+def f() -> b.C: pass
+[file b.py]
+import a
+class C: pass
+[file b.py.2]
+from typing import TypeVar, Generic
+import a
+T = TypeVar('T')
+class C(Generic[T]): pass
+reveal_type(a.f)
+c: C[int]
+d = a.f()
+d = c
+c = d
+[out2]
+# TODO: Crashes (https://github.com/python/mypy/issues/3279)
+
+[case testQuickAndDirtyDeleteTypeVarUsedInAnnotation]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+def f(x: b.T) -> b.T: return x
+[file b.py]
+from typing import TypeVar
+import a
+T = TypeVar('T')
+[file b.py.2]
+import a
+reveal_type(a.f)
+reveal_type(a.f(1))
+[out2]
+tmp/b.py:2: error: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1'
+tmp/b.py:3: error: Revealed type is 'builtins.int*'
+
+[case testQuickAndDirtyDeleteNewTypeUsedInAnnotation]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+def f() -> b.C: pass
+[file b.py]
+from typing import NewType
+import a
+C = NewType('C', int)
+[file b.py.2]
+import a
+reveal_type(a.f)
+a.f().x
+[out2]
+tmp/b.py:2: error: Revealed type is 'def () -> <stale cache: consider running mypy without --quick>'
+tmp/b.py:3: error: "<stale cache: consider running mypy without --quick>" has no attribute "x"
+
+[case testQuickAndDirtyChangeClassIntoFunction]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+def f() -> b.C: pass
+[file b.py]
+import a
+class C: pass
+[file b.py.2]
+import a
+def C() -> None: pass
+reveal_type(a.f)
+a.f().x
+[out2]
+tmp/b.py:3: error: Revealed type is 'def () -> <stale cache: consider running mypy without --quick>'
+tmp/b.py:4: error: "<stale cache: consider running mypy without --quick>" has no attribute "x"
+
+[case testQuickAndDirtyChangeClassIntoVariable]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+def f() -> b.C: pass
+[file b.py]
+import a
+class C: pass
+[file b.py.2]
+import a
+C = 0
+reveal_type(a.f)
+a.f().x
+[out2]
+tmp/b.py:3: error: Revealed type is 'def () -> <stale cache: consider running mypy without --quick>'
+tmp/b.py:4: error: "<stale cache: consider running mypy without --quick>" has no attribute "x"
+
+[case testQuickAndDirtyAddFile]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+x = ''
+[file b.py]
+import a
+[file b.py.2]
+import c
+reveal_type(c.x)
+[file c.py.2]
+import a
+x = 1
+reveal_type(a.x)
+[rechecked b, c]
+[stale]
+[out2]
+tmp/c.py:3: error: Revealed type is 'builtins.str'
+tmp/b.py:2: error: Revealed type is 'builtins.int'
+
+[case testQuickAndDirtyDeleteFile]
+# flags: --quick-and-dirty
+import b
+[file a.py]
+def f() -> None: pass
+[file b.py]
+import a
+a.f()
+[delete a.py.2]
+[file b.py.3]
+import a
+a.f() # Comment change
+[file b.py.4]
+# Remove import
+[rechecked b]
+[stale]
+[rechecked2 b]
+[stale2]
+[rechecked3 b]
+[stale3 b]
+[out2]
+tmp/b.py:1: error: Cannot find module named 'a'
+tmp/b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+[out3]
+tmp/b.py:1: error: Cannot find module named 'a'
+tmp/b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+[out4]
+
+[case testQuickAndDirtyRenameModule]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+b.f()
+[file b.py]
+def f() -> None: pass
+[delete b.py.2]
+[file c.py.2]
+def f() -> None: pass
+[file a.py.2]
+import c
+c.f(1)
+[file c.py.3]
+def f() -> None: pass # comment change
+[file c.py.4]
+def f(x) -> None: pass
+[out]
+[out2]
+tmp/a.py:2: error: Too many arguments for "f"
+[out3]
+tmp/a.py:2: error: Too many arguments for "f"
+[out4]
+[rechecked a, c]
+[stale c]
+[rechecked2 a, c]
+[stale2]
+[rechecked3 a, c]
+[stale3 a, c]
+
+[case testQuickAndDirtyMultiplePasses]
+# flags: --quick-and-dirty
+import a
+[file a.py]
+import b
+b.f()
+[file b.py]
+def f() -> None: pass
+[file b.py.2]
+# Write cache file but the error in a is not caught yet.
+def f(x) -> None: pass
+[file a.py.3]
+# Editing a triggers the error.
+import b
+b.f()
+[rechecked b]
+[rechecked2 a]
+[out2]
+[out3]
+tmp/a.py:3: error: Too few arguments for "f"
+
+[case testQuickAndDirtySerializeStaleType]
+# flags: --quick-and-dirty
+import a, c
+[file a.py]
+import b
+def f() -> b.C: pass
+[file b.py]
+import a
+class C: pass
+[file c.py]
+[file b.py.2]
+import a
+x = a.f()
+[file c.py.3]
+import b
+reveal_type(b.x)
+def g(x: object) -> None: pass
+g(b.x)
+b.x.y
+[rechecked b]
+[stale b]
+[rechecked2 c]
+[stale2]
+[out3]
+tmp/c.py:2: error: Revealed type is '<stale cache: consider running mypy without --quick>'
+tmp/c.py:5: error: "<stale cache: consider running mypy without --quick>" has no attribute "y"
+
+[case testNoCrashOnDoubleImportAliasQuick]
+# cmd: mypy -m e
+# cmd2: mypy -m c
+# cmd3: mypy -m e
+# flags: --quick
+[file c.py]
+from typing import List
+Alias = List[int]
+[file c.py.2]
+from typing import List
+Alias = int
+
+[file d.py]
+from c import Alias
+
+[file e.py]
+from d import Alias
+[file e.py.3]
+from d import Alias
+x: Alias
+[out3]
+[builtins fixtures/list.pyi]
+
+[case testSerializeAbstractPropertyIncremental]
+from abc import abstractmethod
+import typing
+class A:
+    @property
+    def f(self) -> int:
+        return 1
+    @f.setter  # type: ignore
+    @abstractmethod
+    def f(self, x: int) -> None:
+        pass
+a = A()
+[builtins fixtures/property.pyi]
+
+[case testSerializeAbstractPropertyDisallowUntypedIncremental]
+# flags: --disallow-untyped-defs
+from abc import abstractmethod
+import typing
+class A:
+    @property
+    def f(self) -> int:
+        return 1
+    @f.setter  # type: ignore
+    @abstractmethod
+    def f(self, x: int) -> None:
+        pass
+a = A()
+[builtins fixtures/property.pyi]
+
+[case testClassNamesResolutionCrashAccess]
+import mod
+
+[file mod.py]
+class C:
+    def __init__(self) -> None:
+        self.int = ''
+
+    def f(self, f: int) -> None:
+        pass
+
+[file mod.py.2]
+class C:
+    def __init__(self) -> None:
+        self.int = ''
+
+    def f(self, f: int) -> None:
+        f.x
+
+[out]
+[out2]
+tmp/mod.py:6: error: "int" has no attribute "x"
+
+[case testClassNamesResolutionCrashReadCache]
+import mod
+
+[file mod.py]
+import submod
+
+[file mod.py.2]
+from submod import C
+
+c = C()
+reveal_type(c.int)
+reveal_type(c.y)
+
+[file submod.py]
+from typing import List
+
+class C:
+    def __init__(self) -> None:
+        self.int = []  # type: List[int]
+
+    def f(self, f: int) -> None:
+        self.y = f
+
+[builtins fixtures/list.pyi]
+[out]
+[out2]
+tmp/mod.py:4: error: Revealed type is 'builtins.list[builtins.int]'
+tmp/mod.py:5: error: Revealed type is 'builtins.int'
+
+[case testClassNamesResolutionCrashReveal]
+import mod
+
+[file mod.py]
+class Foo(object):
+
+    def __init__(self) -> None:
+        self.bytes = b"foo"
+
+    def bar(self, f: bytes):
+        pass
+
+foo = Foo()
+foo.bar(b"test")
+
+[file mod.py.2]
+class Foo(object):
+
+    def __init__(self) -> None:
+        self.bytes = b"foo"
+
+    def bar(self, f: bytes):
+        reveal_type(f)
+
+foo = Foo()
+foo.bar(b"test")
+[out]
+[out2]
+tmp/mod.py:7: error: Revealed type is 'builtins.bytes'
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test
new file mode 100644
index 0000000..9896c11
--- /dev/null
+++ b/test-data/unit/check-inference-context.test
@@ -0,0 +1,897 @@
+
+
+-- Basic test cases
+-- ----------------
+
+
+[case testBasicContextInference]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ab = None # type: A[B]
+ao = None # type: A[object]
+b = None # type: B
+
+ao = f()
+ab = f()
+b = f() # E: Incompatible types in assignment (expression has type A[<nothing>], variable has type "B")
+
+def f() -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+
+[case testBasicContextInferenceForConstructor]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ab = None # type: A[B]
+ao = None # type: A[object]
+b = None # type: B
+
+ao = A()
+ab = A()
+b = A() # E: Incompatible types in assignment (expression has type A[<nothing>], variable has type "B")
+
+class A(Generic[T]): pass
+class B: pass
+
+[case testIncompatibleContextInference]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+b = None # type: B
+c = None # type: C
+ab = None # type: A[B]
+ao = None # type: A[object]
+ac = None # type: A[C]
+
+ac = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
+ab = f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "B"
+
+ao = f(b)
+ab = f(b)
+ao = f(c)
+ac = f(c)
+
+def f(a: T) -> 'A[T]':
+    pass
+
+class A(Generic[T]): pass
+
+class B: pass
+class C: pass
+
+
+-- Local variables
+-- ---------------
+
+
+[case testInferGenericLocalVariableTypeWithEmptyContext]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def g() -> None:
+    ao = None # type: A[object]
+    ab = None # type: A[B]
+    o = None # type: object
+    b = None # type: B
+
+    x = f(o)
+    ab = x # E: Incompatible types in assignment (expression has type A[object], variable has type A[B])
+    ao = x
+    y = f(b)
+    ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ab = y
+
+def f(a: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+[out]
+
+[case testInferLocalVariableTypeWithUnderspecifiedGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def g() -> None:
+    x = f() # E: Need type annotation for variable
+
+def f() -> 'A[T]': pass
+class A(Generic[T]): pass
+[out]
+
+[case testInferMultipleLocalVariableTypesWithTupleRvalue]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def g() -> None:
+    ao = None # type: A[object]
+    ab = None # type: A[B]
+    b = None # type: B
+    x, y = f(b), f(b)
+    ao = x # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ab = x
+    ab = y
+
+def f(a: T) -> 'A[T]': pass
+class A(Generic[T]): pass
+class B: pass
+[out]
+
+[case testInferMultipleLocalVariableTypesWithArrayRvalueAndNesting]
+from typing import TypeVar, List, Generic
+T = TypeVar('T')
+def h() -> None:
+    ao = None # type: A[object]
+    ab = None # type: A[B]
+    b = None # type: B
+    x, y = g(f(b))
+    ao = x # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ab = x
+    ab = y
+
+def f(a: T) -> 'A[T]': pass
+def g(a: T) -> List[T]: pass
+
+class A(Generic[T]): pass
+class B: pass
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Return types with multiple tvar instances
+-- -----------------------------------------
+
+
+[case testInferenceWithTypeVariableTwiceInReturnType]
+from typing import TypeVar, Tuple, Generic
+T = TypeVar('T')
+b = None # type: B
+o = None # type: object
+ab = None # type: A[B]
+ao = None # type: A[object]
+
+ab, ao = f(b) # Fail
+ao, ab = f(b) # Fail
+
+ao, ao = f(b)
+ab, ab = f(b)
+ao, ao = f(o)
+
+def f(a: T) -> 'Tuple[A[T], A[T]]': pass
+
+class A(Generic[T]): pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+[out]
+main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+main:9: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+
+[case testInferenceWithTypeVariableTwiceInReturnTypeAndMultipleVariables]
+from typing import TypeVar, Tuple, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+b = None # type: B
+o = None # type: object
+ab = None # type: A[B]
+ao = None # type: A[object]
+
+ao, ao, ab = f(b, b)     # Fail
+ao, ab, ao = g(b, b)     # Fail
+ao, ab, ab, ab = h(b, b) # Fail
+ab, ab, ao, ab = h(b, b) # Fail
+
+ao, ab, ab = f(b, b)
+ab, ab, ao = g(b, b)
+ab, ab, ab, ab = h(b, b)
+
+def f(a: S, b: T) -> 'Tuple[A[S], A[T], A[T]]': pass
+def g(a: S, b: T) -> 'Tuple[A[S], A[S], A[T]]': pass
+def h(a: S, b: T) -> 'Tuple[A[S], A[S], A[T], A[T]]': pass
+
+class A(Generic[T]): pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+[out]
+main:9: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+main:10: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+main:11: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+main:12: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+
+
+-- Multiple tvar instances in arguments
+-- ------------------------------------
+
+
+[case testMultipleTvarInstancesInArgs]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ac = None # type: A[C]
+ab = None # type: A[B]
+ao = None # type: A[object]
+b = None # type: B
+c = None # type: C
+o = None # type: object
+
+ab = f(b, o) # E: Argument 2 to "f" has incompatible type "object"; expected "B"
+ab = f(o, b) # E: Argument 1 to "f" has incompatible type "object"; expected "B"
+ac = f(b, c) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
+ac = f(c, b) # E: Argument 2 to "f" has incompatible type "B"; expected "C"
+
+ao = f(b, c)
+ao = f(c, b)
+ab = f(c, b)
+
+def f(a: T, b: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+class C(B): pass
+
+
+-- Nested generic function calls
+-- -----------------------------
+
+
+[case testNestedGenericFunctionCall1]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+aab = None # type: A[A[B]]
+aao = None # type: A[A[object]]
+ao = None # type: A[object]
+b = None # type: B
+o = None # type: object
+
+aab = f(f(o)) # E: Argument 1 to "f" has incompatible type "object"; expected "B"
+
+aab = f(f(b))
+aao = f(f(b))
+ao = f(f(b))
+
+def f(a: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+
+[case testNestedGenericFunctionCall2]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ab = None # type: A[B]
+ao = None # type: A[object]
+b = None # type: B
+o = None # type: object
+
+ab = f(g(o)) # E: Argument 1 to "g" has incompatible type "object"; expected "B"
+
+ab = f(g(b))
+ao = f(g(b))
+
+def f(a: T) -> T: pass
+
+def g(a: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+
+[case testNestedGenericFunctionCall3]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ab = None # type: A[B]
+ao = None # type: A[object]
+b = None # type: B
+o = None # type: object
+
+ab = f(g(o), g(b)) # E: Argument 1 to "g" has incompatible type "object"; expected "B"
+ab = f(g(b), g(o)) # E: Argument 1 to "g" has incompatible type "object"; expected "B"
+
+ab = f(g(b), g(b))
+ao = f(g(b), g(o))
+ao = f(g(o), g(b))
+
+def f(a: T, b: T) -> T:
+    pass
+
+def g(a: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+
+
+-- Method calls
+-- ------------
+
+
+[case testMethodCallWithContextInference]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+o = None # type: object
+b = None # type: B
+c = None # type: C
+ao = None # type: A[object]
+ab = None # type: A[B]
+ac = None # type: A[C]
+
+ab.g(f(o))        # E: Argument 1 to "f" has incompatible type "object"; expected "B"
+ac = f(b).g(f(c)) # E: Incompatible types in assignment (expression has type A[B], variable has type A[C])
+ac = f(c).g(f(b)) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
+
+ab = f(b).g(f(c))
+ab.g(f(c))
+
+def f(a: T) -> 'A[T]': pass
+
+class A(Generic[T]):
+    def g(self, a: 'A[T]') -> 'A[T]': pass
+
+class B: pass
+class C(B): pass
+
+
+-- List expressions
+-- ----------------
+
+
+[case testEmptyListExpression]
+from typing import List
+aa = None # type: List[A]
+ao = None # type: List[object]
+a = None # type: A
+
+a = [] # E: Incompatible types in assignment (expression has type List[<nothing>], variable has type "A")
+
+aa = []
+ao = []
+
+class A: pass
+[builtins fixtures/list.pyi]
+
+[case testSingleItemListExpressions]
+from typing import List
+aa = None # type: List[A]
+ab = None # type: List[B]
+ao = None # type: List[object]
+a = None # type: A
+b = None # type: B
+
+aa = [b] # E: List item 0 has incompatible type "B"
+ab = [a] # E: List item 0 has incompatible type "A"
+
+aa = [a]
+ab = [b]
+ao = [a]
+aa = [None]
+ao = [None]
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testMultiItemListExpressions]
+from typing import List
+aa = None # type: List[A]
+ab = None # type: List[B]
+ao = None # type: List[object]
+a = None # type: A
+b = None # type: B
+
+ab = [b, a] # E: List item 1 has incompatible type "A"
+ab = [a, b] # E: List item 0 has incompatible type "A"
+
+aa = [a, b, a]
+ao = [a, b]
+
+class A: pass
+class B(A): pass
+[builtins fixtures/list.pyi]
+
+[case testLocalVariableInferenceFromEmptyList]
+import typing
+def f() -> None:
+    a = []     # E: Need type annotation for variable
+    b = [None]
+    c = [B()]
+    c = [object()] # E: List item 0 has incompatible type "object"
+    c = [B()]
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNestedListExpressions]
+from typing import List
+aao = None # type: List[List[object]]
+aab = None # type: List[List[B]]
+ab = None # type: List[B]
+b = None # type: B
+o = None # type: object
+
+aao = [[o], ab] # E: List item 1 has incompatible type List[B]
+aab = [[], [o]] # E: List item 0 has incompatible type "object"
+
+aao = [[None], [b], [], [o]]
+aab = [[None], [b], []]
+aab = [ab, []]
+
+class B: pass
+[builtins fixtures/list.pyi]
+
+
+-- Complex context
+-- ---------------
+
+
+[case testParenthesesAndContext]
+from typing import List
+l = ([A()]) # type: List[object]
+class A: pass
+[builtins fixtures/list.pyi]
+
+[case testComplexTypeInferenceWithTuple]
+from typing import TypeVar, Tuple, Generic
+k = TypeVar('k')
+t = TypeVar('t')
+v = TypeVar('v')
+def f(x: Tuple[k]) -> 'A[k]': pass
+
+d = f((A(),)) # type: A[A[B]]
+
+class A(Generic[t]): pass
+class B: pass
+class C: pass
+class D(Generic[k, v]): pass
+[builtins fixtures/list.pyi]
+
+
+-- Dictionary literals
+-- -------------------
+
+
+[case testDictionaryLiteralInContext]
+from typing import Dict, TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+class B: pass
+class C: pass
+a_b = A() # type: A[B]
+a_c = A() # type: A[C]
+d = {A() : a_c,
+     a_b : A()} # type: Dict[A[B], A[C]]
+[builtins fixtures/dict.pyi]
+
+
+-- Special cases (regression tests etc.)
+-- -------------------------------------
+
+
+[case testInitializationWithInferredGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected C[A]
+
+def f(x: T) -> T: pass
+class C(Generic[T]): pass
+class A: pass
+
+[case testInferredGenericTypeAsReturnValue]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def t() -> 'A[B]':
+    return f(D()) # E: Argument 1 to "f" has incompatible type "D"; expected "B"
+    return A()
+    return f(C())
+
+def f(a: T) -> 'A[T]': pass
+class A(Generic[T]): pass
+class B: pass
+class C(B): pass
+class D: pass
+[out]
+
+[case testIntersectionWithInferredGenericArgument]
+from foo import *
+[file foo.pyi]
+from typing import overload, TypeVar, Generic
+T = TypeVar('T')
+f(A())
+
+@overload
+def f(x: 'A[B]') -> None: pass
+@overload
+def f(x: 'B') -> None: pass
+class A(Generic[T]): pass
+class B: pass
+
+[case testInferenceWithAbstractClassContext]
+from typing import TypeVar, Generic
+from abc import abstractmethod, ABCMeta
+t = TypeVar('t')
+x = A() # type: I[int]
+a_object = A() # type: A[object]
+y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type A[object], variable has type I[int])
+
+class I(Generic[t]):
+    @abstractmethod
+    def f(self): pass
+class A(I[t], Generic[t]):
+    def f(self): pass
+
+[case testInferenceWithAbstractClassContext2]
+from typing import TypeVar, Generic
+from abc import abstractmethod, ABCMeta
+t = TypeVar('t')
+a = f(A()) # type: A[int]
+a_int = A() # type: A[int]
+aa = f(a_int)
+class I(Generic[t]): pass
+class A(I[t], Generic[t]): pass
+def f(i: I[t]) -> A[t]: pass
+
+[case testInferenceWithAbstractClassContext3]
+from typing import TypeVar, Generic, Iterable
+t = TypeVar('t')
+class set(Generic[t]):
+    def __init__(self, iterable: Iterable[t]) -> None: pass
+b = bool()
+l = set([b])
+l = set([object()]) # E: List item 0 has incompatible type "object"
+[builtins fixtures/for.pyi]
+
+
+-- Infer generic type in 'Any' context
+-- -----------------------------------
+
+
+[case testInferGenericTypeInAnyContext]
+from typing import Any, TypeVar, Generic
+s = TypeVar('s')
+t = TypeVar('t')
+x = [] # type: Any
+y = C() # type: Any
+class C(Generic[s, t]): pass
+[builtins fixtures/list.pyi]
+
+
+-- Lambdas
+-- -------
+
+
+[case testInferLambdaArgumentTypeUsingContext]
+from typing import Callable
+f = None # type: Callable[[B], A]
+f = lambda x: x.o
+f = lambda x: x.x # E: "B" has no attribute "x"
+class A: pass
+class B:
+  o = None # type: A
+
+[case testInferLambdaReturnTypeUsingContext]
+from typing import List, Callable
+f = None # type: Callable[[], List[A]]
+f = lambda: []
+f = lambda: [B()]  # E: List item 0 has incompatible type "B"
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testInferLambdaAsGenericFunctionArgument]
+from typing import TypeVar, List, Any, Callable
+t = TypeVar('t')
+class A:
+  x = None # type: A
+def f(a: List[t], fn: Callable[[t], Any]) -> None: pass
+list_a = [] # type: List[A]
+f(list_a, lambda a: a.x)
+[builtins fixtures/list.pyi]
+
+[case testLambdaWithoutContext]
+reveal_type(lambda x: x)  # E: Revealed type is 'def (x: Any) -> Any'
+reveal_type(lambda x: 1)  # E: Revealed type is 'def (x: Any) -> builtins.int'
+
+[case testLambdaContextVararg]
+from typing import Callable
+def f(t: Callable[[str], str]) -> str: ''
+f(lambda *_: '')
+
+[case testInvalidContextForLambda]
+from typing import Callable
+f = lambda x: A() # type: Callable[[], A]
+f2 = lambda: A() # type: Callable[[A], A]
+class A: pass
+[out]
+main:2: error: Incompatible types in assignment (expression has type Callable[[Any], A], variable has type Callable[[], A])
+main:2: error: Cannot infer type of lambda
+main:3: error: Incompatible types in assignment (expression has type Callable[[], A], variable has type Callable[[A], A])
+main:3: error: Cannot infer type of lambda
+
+[case testEllipsisContextForLambda]
+from typing import Callable
+f1 = lambda x: 1 # type: Callable[..., int]
+f2 = lambda: 1 # type: Callable[..., int]
+f3 = lambda *args, **kwargs: 1 # type: Callable[..., int]
+f4 = lambda x: x # type: Callable[..., int]
+g = lambda x: 1 # type: Callable[..., str]
+[builtins fixtures/dict.pyi]
+[out]
+main:6: error: Incompatible types in assignment (expression has type Callable[[Any], int], variable has type Callable[..., str])
+main:6: error: Incompatible return value type (got "int", expected "str")
+
+[case testEllipsisContextForLambda2]
+from typing import TypeVar, Callable
+T = TypeVar('T')
+def foo(arg: Callable[..., T]) -> None: pass
+foo(lambda: 1)
+
+[case testLambdaNoneInContext]
+from typing import Callable
+def f(x: Callable[[], None]) -> None: pass
+def g(x: Callable[[], int]) -> None: pass
+f(lambda: None)
+g(lambda: None)
+
+[case testIsinstanceInInferredLambda]
+from typing import TypeVar, Callable
+T = TypeVar('T')
+S = TypeVar('S')
+class A: pass
+class B(A): pass
+class C(A): pass
+def f(func: Callable[[T], S], *z: T, r: S = None) -> S: pass
+f(lambda x: 0 if isinstance(x, B) else 1) # E: Cannot infer type argument 1 of "f"
+f(lambda x: 0 if isinstance(x, B) else 1, A())() # E: "int" not callable
+f(lambda x: x if isinstance(x, B) else B(), A(), r=B())() # E: "B" not callable
+f( # E: Argument 1 to "f" has incompatible type Callable[[A], A]; expected Callable[[A], B]
+    lambda x: B() if isinstance(x, B) else x, # E: Incompatible return value type (got "A", expected "B")
+    A(), r=B())
+[builtins fixtures/isinstance.pyi]
+
+
+-- Overloads + generic functions
+-- -----------------------------
+
+
+[case testMapWithOverloadedFunc]
+from foo import *
+[file foo.pyi]
+from typing import TypeVar, Callable, List, overload, Any
+t = TypeVar('t')
+s = TypeVar('s')
+def map(f: Callable[[t], s], seq: List[t]) -> List[s]: pass
+
+@overload
+def g(o: object) -> 'B': pass
+@overload
+def g(o: 'A', x: Any = None) -> 'B': pass
+
+class A: pass
+class B: pass
+
+m = map(g, [A()])
+b = m # type: List[B]
+a = m # type: List[A] # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+[builtins fixtures/list.pyi]
+
+
+-- Boolean operators
+-- -----------------
+
+
+[case testOrOperationInferredFromContext]
+from typing import List
+a, b, c = None, None, None # type: (List[A], List[B], List[C])
+a = a or []
+a = [] or a
+b = b or [C()]
+a = a or b # E: Incompatible types in assignment (expression has type "Union[List[A], List[B]]", variable has type List[A])
+b = b or c # E: Incompatible types in assignment (expression has type "Union[List[B], List[C]]", variable has type List[B])
+
+class A: pass
+class B: pass
+class C(B): pass
+[builtins fixtures/list.pyi]
+
+
+-- Special cases
+-- -------------
+
+
+[case testSomeTypeVarsInferredFromContext]
+from typing import List, TypeVar
+t = TypeVar('t')
+s = TypeVar('s')
+# Some type variables can be inferred using context, but not all of them.
+a = None # type: List[A]
+a = f(A(), B())
+a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+def f(a: s, b: t) -> List[s]: pass
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testSomeTypeVarsInferredFromContext2]
+from typing import List, TypeVar
+s = TypeVar('s')
+t = TypeVar('t')
+# Like testSomeTypeVarsInferredFromContext, but tvars in different order.
+a = None # type: List[A]
+a = f(A(), B())
+a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+def f(a: s, b: t) -> List[s]: pass
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testLambdaInListAndHigherOrderFunction]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+map(
+  [lambda x: x], [])
+def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testChainedAssignmentInferenceContexts]
+from typing import List
+i = None # type: List[int]
+s = None # type: List[str]
+i = i = []
+i = s = [] # E: Incompatible types in assignment (expression has type List[str], variable has type List[int])
+[builtins fixtures/list.pyi]
+
+[case testContextForAttributeDeclaredInInit]
+from typing import List
+class A:
+  def __init__(self):
+    self.x = [] # type: List[int]
+a = A()
+a.x = []
+a.x = [1]
+a.x = [''] # E: List item 0 has incompatible type "str"
+[builtins fixtures/list.pyi]
+
+[case testListMultiplyInContext]
+from typing import List
+a = None  # type: List[int]
+a = [None] * 3
+a = [''] * 3 # E: List item 0 has incompatible type "str"
+[builtins fixtures/list.pyi]
+
+[case testUnionTypeContext]
+from typing import Union, List, TypeVar
+T = TypeVar('T')
+def f(x: Union[List[T], str]) -> None: pass
+f([1])
+f('')
+f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Union[List[<nothing>], str]"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIgnoringInferenceContext]
+from typing import TypeVar, List
+T = TypeVar('T')
+def f(x: List[T]) -> T: pass
+def g(y: object) -> None: pass
+a = [1]
+g(f(a))
+[builtins fixtures/list.pyi]
+
+[case testStar2Context]
+from typing import Any, Dict, Tuple, Iterable
+def f1(iterable: Iterable[Tuple[str, Any]] = None) -> None:
+    f2(**dict(iterable))
+def f2(iterable: Iterable[Tuple[str, Any]], **kw: Any) -> None:
+    pass
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testInferenceInGenericFunction]
+from typing import TypeVar, List
+T = TypeVar('T')
+def f(a: T) -> None:
+    l = []  # type: List[T]
+    l.append(a)
+    l.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "T"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferenceInGenericClass]
+from typing import TypeVar, Generic, List
+S = TypeVar('S')
+T = TypeVar('T')
+class A(Generic[S]):
+    def f(self, a: T, b: S) -> None:
+        l = []  # type: List[T]
+        l.append(a)
+        l.append(b) # E: Argument 1 to "append" of "list" has incompatible type "S"; expected "T"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testLambdaInGenericFunction]
+from typing import TypeVar, Callable
+T = TypeVar('T')
+S = TypeVar('S')
+def f(a: T, b: S) -> None:
+    c = lambda x: x  # type: Callable[[T], S]
+[out]
+main:5: error: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], S])
+main:5: error: Incompatible return value type (got "T", expected "S")
+
+[case testLambdaInGenericClass]
+from typing import TypeVar, Callable, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class A(Generic[T]):
+    def f(self, b: S) -> None:
+        c = lambda x: x  # type: Callable[[T], S]
+[out]
+main:6: error: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], S])
+main:6: error: Incompatible return value type (got "T", expected "S")
+
+[case testRevealTypeContext]
+from typing import TypeVar, Callable, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    pass
+reveal_type(A()) # E: Revealed type is '__main__.A[<nothing>]'
+b = reveal_type(A())  # type: A[int] # E: Revealed type is '__main__.A[builtins.int]'
+
+[case testUnionWithGenericTypeItemContext]
+from typing import TypeVar, Union, List
+
+T = TypeVar('T')
+
+def f(x: Union[T, List[int]]) -> Union[T, List[int]]: pass
+reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int*, builtins.list[builtins.int]]'
+reveal_type(f([])) # E: Revealed type is 'builtins.list[builtins.int]'
+reveal_type(f(None)) # E: Revealed type is 'builtins.list[builtins.int]'
+[builtins fixtures/list.pyi]
+
+[case testUnionWithGenericTypeItemContextAndStrictOptional]
+# flags: --strict-optional
+from typing import TypeVar, Union, List
+
+T = TypeVar('T')
+
+def f(x: Union[T, List[int]]) -> Union[T, List[int]]: pass
+reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int*, builtins.list[builtins.int]]'
+reveal_type(f([])) # E: Revealed type is 'builtins.list[builtins.int]'
+reveal_type(f(None)) # E: Revealed type is 'Union[builtins.None, builtins.list[builtins.int]]'
+[builtins fixtures/list.pyi]
+
+[case testUnionWithGenericTypeItemContextInMethod]
+from typing import TypeVar, Union, List, Generic
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class C(Generic[T]):
+    def f(self, x: Union[T, S]) -> Union[T, S]: pass
+
+c = C[List[int]]()
+reveal_type(c.f('')) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.str*]'
+reveal_type(c.f([1])) # E: Revealed type is 'builtins.list[builtins.int]'
+reveal_type(c.f([])) # E: Revealed type is 'builtins.list[builtins.int]'
+reveal_type(c.f(None)) # E: Revealed type is 'builtins.list[builtins.int]'
+[builtins fixtures/list.pyi]
+
+[case testGenericMethodCalledInGenericContext]
+from typing import TypeVar, Generic
+
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
+_T = TypeVar('_T')
+
+class M(Generic[_KT, _VT]):
+    def get(self, k: _KT, default: _T) -> _T: ...
+
+def f(d: M[_KT, _VT], k: _KT) -> _VT:
+    return d.get(k, None)  # E: "get" of "M" does not return a value
+
+[case testGenericMethodCalledInGenericContext2]
+from typing import TypeVar, Generic, Union
+
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
+_T = TypeVar('_T')
+
+class M(Generic[_KT, _VT]):
+    def get(self, k: _KT, default: _T) -> Union[_VT, _T]: ...
+
+def f(d: M[_KT, _VT], k: _KT) -> Union[_VT, None]:
+    return d.get(k, None)
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
new file mode 100644
index 0000000..42cd312
--- /dev/null
+++ b/test-data/unit/check-inference.test
@@ -0,0 +1,1889 @@
+-- Inferring locals/globals with simple types
+-- ------------------------------------------
+
+
+[case testInferSimpleGvarType]
+import typing
+x = A()
+y = B()
+x = B() # Fail
+x = A()
+x = y   # Fail
+x = x
+class A: pass
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testInferSimpleLvarType]
+import typing
+def f() -> None:
+  x = A()
+  y = B()
+  x = B() # Fail
+  x = A()
+  x = y   # Fail
+  x = x
+class A: pass
+class B: pass
+[out]
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:7: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testLvarInitializedToVoid]
+import typing
+def f() -> None:
+    a = g()    # E: "g" does not return a value
+    #b, c = g() # "g" does not return a value TODO
+
+def g() -> None: pass
+[out]
+
+[case testInferringLvarTypeFromArgument]
+import typing
+def f(a: 'A') -> None:
+    b = a
+    b = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = a
+    a = b
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringLvarTypeFromGvar]
+
+g = None # type: B
+
+def f() -> None:
+    a = g
+    a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    a = B()
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringImplicitDynamicTypeForLvar]
+import typing
+def f() -> None:
+    a = g()
+    None(a) # E: None not callable
+    a.x()
+
+def g(): pass
+[out]
+
+[case testInferringExplicitDynamicTypeForLvar]
+from typing import Any
+g = None # type: Any
+
+def f(a: Any) -> None:
+    b = g
+    None(b) # E: None not callable
+    a.x()
+[out]
+
+
+-- Inferring types of local variables with complex types
+-- -----------------------------------------------------
+
+
+[case testInferringTupleTypeForLvar]
+
+def f() -> None:
+    a = A(), B()
+    aa = None # type: A
+    bb = None # type: B
+    bb = a[0] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    aa = a[1] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    aa = a[0]
+    bb = a[1]
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testInferringTupleTypeForLvarWithNones]
+import typing
+def f() -> None:
+    a = A(), None
+    b = None, A()
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testInferringGenericTypeForLvar]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]): pass
+a_i = None # type: A[int]
+a_s = None # type: A[str]
+
+def f() -> None:
+    a_int = A() # type: A[int]
+    a = a_int
+    a = a_s # E: Incompatible types in assignment (expression has type A[str], variable has type A[int])
+    a = a_i
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testInferringFunctionTypeForLvar]
+import typing
+def f() -> None:
+    a = g
+    a(B()) # E: Argument 1 has incompatible type "B"; expected "A"
+    a(A())
+
+def g(a: 'A') -> None: pass
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringFunctionTypeForLvarFromTypeObject]
+import typing
+def f() -> None:
+    a = A
+    a(A()) # E: Too many arguments
+    a()
+    t = a # type: type
+
+class A: pass
+[out]
+
+
+-- Inferring variable types in multiple definition
+-- -----------------------------------------------
+
+
+[case testInferringLvarTypesInMultiDef]
+import typing
+def f() -> None:
+    a, b = A(), B()
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+    a = A()
+    b = B()
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringLvarTypesInTupleAssignment]
+from typing import Tuple
+def f() -> None:
+    t = None # type: Tuple[A, B]
+    a, b = t
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+    a = A()
+    b = B()
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringLvarTypesInNestedTupleAssignment1]
+from typing import Tuple
+def f() -> None:
+    t = None # type: Tuple[A, B]
+    a1, (a, b) = A(), t
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+    a = A()
+    b = B()
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringLvarTypesInNestedTupleAssignment2]
+import typing
+def f() -> None:
+    a, (b, c) = A(), (B(), C())
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    c = A() # E: Incompatible types in assignment (expression has type "A", variable has type "C")
+
+    a = A()
+    b = B()
+    c = C()
+
+class A: pass
+class B: pass
+class C: pass
+[out]
+
+[case testInferringLvarTypesInNestedListAssignment]
+import typing
+def f() -> None:
+    a, (b, c) = A(), [B(), C()]
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    c = A() # E: Incompatible types in assignment (expression has type "A", variable has type "C")
+
+    a = A()
+    b = B()
+    c = C()
+
+class A: pass
+class B: pass
+class C: pass
+[out]
+
+[case testInferringLvarTypesInMultiDefWithNoneTypes]
+import typing
+def f() -> None:
+    a, b = A(), None
+    c, d = None, A()
+
+class A: pass
+[out]
+
+[case testInferringLvarTypesInNestedTupleAssignmentWithNoneTypes]
+import typing
+def f() -> None:
+    a1, (a2, b) = A(), (A(), None)
+
+class A: pass
+[out]
+
+[case testInferringLvarTypesInMultiDefWithInvalidTuple]
+from typing import Tuple
+t = None # type: Tuple[object, object, object]
+
+def f() -> None:
+    a, b = t         # Fail
+    c, d, e, f = t   # Fail
+    g, h, i = t
+[builtins fixtures/tuple.pyi]
+[out]
+main:5: error: Too many values to unpack (2 expected, 3 provided)
+main:6: error: Need more than 3 values to unpack (4 expected)
+
+[case testInvalidRvalueTypeInInferredMultipleLvarDefinition]
+import typing
+def f() -> None:
+    a, b = f   # E: 'def ()' object is not iterable
+    c, d = A() # E: '__main__.A' object is not iterable
+class A: pass
+[builtins fixtures/for.pyi]
+[out]
+
+[case testInvalidRvalueTypeInInferredNestedTupleAssignment]
+import typing
+def f() -> None:
+    a1, (a2, b) = A(), f   # E: 'def ()' object is not iterable
+    a3, (c, d) = A(), A() # E: '__main__.A' object is not iterable
+class A: pass
+[builtins fixtures/for.pyi]
+[out]
+
+[case testInferringMultipleLvarDefinitionWithListRvalue]
+from typing import List
+
+class C: pass
+class D: pass
+
+def f() -> None:
+    list_c = [C()]
+    list_d = [D()]
+    a, b = list_c
+    c, d, e = list_d
+    a = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+    b = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+    c = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D")
+    b = c   # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+
+    a = C()
+    b = C()
+    c = D()
+    d = D()
+    e = D()
+
+    a = b
+    c = d
+    d = e
+[builtins fixtures/for.pyi]
+[out]
+
+[case testInferringNestedTupleAssignmentWithListRvalue]
+from typing import List
+
+class C: pass
+class D: pass
+
+def f() -> None:
+    list_c = [C()]
+    list_d = [D()]
+    c1, (a, b) = C(), list_c
+    c2, (c, d, e) = C(), list_d
+    a = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+    b = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+    c = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D")
+    b = c   # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+
+    a = C()
+    b = C()
+    c = D()
+    d = D()
+    e = D()
+
+    a = b
+    c = d
+    d = e
+[builtins fixtures/for.pyi]
+[out]
+
+[case testInferringMultipleLvarDefinitionWithImplicitDynamicRvalue]
+import typing
+def f() -> None:
+    a, b = g()
+    a.x
+    b.x
+def g(): pass
+
+[case testInferringMultipleLvarDefinitionWithExplicitDynamicRvalue]
+from typing import Any
+def f(d: Any) -> None:
+    a, b = d
+    a.x
+    b.x
+
+[case testInferringTypesFromIterable]
+from typing import Iterable
+class Nums(Iterable[int]):
+    def __iter__(self): pass
+    def __next__(self): pass
+a, b = Nums()
+a = b = 1
+a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+b = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[builtins fixtures/for.pyi]
+
+
+-- Type variable inference for generic functions
+-- ---------------------------------------------
+
+
+[case testInferSimpleGenericFunction]
+from typing import Tuple, TypeVar
+T = TypeVar('T')
+a = None # type: A
+b = None # type: B
+c = None # type: Tuple[A, object]
+
+b = id(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = id(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = id(c) # E: Incompatible types in assignment (expression has type "Tuple[A, object]", variable has type "A")
+
+a = id(a)
+b = id(b)
+c = id(c)
+
+def id(a: T) -> T: pass
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testInferringGenericFunctionTypeForLvar]
+from typing import TypeVar
+T = TypeVar('T')
+def f() -> None:
+    a = id
+    b = None # type: int
+    c = None # type: str
+    b = a(c) # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    b = a(b)
+    c = a(c)
+def id(x: T) -> T:
+    return x
+[out]
+
+[case testUnderspecifiedInferenceResult-skip]
+from typing import TypeVar
+T = TypeVar('T')
+class A: pass
+a = None # type: A
+
+def ff() -> None:
+    x = f() # E: Need type annotation for variable
+
+g(None) # Ok
+f()     # Ok because not used to infer local variable type
+g(a)
+
+def f() -> T: pass
+def g(a: T) -> None: pass
+[out]
+
+[case testInferenceWithMultipleConstraints]
+from typing import TypeVar
+T = TypeVar('T')
+a = None # type: A
+b = None # type: B
+
+b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(b, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(a, b)
+a = f(b, a)
+
+def f(a: T, b: T) -> T: pass
+
+class A: pass
+class B(A): pass
+
+[case testInferenceWithMultipleVariables]
+from typing import Tuple, TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+a, b = None, None # type: (A, B)
+taa = None # type: Tuple[A, A]
+tab = None # type: Tuple[A, B]
+tba = None # type: Tuple[B, A]
+
+taa = f(a, b) # Fail
+taa = f(b, a) # Fail
+tba = f(a, b) # Fail
+
+tab = f(a, b)
+tba = f(b, a)
+
+def f(a: T, b: S) -> Tuple[T, S]: pass
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+[out]
+main:9: error: Argument 2 to "f" has incompatible type "B"; expected "A"
+main:10: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+main:11: error: Argument 1 to "f" has incompatible type "A"; expected "B"
+main:11: error: Argument 2 to "f" has incompatible type "B"; expected "A"
+
+[case testConstraintSolvingWithSimpleGenerics]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ao = None # type: A[object]
+ab = None # type: A[B]
+ac = None # type: A[C]
+
+ab = f(ao) # E: Argument 1 to "f" has incompatible type A[object]; expected A[B]
+ao = f(ab) # E: Argument 1 to "f" has incompatible type A[B]; expected A[object]
+ab = f(ac) # E: Argument 1 to "f" has incompatible type A[C]; expected A[B]
+ab = g(ao) # E: Argument 1 to "g" has incompatible type A[object]; expected A[B]
+ao = g(ab) # E: Argument 1 to "g" has incompatible type A[B]; expected A[object]
+
+ab = f(ab)
+ac = f(ac)
+ao = f(ao)
+
+ab = g(ab)
+ao = g(ao)
+
+def f(a: 'A[T]') -> 'A[T]': pass
+
+def g(a: T) -> T: pass
+
+class A(Generic[T]): pass
+class B: pass
+class C: pass
+
+[case testConstraintSolvingFailureWithSimpleGenerics]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ao = None # type: A[object]
+ab = None # type: A[B]
+
+f(ao, ab) # E: Cannot infer type argument 1 of "f"
+f(ab, ao) # E: Cannot infer type argument 1 of "f"
+f(ao, ao)
+f(ab, ab)
+
+def f(a: 'A[T]', b: 'A[T]') -> None: pass
+
+class A(Generic[T]): pass
+class B: pass
+
+[case testTypeInferenceWithCalleeDefaultArgs]
+from typing import TypeVar
+T = TypeVar('T')
+a = None # type: A
+o = None # type: object
+
+a = f(o)    # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = g(a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+o = f()
+o = f(o)
+a = f(a)
+a = g(a)
+
+def f(a: T = None) -> T: pass
+def g(a: T, b: T = None) -> T: pass
+
+class A: pass
+
+
+-- Generic function inference with multiple inheritance
+-- ----------------------------------------------------
+
+
+[case testGenericFunctionInferenceWithMultipleInheritance]
+from typing import TypeVar
+
+class I: pass
+class J: pass
+
+class A(I, J): pass
+class B(I, J): pass
+class C(I): pass
+class D(J): pass
+
+T = TypeVar('T')
+def f(a: T, b: T) -> T: pass
+def g(x: I) -> None: pass
+
+a = f(A(), C())
+g(a)
+b = f(A(), B())
+g(b)
+c = f(A(), D())
+g(c) # E: Argument 1 to "g" has incompatible type "J"; expected "I"
+d = f(D(), A())
+g(d) # E: Argument 1 to "g" has incompatible type "J"; expected "I"
+e = f(D(), C())
+g(e) # E: Argument 1 to "g" has incompatible type "object"; expected "I"
+
+[case testGenericFunctionInferenceWithMultipleInheritance2]
+from typing import TypeVar
+
+class I: pass
+class J: pass
+
+class A(I): pass
+class B(A, J): pass
+class C(I, J): pass
+
+T = TypeVar('T')
+def f(a: T, b: T) -> T: pass
+def g(x: I) -> None: pass
+def h(x: J) -> None: pass
+
+a = f(B(), C())
+g(a)
+h(a) # E: Argument 1 to "h" has incompatible type "I"; expected "J"
+b = f(C(), B())
+g(b)
+h(b) # E: Argument 1 to "h" has incompatible type "I"; expected "J"
+c = f(A(), B())
+g(a)
+h(b) # E: Argument 1 to "h" has incompatible type "I"; expected "J"
+
+[case testGenericFunctionInferenceWithMultipleInheritance3]
+from typing import TypeVar
+
+class I: pass
+class J: pass
+class K(J): pass
+
+class A(K): pass
+class B(A, I): pass
+class C(I, J): pass
+
+T = TypeVar('T')
+def f(a: T, b: T) -> T: pass
+def g(x: K) -> None: pass
+
+a = f(B(), C())
+g(a) # E: Argument 1 to "g" has incompatible type "J"; expected "K"
+b = f(A(), C())
+g(b) # E: Argument 1 to "g" has incompatible type "J"; expected "K"
+c = f(A(), B())
+g(c)
+
+[case testPrecedenceOfFirstBaseAsInferenceResult]
+from typing import TypeVar
+from abc import abstractmethod, ABCMeta
+T = TypeVar('T')
+a, i, j = None, None, None # type: (A, I, J)
+
+a = f(B(), C())
+
+class I(metaclass=ABCMeta): pass
+class J(metaclass=ABCMeta): pass
+
+def f(a: T, b: T) -> T: pass
+
+class A: pass
+class B(A, I, J): pass
+class C(A, I, J): pass
+
+
+-- Generic function inference with function arguments
+-- --------------------------------------------------
+
+
+[case testNonOverloadedMapInference]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+class A: pass
+b = bool()
+def f(x: bool) -> A: pass
+def mymap(f: Callable[[t], s], a: List[t]) -> List[s]: pass
+
+l = mymap(f, [b])
+l = [A()]
+lb = [b]
+l = lb # E: Incompatible types in assignment (expression has type List[bool], variable has type List[A])
+[builtins fixtures/for.pyi]
+
+[case testGenericFunctionWithTypeTypeAsCallable]
+from typing import Callable, Type, TypeVar
+T = TypeVar('T')
+def f(x: Callable[..., T]) -> T: return x()
+class A: pass
+x = None  # type: Type[A]
+y = f(x)
+reveal_type(y)  # E: Revealed type is '__main__.A*'
+
+-- Generic function inference with unions
+-- --------------------------------------
+
+
+[case testUnionInference]
+from typing import TypeVar, Union, List
+T = TypeVar('T')
+U = TypeVar('U')
+def f(x: Union[T, int], y: T) -> T: pass
+f(1, 'a')() # E: "str" not callable
+f('a', 1)() # E: "object" not callable
+f('a', 'a')() # E: "str" not callable
+f(1, 1)() # E: "int" not callable
+
+def g(x: Union[T, List[T]]) -> List[T]: pass
+def h(x: List[str]) -> None: pass
+g('a')() # E: List[str] not callable
+
+# The next line is a case where there are multiple ways to satisfy a constraint
+# involving a Union. Either T = List[str] or T = str would turn out to be valid,
+# but mypy doesn't know how to branch on these two options (and potentially have
+# to backtrack later) and defaults to T = <nothing>. The result is an
+# awkward error message. Either a better error message, or simply accepting the
+# call, would be preferable here.
+g(['a']) # E: Argument 1 to "g" has incompatible type List[str]; expected List[<nothing>]
+
+h(g(['a']))
+
+def i(x: Union[List[T], List[U]], y: List[T], z: List[U]) -> None: pass
+a = [1]
+b = ['b']
+i(a, a, b)
+i(b, a, b)
+i(a, b, b) # E: Argument 1 to "i" has incompatible type List[int]; expected List[str]
+[builtins fixtures/list.pyi]
+
+[case testCallableListJoinInference]
+from typing import Any, Callable
+
+def fun() -> None:
+    callbacks = [
+        callback1,
+        callback2,
+    ]
+
+    for c in callbacks:
+        call(c, 1234) # this must not fail
+
+def callback1(i: int) -> int:
+    return i
+def callback2(i: int) -> str:
+    return 'hello'
+def call(c: Callable[[int], Any], i: int) -> None:
+    c(i)
+[builtins fixtures/list.pyi]
+[out]
+
+[case testCallableMeetAndJoin]
+# flags: --python-version 3.6
+from typing import Callable, Any, TypeVar
+
+class A: ...
+class B(A): ...
+
+def f(c: Callable[[B], int]) -> None: ...
+
+c: Callable[[A], int]
+d: Callable[[B], int]
+
+lst = [c, d]
+reveal_type(lst) # E: Revealed type is 'builtins.list[def (__main__.B) -> builtins.int]'
+
+T = TypeVar('T')
+def meet_test(x: Callable[[T], int], y: Callable[[T], int]) -> T: ...
+
+CA = Callable[[A], A]
+CB = Callable[[B], B]
+
+ca: Callable[[CA], int]
+cb: Callable[[CB], int]
+reveal_type(meet_test(ca, cb)) # E: Revealed type is 'def (__main__.A) -> __main__.B'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testUnionInferenceWithTypeVarValues]
+from typing import TypeVar, Union
+AnyStr = TypeVar('AnyStr', bytes, str)
+def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass
+f('foo')
+f('foo', 'bar')
+f('foo', b'bar') # E: Type argument 1 of "f" has incompatible value "object"
+f(1)
+f(1, 'foo')
+f(1, 'foo', b'bar') # E: Type argument 1 of "f" has incompatible value "object"
+[builtins fixtures/primitives.pyi]
+
+
+[case testUnionTwoPassInference-skip]
+from typing import TypeVar, Union, List
+T = TypeVar('T')
+U = TypeVar('U')
+def j(x: Union[List[T], List[U]], y: List[T]) -> List[U]: pass
+
+a = [1]
+b = ['b']
+# We could infer: Since List[str] <: List[T], we must have T = str.
+# Then since List[int] <: Union[List[str], List[U]], and List[int] is
+# not a subtype of List[str], we must have U = int.
+# This is not currently implemented.
+j(a, b)
+[builtins fixtures/list.pyi]
+
+
+[case testUnionContext]
+from typing import TypeVar, Union, List
+T = TypeVar('T')
+def f() -> List[T]: pass
+d1 = f() # type: Union[List[int], str]
+d2 = f() # type: Union[int, str] # E: Incompatible types in assignment (expression has type List[<nothing>], variable has type "Union[int, str]")
+def g(x: T) -> List[T]: pass
+d3 = g(1) # type: Union[List[int], List[str]]
+[builtins fixtures/list.pyi]
+
+
+[case testGenericFunctionSubtypingWithUnions]
+from typing import TypeVar, Union, List
+T = TypeVar('T')
+S = TypeVar('S')
+def k1(x: int, y: List[T]) -> List[Union[T, int]]: pass
+def k2(x: S, y: List[T]) -> List[Union[T, int]]: pass
+a = k2
+a = k2
+a = k1 # E: Incompatible types in assignment (expression has type Callable[[int, List[T]], List[Union[T, int]]], variable has type Callable[[S, List[T]], List[Union[T, int]]])
+b = k1
+b = k1
+b = k2
+[builtins fixtures/list.pyi]
+
+[case testAmbiguousUnionContextAndMultipleInheritance]
+from typing import TypeVar, Union, Generic
+
+_T = TypeVar('_T')
+
+class T(Generic[_T]): pass
+class U(Generic[_T]): pass
+class V(T[_T], U[_T]): pass
+
+def wait_for(fut: Union[T[_T], U[_T]]) -> _T: ...
+
+reveal_type(wait_for(V[str]()))  # E: Revealed type is 'builtins.str*'
+
+[case testAmbiguousUnionContextAndMultipleInheritance2]
+from typing import TypeVar, Union, Generic
+
+_T = TypeVar('_T')
+_S = TypeVar('_S')
+
+class T(Generic[_T, _S]): pass
+class U(Generic[_T, _S]): pass
+class V(T[_T, _S], U[_T, _S]): pass
+
+def wait_for(fut: Union[T[_T, _S], U[_T, _S]]) -> T[_T, _S]: ...
+
+reveal_type(wait_for(V[int, str]()))  \
+    # E: Revealed type is '__main__.T[builtins.int*, builtins.str*]'
+
+
+-- Literal expressions
+-- -------------------
+
+
+[case testDictLiteral]
+from typing import Dict
+class A: pass
+class B: pass
+def d_ab() -> Dict[A, B]: return {}
+def d_aa() -> Dict[A, A]: return {}
+a, b = None, None # type: (A, B)
+d = {a:b}
+d = d_ab()
+d = d_aa() # E: Incompatible types in assignment (expression has type Dict[A, A], variable has type Dict[A, B])
+[builtins fixtures/dict.pyi]
+
+[case testSetLiteral]
+from typing import Any, Set
+a, x = None, None # type: (int, Any)
+def s_i() -> Set[int]: return set()
+def s_s() -> Set[str]: return set()
+s = {a}
+s = {x}
+s = s_i()
+s = s_s() # E: Incompatible types in assignment (expression has type Set[str], variable has type Set[int])
+[builtins fixtures/set.pyi]
+
+[case testSetWithStarExpr]
+
+s = {1, 2, *(3, 4)}
+t = {1, 2, *s}
+reveal_type(s)  # E: Revealed type is 'builtins.set[builtins.int*]'
+reveal_type(t)  # E: Revealed type is 'builtins.set[builtins.int*]'
+[builtins fixtures/set.pyi]
+
+
+-- For statements
+-- --------------
+
+
+[case testInferenceOfFor1]
+a, b = None, None # type: (A, B)
+
+for x in [A()]:
+    b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    a = x
+
+for y in []:
+    a = y
+    reveal_type(y)  # E: Revealed type is 'builtins.None'
+
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+
+[case testInferenceOfFor2]
+
+a, b, c = None, None, None # type: (A, B, C)
+for x, (y, z) in [(A(), (B(), C()))]:
+    b = x # Fail
+    c = y # Fail
+    a = z # Fail
+    a = x
+    b = y
+    c = z
+for xx, yy, zz in [(A(), B())]: # Fail
+    pass
+for xx, (yy, zz) in [(A(), B())]: # Fail
+    pass
+for xxx, yyy in [(None, None)]:
+    pass
+
+class A: pass
+class B: pass
+class C: pass
+[builtins fixtures/for.pyi]
+[out]
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "C")
+main:6: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:10: error: Need more than 2 values to unpack (3 expected)
+main:12: error: '__main__.B' object is not iterable
+
+[case testInferenceOfFor3]
+
+a, b = None, None # type: (A, B)
+
+for x, y in [[A()]]:
+    b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    b = y # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    a = x
+    a = y
+
+for e, f in [[]]:
+    reveal_type(e)  # E: Revealed type is 'builtins.None'
+    reveal_type(f)  # E: Revealed type is 'builtins.None'
+
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+
+[case testForStatementInferenceWithVoid]
+import typing
+for x in f(): # E: "f" does not return a value
+    pass
+def f() -> None: pass
+[builtins fixtures/for.pyi]
+
+[case testReusingInferredForIndex]
+import typing
+for a in [A()]: pass
+a = A()
+a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+for a in []: pass
+a = A()
+a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+
+[case testReusingInferredForIndex2]
+import typing
+def f() -> None:
+    for a in [A()]: pass
+    a = A()
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    for a in []: pass
+    a = A()
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Regression tests
+-- ----------------
+
+
+[case testMultipleAssignmentWithPartialDefinition]
+
+a = None # type: A
+x, a = a, a
+x = a
+a = x
+x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+class A: pass
+
+[case testMultipleAssignmentWithPartialDefinition2]
+
+a = None # type: A
+a, x = [a, a]
+x = a
+a = x
+x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+class A: pass
+[builtins fixtures/for.pyi]
+
+[case testMultipleAssignmentWithPartialDefinition3]
+from typing import Any, cast
+a = None # type: A
+x, a = cast(Any, a)
+x = a
+a = x
+x = object()
+a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+class A: pass
+
+[case testInferGlobalDefinedInBlock]
+import typing
+if A:
+    a = A()
+    a = A()
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+class A: pass
+class B: pass
+
+[case testAssigningAnyStrToNone]
+from typing import Tuple, TypeVar
+AnyStr = TypeVar('AnyStr', str, bytes)
+
+def f(x: AnyStr) -> Tuple[AnyStr]: pass
+x = None
+(x,) = f('')
+reveal_type(x)  # E: Revealed type is 'builtins.str'
+
+
+-- Inferring attribute types
+-- -------------------------
+
+
+[case testInferAttributeType]
+import typing
+class A:
+    a = B()
+class B: pass
+
+A().a = B()
+A().a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testInferAttributeTypeAndAssignInInit]
+import typing
+class A:
+    a = B()
+    def __init__(self) -> None:
+        self.a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+        self.a = B()
+class B: pass
+[out]
+
+[case testInferAttributeInInit]
+import typing
+class B: pass
+class A:
+    def __init__(self) -> None:
+        self.a = A()
+        self.b = B()
+a = A()
+a.a = A()
+a.b = B()
+a.a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a.b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testInferAttributeInInitUsingChainedAssignment]
+import typing
+class B: pass
+class A:
+    def __init__(self) -> None:
+        self.a = self.b = A()
+a = A()
+a.a = A()
+a.b = A()
+a.a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a.b = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+
+-- Lambdas
+-- -------
+
+
+[case testInferLambdaType]
+from typing import List, Callable
+li = [1]
+l = lambda: li
+f1 = l # type: Callable[[], List[int]]
+f2 = l # type: Callable[[], List[str]] # E: Incompatible types in assignment (expression has type Callable[[], List[int]], variable has type Callable[[], List[str]])
+[builtins fixtures/list.pyi]
+
+[case testInferLambdaType2]
+from typing import List, Callable
+l = lambda: [B()]
+f1 = l # type: Callable[[], List[B]]
+f2 = l # type: Callable[[], List[A]] # E: Incompatible types in assignment (expression has type Callable[[], List[B]], variable has type Callable[[], List[A]])
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testUninferableLambda]
+from typing import TypeVar, Callable
+X = TypeVar('X')
+def f(x: Callable[[X], X]) -> X: pass
+y = f(lambda x: x) # E: Cannot infer type argument 1 of "f"
+
+[case testUninferableLambdaWithTypeError]
+from typing import TypeVar, Callable
+X = TypeVar('X')
+def f(x: Callable[[X], X], y: str) -> X: pass
+y = f(lambda x: x, 1) # Fail
+[out]
+main:4: error: Cannot infer type argument 1 of "f"
+main:4: error: Argument 2 to "f" has incompatible type "int"; expected "str"
+
+[case testInferLambdaNone]
+from typing import Callable
+def f(x: Callable[[], None]) -> None: pass
+def g(x: Callable[[], int]) -> None: pass
+a = lambda: None
+f(a)
+g(a)
+b = lambda: None  # type: Callable[[], None]
+f(b)
+g(b)
+
+[case testLambdaDefaultContext]
+# flags: --strict-optional
+from typing import Callable
+def f(a: Callable[..., None] = lambda *a, **k: None):
+    pass
+
+def g(a: Callable[..., None] = lambda *a, **k: 1):  # E: Incompatible types in assignment (expression has type Callable[[VarArg(Any), KwArg(Any)], int], variable has type Callable[..., None])
+    pass
+[builtins fixtures/dict.pyi]
+
+[case testLambdaVarargContext]
+# Should not crash
+from typing import Callable
+def f(a: Callable[[int, int, int], int] = lambda *a, **k: 1):
+    pass
+[builtins fixtures/dict.pyi]
+
+[case testLambdaDeferredSpecialCase]
+from typing import Callable
+
+class A:
+    def f(self) -> None:
+        h(lambda: self.x)
+
+    def g(self) -> None:
+        self.x = 1
+
+def h(x: Callable[[], int]) -> None:
+    pass
+
+
+-- Boolean operators
+-- -----------------
+
+
+[case testOrOperationWithGenericOperands]
+from typing import List
+a = None # type: List[A]
+o = None # type: List[object]
+a2 = a or []
+a = a2
+a2 = o # E: Incompatible types in assignment (expression has type List[object], variable has type List[A])
+class A: pass
+[builtins fixtures/list.pyi]
+
+
+-- Accessing variable before its type has been inferred
+-- ----------------------------------------------------
+
+
+[case testAccessGlobalVarBeforeItsTypeIsAvailable]
+import typing
+x.y  # E: Cannot determine type of 'x'
+x = object()
+x.y  # E: "object" has no attribute "y"
+
+[case testAccessDataAttributeBeforeItsTypeIsAvailable]
+
+a = None # type: A
+a.x.y  # E: Cannot determine type of 'x'
+class A:
+   def __init__(self) -> None:
+       self.x = object()
+a.x.y  # E: "object" has no attribute "y"
+
+
+-- Ducktype declarations
+-- ---------------------
+
+
+[case testListWithDucktypeCompatibility]
+from typing import List, _promote
+class A: pass
+@_promote(A)
+class B: pass
+a = None  # type: List[A]
+x1 = [A(), B()]
+x2 = [B(), A()]
+x3 = [B(), B()]
+a = x1
+a = x2
+a = x3 # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+[builtins fixtures/list.pyi]
+
+[case testListWithDucktypeCompatibilityAndTransitivity]
+from typing import List, _promote
+class A: pass
+@_promote(A)
+class B: pass
+@_promote(B)
+class C: pass
+a = None  # type: List[A]
+x1 = [A(), C()]
+x2 = [C(), A()]
+x3 = [B(), C()]
+a = x1
+a = x2
+a = x3 # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+[builtins fixtures/list.pyi]
+
+
+-- Inferring type of variable when initialized to an empty collection
+-- ------------------------------------------------------------------
+
+
+[case testInferListInitializedToEmpty]
+a = []
+a.append(1)
+a.append('')  # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyUsingUpdate]
+a = []
+a.extend([''])
+a.append(0)  # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "str"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndNotAnnotated]
+a = []  # E: Need type annotation for variable
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndReadBeforeAppend]
+a = []  # E: Need type annotation for variable
+if a: pass
+a.xyz
+a.append('')
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndIncompleteTypeInAppend]
+a = [] # E: Need type annotation for variable
+a.append([])
+a()
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndMultipleAssignment]
+a, b = [], []
+a.append(1)
+b.append('')
+a() # E: List[int] not callable
+b() # E: List[str] not callable
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyInFunction]
+def f() -> None:
+   a = []
+   a.append(1)
+   a.append('')  # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndNotAnnotatedInFunction]
+def f() -> None:
+    a = []  # E: Need type annotation for variable
+
+def g() -> None: pass
+
+a = []
+a.append(1)
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndReadBeforeAppendInFunction]
+def f() -> None:
+    a = []  # E: Need type annotation for variable
+    if a: pass
+    a.xyz
+    a.append('')
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyInClassBody]
+class A:
+   a = []
+   a.append(1)
+   a.append('')  # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndNotAnnotatedInClassBody]
+class A:
+    a = []  # E: Need type annotation for variable
+
+class B:
+    a = []
+    a.append(1)
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyInMethod]
+class A:
+    def f(self) -> None:
+        a = []
+        a.append(1)
+        a.append('')  # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndNotAnnotatedInMethod]
+class A:
+    def f(self) -> None:
+        a = []  # E: Need type annotation for variable
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyInMethodViaAttribute]
+class A:
+    def f(self) -> None:
+        # Attributes aren't supported right now.
+        self.a = [] # E: Need type annotation for variable
+        self.a.append(1)
+        self.a.append('')
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyInClassBodyAndOverriden]
+from typing import List
+
+class A:
+    def __init__(self) -> None:
+        self.x = [] # E: Need type annotation for variable
+
+class B(A):
+    @property
+    def x(self) -> List[int]:
+        return [123]
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferSetInitializedToEmpty]
+a = set()
+a.add(1)
+a.add('')  # E: Argument 1 to "add" of "set" has incompatible type "str"; expected "int"
+[builtins fixtures/set.pyi]
+[out]
+
+[case testInferSetInitializedToEmptyUsingDiscard]
+a = set()
+a.discard('')
+a.add(0)  # E: Argument 1 to "add" of "set" has incompatible type "int"; expected "str"
+[builtins fixtures/set.pyi]
+[out]
+
+[case testInferSetInitializedToEmptyUsingUpdate]
+a = set()
+a.update({0})
+a.add('')  # E: Argument 1 to "add" of "set" has incompatible type "str"; expected "int"
+[builtins fixtures/set.pyi]
+[out]
+
+[case testInferDictInitializedToEmpty]
+a = {}
+a[1] = ''
+a() # E: Dict[int, str] not callable
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testInferDictInitializedToEmptyUsingUpdate]
+a = {}
+a.update({'': 42})
+a() # E: Dict[str, int] not callable
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testInferDictInitializedToEmptyUsingUpdateError]
+a = {}  # E: Need type annotation for variable
+a.update([1, 2])
+a()
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testInferDictInitializedToEmptyAndIncompleteTypeInUpdate]
+a = {} # E: Need type annotation for variable
+a[1] = {}
+b = {} # E: Need type annotation for variable
+b[{}] = 1
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testInferDictInitializedToEmptyAndUpdatedFromMethod]
+map = {}
+def add():
+    map[1] = 2
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testSpecialCaseEmptyListInitialization]
+def f(blocks: Any): # E: Name 'Any' is not defined
+    to_process = [] # E: Need type annotation for variable
+    to_process = list(blocks)
+[builtins fixtures/list.pyi]
+[out]
+
+[case testSpecialCaseEmptyListInitialization2]
+def f(blocks: object):
+    to_process = [] # E: Need type annotation for variable
+    to_process = list(blocks) # E: No overload variant of "list" matches argument types [builtins.object]
+[builtins fixtures/list.pyi]
+[out]
+
+
+-- Inferring types of variables first initialized to None (partial types)
+-- ----------------------------------------------------------------------
+
+
+[case testLocalVariablePartiallyInitializedToNone]
+def f() -> None:
+    if object():
+        x = None
+    else:
+        x = 1
+    x() # E: "int" not callable
+[out]
+
+[case testLocalVariablePartiallyTwiceInitializedToNone]
+def f() -> None:
+    if object():
+        x = None
+    elif object():
+        x = None
+    else:
+        x = 1
+    x() # E: "int" not callable
+[out]
+
+[case testLvarInitializedToNoneWithoutType]
+import typing
+def f() -> None:
+    a = None
+    a.x() # E: None has no attribute "x"
+[out]
+
+[case testGvarPartiallyInitializedToNone]
+x = None
+if object():
+    x = 1
+x() # E: "int" not callable
+
+[case testPartiallyInitializedToNoneAndThenToPartialList]
+x = None
+if object():
+    # Promote from partial None to partial list.
+    x = []
+    x.append(1)
+x.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+
+[case testPartiallyInitializedToNoneAndThenReadPartialList]
+x = None
+if object():
+    # Promote from partial None to partial list.
+    x = []  # E: Need type annotation for variable
+    x
+[builtins fixtures/list.pyi]
+
+[case testPartiallyInitializedToNoneAndPartialListAndLeftPartial]
+def f() -> None:
+    x = None
+    if object():
+        # Promote from partial None to partial list.
+        x = []  # E: Need type annotation for variable
+[builtins fixtures/list.pyi]
+[out]
+
+[case testPartiallyInitializedToNoneAndThenToIncompleteType-skip]
+# TODO(ddfisher): fix partial type bug and re-enable
+from typing import TypeVar,  Dict
+T = TypeVar('T')
+def f(*x: T) -> Dict[int, T]: pass
+x = None  # E: Need type annotation for variable
+if object():
+    x = f()
+[builtins fixtures/dict.pyi]
+
+[case testPartiallyInitializedVariableDoesNotEscapeScope1]
+def f() -> None:
+    x = None
+    reveal_type(x)  # E: Revealed type is 'builtins.None'
+x = 1
+[out]
+
+[case testPartiallyInitializedVariableDoesNotEscapeScope2]
+x = None
+def f() -> None:
+    x = None
+    x = 1
+x()  # E: None not callable
+
+[case testAttributePartiallyInitializedToNone]
+class A:
+    def f(self) -> None:
+        self.x = None
+        self.x = 1
+        self.x() # E: "int" not callable
+[out]
+
+[case testAttributePartiallyInitializedToNoneWithMissingAnnotation]
+class A:
+    def f(self) -> None:
+        self.x = None
+
+    def g(self) -> None:
+        self.x = 1
+        self.x()
+[out]
+main:6: error: Incompatible types in assignment (expression has type "int", variable has type None)
+main:7: error: None not callable
+
+[case testGlobalInitializedToNoneSetFromFunction]
+a = None
+def f():
+    global a
+    a = 42
+[out]
+
+[case testGlobalInitializedToNoneSetFromMethod]
+a = None
+class C:
+    def m(self):
+        global a
+        a = 42
+[out]
+
+-- More partial type errors
+-- ------------------------
+
+[case testPartialTypeErrorSpecialCase1]
+# This used to crash.
+class A:
+    x = None
+    def f(self) -> None:
+        for a in self.x:
+            pass
+[builtins fixtures/for.pyi]
+[out]
+main:5: error: None has no attribute "__iter__"
+
+[case testPartialTypeErrorSpecialCase2]
+# This used to crash.
+class A:
+    x = []
+    def f(self) -> None:
+        for a in self.x:
+            pass
+[builtins fixtures/for.pyi]
+[out]
+main:3: error: Need type annotation for variable
+
+[case testPartialTypeErrorSpecialCase3]
+class A:
+    x = None
+    def f(self) -> None:
+        for a in A.x:
+            pass
+[builtins fixtures/for.pyi]
+[out]
+main:4: error: None has no attribute "__iter__"
+
+
+-- Multipass
+-- ---------
+
+
+[case testMultipassAndAccessVariableBeforeDefinition]
+def f() -> None:
+    y = x
+    y() # E: "int" not callable
+x = 1
+[out]
+
+[case testMultipassAndAccessInstanceVariableBeforeDefinition]
+class A:
+    def f(self) -> None:
+        y = self.x
+        y() # E: "int" not callable
+
+    def g(self) -> None:
+        self.x = 1
+[out]
+
+[case testMultipassAndTopLevelVariable]
+y = x # E: Cannot determine type of 'x'
+y()
+x = 1+0
+[out]
+
+[case testMultipassAndDecoratedMethod]
+from typing import Callable, TypeVar
+
+T = TypeVar('T')
+
+class A:
+    def f(self) -> None:
+        self.g() # E: Too few arguments for "g" of "A"
+        self.g(1)
+    @dec
+    def g(self, x: str) -> None: pass
+
+def dec(f: Callable[[A, str], T]) -> Callable[[A, int], T]: pass
+[out]
+
+[case testMultipassAndDefineAttributeBasedOnNotReadyAttribute]
+class A:
+    def f(self) -> None:
+        self.y = self.x
+
+    def g(self) -> None:
+        self.x = 1
+
+    def h(self) -> None:
+        self.y() # E: "int" not callable
+[out]
+
+[case testMultipassAndDefineAttributeBasedOnNotReadyAttribute2]
+class A:
+    def f(self) -> None:
+        self.y = self.x
+        self.z = self.y
+        self.z() # E
+        self.y() # E
+
+    def g(self) -> None:
+        self.x = 1
+
+    def h(self) -> None:
+        self.y() # E
+[out]
+main:5: error: "int" not callable
+main:6: error: "int" not callable
+main:12: error: "int" not callable
+
+[case testMultipassAndPartialTypes]
+def f() -> None:
+    x = []
+    y
+    x.append(1)
+    x.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+    x.append(y) # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+y = ''
+[builtins fixtures/list.pyi]
+[out]
+
+[case testMultipassAndPartialTypes2]
+s = ''
+n = 0
+def f() -> None:
+    global s, n
+    x = []
+    x.append(y)
+    s = x[0]
+    n = x[0] # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    x.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "str"
+y = ''
+[builtins fixtures/list.pyi]
+[out]
+
+[case testMultipassAndPartialTypes3]
+from typing import Dict
+def g(d: Dict[str, int]) -> None: pass
+def f() -> None:
+    x = {}
+    x[1] = y
+    g(x) # E: Argument 1 to "g" has incompatible type Dict[int, str]; expected Dict[str, int]
+    x[1] = 1 # E: Incompatible types in assignment (expression has type "int", target has type "str")
+    x[1] = ''
+y = ''
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testMultipassAndPartialTypes4]
+from typing import Dict
+def g(d: Dict[str, int]) -> None: pass
+def f() -> None:
+    x = {}
+    y
+    x[1] = 1
+    g(x) # E: Argument 1 to "g" has incompatible type Dict[int, int]; expected Dict[str, int]
+y = ''
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testMultipassAndCircularDependency]
+class A:
+    def f(self) -> None:
+        self.x = self.y # E: Cannot determine type of 'y'
+
+    def g(self) -> None:
+        self.y = self.x
+[out]
+
+[case testMultipassAndPartialTypesSpecialCase1]
+def f() -> None:
+    y = o
+    x = []
+    x.append(y)
+    x() # E: List[int] not callable
+o = 1
+[builtins fixtures/list.pyi]
+[out]
+
+[case testMultipassAndPartialTypesSpecialCase2]
+def f() -> None:
+    y = o
+    x = {}
+    x[''] = y
+    x() # E: Dict[str, int] not callable
+o = 1
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testMultipassAndPartialTypesSpecialCase3]
+def f() -> None:
+    x = {} # E: Need type annotation for variable
+    y = o
+    z = {} # E: Need type annotation for variable
+o = 1
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testMultipassAndPartialTypesSpecialCase4]
+def f() -> None:
+    y = o
+    x = None
+    x = y
+    x() # E: "int" not callable
+o = 1
+[out]
+
+[case testMultipassAndPartialTypesSpecialCase5]
+def f() -> None:
+    x = None
+    y = o
+    x = y
+    x() # E: "int" not callable
+o = 1
+[out]
+
+[case testMultipassAndClassAttribute]
+class S:
+    def foo(self) -> int:
+        return R.X
+
+class R:
+    X = 2
+
+[case testMultipassAndMultipleFiles]
+import m
+def f() -> None:
+    x()
+x = 0
+[file m.py]
+def g() -> None:
+    y()
+y = 0
+[out]
+tmp/m.py:2: error: "int" not callable
+main:3: error: "int" not callable
+
+
+-- Tests for special cases of unification
+-- --------------------------------------
+
+[case testUnificationRedundantUnion]
+from typing import Union
+a = None  # type: Union[int, str]
+b = None  # type: Union[str, tuple]
+def f(): pass
+def g(x: Union[int, str]): pass
+c = a if f() else b
+g(c) # E: Argument 1 to "g" has incompatible type "Union[int, str, tuple]"; expected "Union[int, str]"
+
+[case testUnificationMultipleInheritance]
+class A: pass
+class B:
+    def foo(self): pass
+class C(A, B): pass
+def f(): pass
+a1 = B() if f() else C()
+a1.foo()
+a2 = C() if f() else B()
+a2.foo()
+
+[case testUnificationMultipleInheritanceAmbiguous]
+# Show that join_instances_via_supertype() breaks ties using the first base class.
+class A1: pass
+class B1:
+    def foo1(self): pass
+class C1(A1, B1): pass
+
+class A2: pass
+class B2:
+    def foo2(self): pass
+class C2(A2, B2): pass
+
+class D1(C1, C2): pass
+class D2(C2, C1): pass
+
+def f(): pass
+
+a1 = D1() if f() else D2()
+a1.foo1()
+a2 = D2() if f() else D1()
+a2.foo2()
+
+[case testUnificationEmptyListLeft]
+def f(): pass
+a = [] if f() else [0]
+a() # E: List[int] not callable
+[builtins fixtures/list.pyi]
+
+[case testUnificationEmptyListRight]
+def f(): pass
+a = [0] if f() else []
+a() # E: List[int] not callable
+[builtins fixtures/list.pyi]
+
+[case testUnificationEmptyListLeftInContext]
+from typing import List
+def f(): pass
+a = [] if f() else [0] # type: List[int]
+a() # E: List[int] not callable
+[builtins fixtures/list.pyi]
+
+[case testUnificationEmptyListRightInContext]
+# TODO Find an example that really needs the context
+from typing import List
+def f(): pass
+a = [0] if f() else [] # type: List[int]
+a() # E: List[int] not callable
+[builtins fixtures/list.pyi]
+
+[case testUnificationEmptySetLeft]
+def f(): pass
+a = set() if f() else {0}
+a() # E: Set[int] not callable
+[builtins fixtures/set.pyi]
+
+[case testUnificationEmptyDictLeft]
+def f(): pass
+a = {} if f() else {0: 0}
+a() # E: Dict[int, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testUnificationEmptyDictRight]
+def f(): pass
+a = {0: 0} if f() else {}
+a() # E: Dict[int, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testUnificationDictWithEmptyListLeft]
+def f(): pass
+a = {0: []} if f() else {0: [0]}
+a() # E: Dict[int, List[int]] not callable
+[builtins fixtures/dict.pyi]
+
+[case testUnificationDictWithEmptyListRight]
+def f(): pass
+a = {0: [0]} if f() else {0: []}
+a() # E: Dict[int, List[int]] not callable
+[builtins fixtures/dict.pyi]
+
+[case testMisguidedSetItem]
+from typing import Generic, Sequence, TypeVar
+T = TypeVar('T')
+class C(Sequence[T], Generic[T]): pass
+C[0] = 0
+[out]
+main:4: error: Type expected within [...]
+main:4: error: Unsupported target for indexed assignment
diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test
new file mode 100644
index 0000000..795bcb1
--- /dev/null
+++ b/test-data/unit/check-isinstance.test
@@ -0,0 +1,1756 @@
+[case testForcedAssignment]
+x = 1 # type: object
+y = 1
+y = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+x = 2
+y = x
+
+[case testJoinAny]
+from typing import List, Any
+
+x = None # type: List[Any]
+
+def foo() -> List[int]: pass
+def bar() -> List[str]: pass
+
+if bool():
+    x = foo()
+else:
+    x = bar()
+
+x * 2
+[builtins fixtures/list.pyi]
+
+[case testGeneratorExpressionTypes]
+class A: y = 1
+x = [A()]
+y = [x]
+
+z = [1,2]
+z = [a.y for b in y for a in b]
+[builtins fixtures/list.pyi]
+
+[case testIsinstanceNestedTuple]
+from typing import Union, List, Tuple, Dict
+
+def f(x: Union[int, str, List]) -> None:
+    if isinstance(x, (str, (int,))):
+        reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+        x[1]  # E: Value of type "Union[int, str]" is not indexable
+    else:
+        reveal_type(x)  # E: Revealed type is 'builtins.list[Any]'
+        x[1]
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]'
+    if isinstance(x, (str, (list,))):
+        reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.list[Any]]'
+        x[1]
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[Any]]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testClassAttributeInitialization-skip]
+class A:
+    x = None # type: int
+    def __init__(self) -> None:
+        self.y = None # type: int
+        z = self.x
+        w = self.y
+
+[case testAssignmentSubtypes-skip]
+from typing import Union
+
+def foo(x: Union[str, int]):
+    if isinstance(x, int):
+        x = 'a'
+    x + 'a'         # Works in the current code
+    z = x           # We probably want this to be of type str.
+    y = [x]         # But what type should this be?
+    y[0] + 'a'      #  (1) Should this work?
+    y + [1]         #  (2) Or this?
+    z = 1           # Also, is this valid?
+
+x = None # type: int
+y = [x]
+[builtins fixtures/isinstancelist.pyi]
+
+[case testFunctionDefaultArgs]
+class A: pass
+class B(A): y = 1
+
+x =  A()
+
+def foo(x: A = B()):
+    x.y   # E: "A" has no attribute "y"
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceFancyConditionals]
+class A: pass
+
+class B(A):
+    y = 1
+
+x =  A()
+if isinstance(x, B):
+    x.y
+while isinstance(x, B):
+    x.y
+while isinstance(x, B):
+    x.y
+    x = B()
+[builtins fixtures/isinstance.pyi]
+
+[case testSubtypingWithAny]
+class A:
+    y = 1
+
+class B(A):
+    z = 1
+
+def foo(): pass
+
+x = A()
+x = B()
+x.z
+x = foo()
+x.z          # E: "A" has no attribute "z"
+x.y
+
+[case testSingleMultiAssignment-skip]
+x = 'a'
+(x, ) = ('a',)
+
+[case testUnionMultiAssignment]
+from typing import Union
+x = None # type: Union[int, str]
+x = 1
+x = 'a'
+x + 1    # E: Unsupported operand types for + ("str" and "int")
+x = 1
+(x, y) = ('a', 1)
+x + 1    # E: Unsupported operand types for + ("str" and "int")
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnionIfZigzag]
+from typing import Union
+
+def f(x: Union[int, str]) -> None:
+    x = 1
+    if x:
+        x = 'a'
+        x = 1
+    x + 1
+[builtins fixtures/isinstancelist.pyi]
+
+[case testTwoLoopsUnion]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+def bar() -> None:
+    x = foo()
+    if isinstance(x, int):
+        return
+    while bool():
+        x + 'a'
+        while bool():
+            x = foo()
+            if bool():
+                return
+            x = 'a'
+    x + 'a'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testComplicatedBlocks]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+def bar() -> None:
+    x = foo()
+    if isinstance(x, int):
+        return
+    while bool():
+        x + 'a'
+        while bool():
+            x = foo()
+            if bool():
+                return
+            x = 'a'
+    x + 'a'
+
+    x = foo()
+    if isinstance(x, int):
+        return
+    while bool():
+        x + 'a'
+        while bool():
+            x + 'a'         # E: Unsupported operand types for + (likely involving Union)
+            x = foo()
+            if bool():
+                continue
+            x = 'a'
+        x = 'a'
+    x + 'a'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnionTryExcept]
+class A:
+    y = A()
+
+class B(A):
+    z = 1
+
+x = A()
+x = B()
+x.z
+try:
+    x.z
+    x = A()
+    x = B()
+    x.z
+except:
+    pass
+x.z           # E: "A" has no attribute "z"
+
+[case testUnionTryExcept2]
+class A:
+    y = A()
+
+class B(A):
+    z = 1
+
+x = A()
+try:
+    x.z # E: "A" has no attribute "z"
+    x = A()
+    x = B()
+    x.z
+except:
+    x.z # E: "A" has no attribute "z"
+    x = B()
+    x.z
+else:
+    x = B()
+x.z
+
+[case testUnionTryExcept3]
+class A:
+    y = A()
+
+class B(A):
+    z = 1
+
+x = A()
+x = B()
+try:
+    raise BaseException()
+    x = A()
+except:
+    pass
+x.z
+x = B()
+try:
+    x = A()
+    raise BaseException()
+except:
+    pass
+x.z           # E: "A" has no attribute "z"
+x = B()
+try:
+    pass
+except:
+    x = A()
+    raise BaseException()
+x.z
+try:
+    x = A()
+except:
+    pass
+x.z           # E: "A" has no attribute "z"
+x = B()
+try:
+    pass
+except:
+    x = A()
+x.z           # E: "A" has no attribute "z"
+[builtins fixtures/exception.pyi]
+
+[case testUnionTryExcept4]
+class A: pass
+
+class B(A):
+    z = 1
+
+x = A()
+while bool():
+    try:
+        x.z # E: "A" has no attribute "z"
+        x = A()
+    except:
+        x = B()
+    else:
+        x = B()
+    x.z
+[builtins fixtures/exception.pyi]
+
+[case testUnionTryFinally]
+class A: pass
+
+class B(A):
+    b = 1
+
+x = A()
+x = B()
+try:
+    x = A()
+    x.b # E: "A" has no attribute "b"
+    x = B()
+finally:
+    x.b  # E: "A" has no attribute "b"
+x.b
+
+[case testUnionTryFinally2]
+class A: pass
+
+class B(A):
+    b = 1
+
+x = A()
+x = B()
+try:
+    x = A()
+    x = B()
+except:
+    pass
+finally:
+    pass
+x.b      # E: "A" has no attribute "b"
+
+[case testUnionTryFinally3]
+class A: pass
+
+class B(A):
+    b = 1
+
+x = A()
+x = B()
+try:
+    x = A()
+    x = B()
+except:
+    pass
+finally:
+    x = B()
+x.b
+
+[case testUnionTryFinally4]
+class A: pass
+
+class B(A):
+    b = 1
+
+while 2:
+    x = A()
+    x = B()
+    try:
+        x = A()
+        x = B()
+    except:
+        pass
+    finally:
+        x.b     # E: "A" has no attribute "b"
+        if not isinstance(x, B):
+            break
+    x.b
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnionTryFinally5]
+class A: pass
+
+class B(A):
+    b = 1
+
+while 2:
+    x = A()
+    try:
+        x = A()
+        x = B()
+    finally:
+        x.b    # E: "A" has no attribute "b"
+        break
+        x.b
+    x.b
+
+[case testUnionTryFinally6]
+class A: pass
+
+class B(A):
+    b = 1
+
+def f() -> int:
+    x = B()  # type: A
+    try:
+        x = B()
+    except:
+        x = A()
+        # An exception could occur here
+        x = B()
+    finally:
+        return x.b # E: "A" has no attribute "b"
+
+[case testUnionListIsinstance]
+from typing import Union, List
+
+def f(x: Union[List[int], List[str], int]) -> None:
+    if isinstance(x, list):
+        a = x[0]
+        if isinstance(a, int):
+            a + 1
+            a + 'x' # E: Unsupported operand types for + ("int" and "str")
+
+        # type of a?
+        reveal_type(x) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str]]'
+        x + 1 # E: Unsupported operand types for + (likely involving Union)
+    else:
+        x[0] # E: Value of type "int" is not indexable
+        x + 1
+    x[0] # E: Value of type "Union[List[int], List[str], int]" is not indexable
+    x + 1 # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnionListIsinstance2]
+from typing import Union, List
+
+class A:
+    a = 1
+
+class B: pass
+class C: pass
+
+def g(x: Union[A, B]) -> A: pass
+def h(x: C) -> A: pass
+def f(x: Union[A, B, C]) -> None:
+    if isinstance(x, C):
+        x = h(x)
+    else:
+        x = g(x)
+    x.a
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnionStrictDefnBasic]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x = 1
+x = x + 1
+x = foo()
+x = x + 1                # E: Unsupported operand types for + (likely involving Union)
+if isinstance(x, str):
+   x = x + 1             # E: Unsupported operand types for + ("str" and "int")
+   x = 1
+   x = x + 1
+[builtins fixtures/isinstancelist.pyi]
+
+[case testSubtypeRedefinitionBasic]
+from typing import Union
+
+class A: pass
+
+class B(A):
+    y = 1
+
+x = A()
+x.y        # E: "A" has no attribute "y"
+x = B()
+x.y        # OK: x is known to be a B
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceBasic]
+from typing import Union
+x = None # type: Union[int, str]
+if isinstance(x, str):
+    x = x + 1   # E: Unsupported operand types for + ("str" and "int")
+    x = x + 'a'
+else:
+    x = x + 'a' # E: Unsupported operand types for + ("int" and "str")
+    x = x + 1
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceIndexing]
+from typing import Union
+x = None # type: Union[int, str]
+j = [x]
+if isinstance(j[0], str):
+    j[0] = j[0] + 'a'
+    j[0] = j[0] + 1   # E: Unsupported operand types for + ("str" and "int")
+else:
+    j[0] = j[0] + 'a' # E: Unsupported operand types for + ("int" and "str")
+    j[0] = j[0] + 1
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceSubClassMember]
+from typing import Union
+
+class Animal: pass
+
+class Dog(Animal):
+    paws = 4  # type: Union[int, str]
+
+    def bark(self): pass
+
+class House:
+    pet = None  # type: Animal
+
+h = House()
+h.pet = Dog()
+
+while bool():
+    if isinstance(h.pet, Dog):
+        if isinstance(h.pet.paws, str):
+            x = h.pet.paws + 'a'
+        y = h.pet.paws + 1   # E: Unsupported operand types for + (likely involving Union)
+        z = h.pet.paws + 'a' # E: Unsupported operand types for + (likely involving Union)
+        if isinstance(h.pet.paws, str):
+            x = h.pet.paws + 'a'
+            break
+        y = h.pet.paws + 1
+        z = h.pet.paws + 'a' # E: Unsupported operand types for + ("int" and "str")
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceSubClassReset]
+class A:
+    pass
+
+class B(A):
+    b = 1
+
+class C:
+    a = A()
+
+x = C()
+x.a.b                   # E: "A" has no attribute "b"
+if isinstance(x.a, B):
+    x.a.b
+    x = C()
+    x.a.b               # E: "A" has no attribute "b"
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceTuple]
+from typing import Union
+
+class A: pass
+
+class B:
+    def method2(self, arg: int):
+        return 123
+
+class C:
+    def method2(self, arg: int):
+        return 456
+
+    def method3(self, arg: str):
+        return 'abc'
+
+v = A() # type: Union[A, B, C]
+
+if isinstance(v, (B, C)):
+    v.method2(123)
+    v.method3('xyz') # E: Item "B" of "Union[B, C]" has no attribute "method3"
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceNeverWidens]
+from typing import Union
+
+class A: pass
+class B: pass
+class C: pass
+
+a = A()  # type: A
+assert isinstance(a, (A, B))
+reveal_type(a)  # E: Revealed type is '__main__.A'
+
+b = A()  # type: Union[A, B]
+assert isinstance(b, (A, B, C))
+reveal_type(b)  # E: Revealed type is 'Union[__main__.A, __main__.B]'
+[builtins fixtures/isinstance.pyi]
+
+[case testMemberAssignmentChanges-skip]
+from typing import Union
+
+class Dog:
+    paws = 1  # type: Union[int, str]
+
+pet = Dog()
+
+pet.paws + 'a'  # E: moo
+pet.paws = 'a'
+pet.paws + 'a'
+pet.paws = 1
+pet.paws + 1
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceSubClassMemberHard-skip]
+from typing import Union
+
+class Animal:
+    pass
+
+class Dog(Animal):
+    paws = 4  # type: Union[int, str]
+
+    def bark(self): pass
+
+class House:
+    pet = None  # type: Animal
+
+h = House()
+h.pet = Dog()
+
+if isinstance(h.pet, Dog):
+    if isinstance(h.pet.paws, str):
+        for i in [1]:
+            h.pet.paws + 'a'
+            if bool():
+                break
+            h.pet.paws = 1
+            h.pet.paws + 1
+
+    if isinstance(h.pet.paws, str):
+        h.pet.paws + 'a'
+    else:
+        h.pet.paws + 1
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceReturn]
+from typing import Union
+
+def foo() -> None:
+    x = 1 # type: Union[int, str]
+    if isinstance(x, int):
+        return
+    y = x + 'asdad'
+
+def bar() -> None:
+    x = 1 # type: Union[int, str]
+    if isinstance(x, int):
+        return
+    else:
+        pass
+    y = x + 'asdad'
+
+foo()
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceBadBreak]
+from typing import Union
+
+def foo() -> None:
+    x = None # type: Union[int, str]
+    if isinstance(x, int):
+        for z in [1,2]:
+            break
+    else:
+        pass
+    y = x + 'asdad'    # E: Unsupported operand types for + (likely involving Union)
+
+foo()
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceThreeUnion]
+from typing import Union, List
+
+x = None # type: Union[int, str, List[int]]
+
+while bool():
+    if isinstance(x, int):
+        x + 1
+    elif isinstance(x, str):
+        x + 'a'
+    else:
+        x + [1]
+    x + 'a'           # E: Unsupported operand types for + (likely involving Union)
+x + [1]               # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceThreeUnion2]
+from typing import Union, List
+x = None # type: Union[int, str, List[int]]
+while bool():
+    if isinstance(x, int):
+        x + 1
+        break
+    elif isinstance(x, str):
+        x + 'a'
+        break
+    x + [1]
+    x + 'a'           # E: Unsupported operand types for + (List[int] and "str")
+x + [1]               # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceThreeUnion3]
+from typing import Union, List
+
+while bool():
+    x = None # type: Union[int, str, List[int]]
+    x = 1
+    if isinstance(x, int):
+        x + 1
+        break
+    elif isinstance(x, str):
+        x + 'a'
+        break
+    x + [1]           # These lines aren't reached because x was an int
+    x + 'a'
+x + [1]               # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testRemovingTypeRepeatedly]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+for i in [1, 2]:
+    x = foo()
+    x + 'a'                 # E: Unsupported operand types for + (likely involving Union)
+    if isinstance(x, int):
+        break
+    x + 'a'
+
+    x = foo()
+    x + 'a'                 # E: Unsupported operand types for + (likely involving Union)
+    if isinstance(x, int):
+        break
+    x + 'a'
+
+    x = foo()
+    x + 'a'                 # E: Unsupported operand types for + (likely involving Union)
+    if isinstance(x, int):
+        break
+    x + 'a'
+
+x + 'a'                    # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyRepeatedly]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x + 1     # E: Unsupported operand types for + (likely involving Union)
+x + 'a'   # E: Unsupported operand types for + (likely involving Union)
+
+x = 1
+x + 1
+x + 'a'   # E: Unsupported operand types for + ("int" and "str")
+
+x = 'a'
+x + 1     # E: Unsupported operand types for + ("str" and "int")
+x + 'a'
+
+x = foo()
+x + 1     # E: Unsupported operand types for + (likely involving Union)
+x + 'a'   # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyLoop]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x + 1      # E: Unsupported operand types for + (likely involving Union)
+x = 'a'
+x + 1      # E: Unsupported operand types for + ("str" and "int")
+x = 1
+x + 1
+
+while bool():
+      x + 1    # E: Unsupported operand types for + (likely involving Union)
+      x = 'a'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyLoop2]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x + 1      # E: Unsupported operand types for + (likely involving Union)
+x = 'a'
+x + 1      # E: Unsupported operand types for + ("str" and "int")
+x = 1
+x + 1
+
+for i in [1]:
+      x = 'a'
+
+x + 1    # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyLoop3]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x = 1
+
+while bool():
+    x + 1
+    x = 'a'
+    break
+else:
+    x + 1
+x + 1      # E: Unsupported operand types for + (likely involving Union)
+x = 1
+for y in [1]:
+    x + 1
+    x = 'a'
+    break
+else:
+    x + 1
+x + 1      # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyLoopWhile4]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x = 1
+
+while bool():
+    x + 1
+    if bool():
+        x = 'a'
+        break
+else:
+    x + 1
+    x = 'a'
+x + 'a'
+x = 1
+while bool():
+    x + 1 # E: Unsupported operand types for + (likely involving Union)
+    if bool():
+        x = 'a'
+        continue
+else:
+    x + 1 # E: Unsupported operand types for + (likely involving Union)
+    x = 'a'
+x + 'a'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyLoopFor4]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x = 1
+
+for y in [1]:
+    x + 1
+    if bool():
+        x = 'a'
+        break
+else:
+    x + 1
+    x = 'a'
+x + 'a'
+x = 1
+for y in [1]:
+    x + 1 # E: Unsupported operand types for + (likely involving Union)
+    if bool():
+        x = 'a'
+        continue
+else:
+    x + 1 # E: Unsupported operand types for + (likely involving Union)
+    x = 'a'
+x + 'a'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyNestedLoop]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x = 1
+
+for y in [1]:
+    for z in [1]:
+        break
+    else:
+        x = 'a'
+        break
+else:
+    x + 1
+x + 1 # E: Unsupported operand types for + (likely involving Union)
+x = 1
+while bool():
+    while bool():
+        break
+    else:
+        x = 'a'
+        break
+else:
+    x + 1
+x + 1 # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyLoopLong]
+from typing import Union
+
+class A: a = 1
+
+def foo() -> Union[int, str, A]: pass
+
+def bar() -> None:
+    x = foo()
+    x + 1  # E: Unsupported left operand type for + (some union) \
+           # E: Unsupported operand types for + (likely involving Union)
+    if isinstance(x, A):
+       x.a
+    else:
+       if isinstance(x, int):
+          x + 1
+          x + 'a'  # E: Unsupported operand types for + ("int" and "str")
+       else:
+          x + 'a'
+          x.a      # E: "str" has no attribute "a"
+       x = A()
+
+    if isinstance(x, str):
+       x + 'a'
+    else:
+       while bool():
+          if isinstance(x, int):
+             x + 1
+          else:
+             x.a
+          break
+       while bool():
+          if isinstance(x, int):
+             x + 1
+          else:
+             x.a
+          continue
+
+       while bool():
+          if isinstance(x, int):
+             x + 1
+          else:
+             x.a     # E: Item "str" of "Union[str, A]" has no attribute "a"
+          x = 'a'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testWhileExitCondition1]
+from typing import Union
+x = 1  # type: Union[int, str]
+while isinstance(x, int):
+    if bool():
+        continue
+    x = 'a'
+else:
+    reveal_type(x)  # E: Revealed type is 'builtins.str'
+reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/isinstance.pyi]
+
+[case testWhileExitCondition2]
+from typing import Union
+x = 1  # type: Union[int, str]
+while isinstance(x, int):
+    if bool():
+        break
+    x = 'a'
+else:
+    reveal_type(x)  # E: Revealed type is 'builtins.str'
+reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+[builtins fixtures/isinstance.pyi]
+
+[case testWhileLinkedList]
+from typing import Union
+LinkedList = Union['Cons', 'Nil']
+class Nil: pass
+class Cons:
+    tail = None  # type: LinkedList
+def last(x: LinkedList) -> Nil:
+    while isinstance(x, Cons):
+        x = x.tail
+    return x
+[builtins fixtures/isinstance.pyi]
+
+[case testReturnAndFlow]
+def foo() -> int:
+    return 1 and 2
+    return 'a'
+[case testCastIsinstance]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+y = 1 # type: int
+
+if isinstance(x, str):
+    x = y
+x + 1
+x + 'a'   # E: Unsupported operand types for + ("int" and "str")
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnreachableCode]
+x = 1 # type: int
+
+while bool():
+    x = 'a'           # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    break
+    x = 'a'           # Note: no error because unreachable code
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnreachableCode2]
+x = 1
+while bool():
+    try:
+        pass
+    except:
+        continue
+    else:
+        continue
+    x + 'a'
+[builtins fixtures/isinstance.pyi]
+
+[case testUnreachableWhileTrue]
+def f(x: int) -> None:
+    while True:
+        if x:
+            return
+    1()
+[builtins fixtures/bool.pyi]
+
+[case testUnreachableAssertFalse]
+def f() -> None:
+    assert False
+    1()
+[builtins fixtures/bool.pyi]
+
+[case testUnreachableAssertFalse2]
+def f() -> None:
+    # The old parser doesn't understand the syntax below
+    assert False, "hi"
+    1()
+[builtins fixtures/bool.pyi]
+
+[case testUnreachableReturnOrAssertFalse]
+def f(x: int) -> int:
+    if x:
+        return x
+    else:
+        assert False
+    1()
+[builtins fixtures/bool.pyi]
+
+[case testUnreachableTryExcept]
+def f() -> None:
+    try:
+        f()
+        return
+    except BaseException:
+        return
+    1()
+[builtins fixtures/exception.pyi]
+
+[case testUnreachableTryExceptElse]
+def f() -> None:
+    try:
+        f()
+    except BaseException:
+        return
+    else:
+        return
+    1()
+[builtins fixtures/exception.pyi]
+
+[case testUnreachableTryReturnFinally1]
+def f() -> None:
+    try:
+        return
+    finally:
+        pass
+    1()
+
+[case testUnreachableTryReturnFinally2]
+def f() -> None:
+    try:
+        pass
+    finally:
+        return
+    1()
+
+[case testUnreachableTryReturnExceptRaise]
+def f() -> None:
+    try:
+        return
+    except:
+        raise
+    1()
+
+[case testUnreachableReturnLambda]
+from typing import Callable
+def g(t: Callable[[int], int]) -> int: pass
+def f() -> int:
+    return g(lambda x: x)
+    1()
+
+[case testIsinstanceAnd]
+class A: pass
+
+class B(A):
+    flag = 1
+
+x = B() # type: A
+
+if isinstance(x, B) and 1:
+   x.flag
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceShortcircuit]
+class A:
+    pass
+
+class B(A):
+    flag = 1
+
+x = B() # type: A
+
+if isinstance(x, B) and x.flag:
+    pass
+if isinstance(x, B) or x.flag: # E: "A" has no attribute "flag"
+    pass
+if not isinstance(x, B) or x.flag:
+    pass
+if not isinstance(x, B) and x.flag: # E: "A" has no attribute "flag"
+    pass
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceExpression]
+class A:
+    pass
+
+class B(A):
+    flag = 1
+
+x = B() # type: A
+
+x.flag if isinstance(x, B) else 0
+0 if not isinstance(x, B) else x.flag
+0 if isinstance(x, B) else x.flag # E: "A" has no attribute "flag"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceMultiAnd]
+class A: pass
+
+class B(A):
+    flag = 1
+
+class C(A):
+    glaf = 1
+
+x = B() # type: A
+y = C() # type: A
+
+if isinstance(x, B) and isinstance(y, C):
+    x.flag += 1
+    y.glaf += 1
+    x() # E: "B" not callable
+    y() # E: "C" not callable
+else:
+    x() # E: "A" not callable
+    y() # E: "A" not callable
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceMultiAndSpecialCase]
+class A: pass
+
+class B(A):
+    flag = 1
+
+class C(A):
+    glaf = 1
+
+x = B() # type: A
+y = C() # type: A
+
+if isinstance(x, B) and isinstance(y, int):
+    1() # type checking skipped
+if isinstance(y, int) and isinstance(x, B):
+    1() # type checking skipped
+[builtins fixtures/isinstancelist.pyi]
+
+[case testReturnWithCallExprAndIsinstance]
+from typing import Union
+
+def f(x: Union[int, str]) -> None:
+    if not isinstance(x, int):
+        return foo()
+    x()  # E: "int" not callable
+
+def foo(): pass
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceOr1]
+from typing import Optional
+
+def f(a: bool, x: object) -> Optional[int]:
+    if a or not isinstance(x, int):
+        return None
+    reveal_type(x) # E: Revealed type is 'builtins.int'
+    return x
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceOr2]
+from typing import Optional
+
+def g(a: bool, x: object) -> Optional[int]:
+    if not isinstance(x, int) or a:
+        return None
+    reveal_type(x) # E: Revealed type is 'builtins.int'
+    return x
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceOr3]
+from typing import Optional
+
+def h(a: bool, x: object) -> Optional[int]:
+    if a or isinstance(x, int):
+        return None
+    return x # E: Incompatible return value type (got "object", expected "Optional[int]")
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceWithOverlappingUnionType]
+from typing import Union
+
+def f(x: Union[float, int]) -> None:
+    if isinstance(x, float):
+        pass
+    if not isinstance(x, int):
+        f(x)
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceWithOverlappingUnionType2]
+from typing import Union
+
+class A: pass
+class B(A): pass
+
+def f(x: Union[A, B]) -> None:
+    if isinstance(x, A):
+        pass
+    if not isinstance(x, B):
+        f(x)
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceOfSuperclass]
+class A: pass
+class B(A): pass
+
+x = B()
+if isinstance(x, A):
+    reveal_type(x)  # E: Revealed type is '__main__.B'
+if not isinstance(x, A):
+    reveal_type(x)  # unreachable
+    x = A()
+reveal_type(x)  # E: Revealed type is '__main__.B'
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceOfNonoverlapping]
+class A: pass
+class B: pass
+
+x = B()
+if isinstance(x, A):
+    reveal_type(x)  # unreachable
+else:
+    reveal_type(x)  # E: Revealed type is '__main__.B'
+reveal_type(x)  # E: Revealed type is '__main__.B'
+[builtins fixtures/isinstance.pyi]
+
+[case testAssertIsinstance]
+def f(x: object):
+    assert isinstance(x, int)
+    y = 0 # type: int
+    y = x
+[builtins fixtures/isinstance.pyi]
+
+[case testUnionAssertIsinstance]
+from typing import Union
+
+def f(x: Union[str, int]):
+    assert isinstance(x, int)
+    y = 0 # type: int
+    y = x
+[builtins fixtures/isinstance.pyi]
+
+[case testAnyAssertIsinstance]
+from typing import Any
+
+def f(x: Any):
+    assert isinstance(x, int)  # this should narrow x to type int
+    x + "foo"  # E: Unsupported operand types for + ("int" and "str")
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceOfGenericClassRetainsParameters]
+from typing import List, Union
+
+def f(x: Union[List[int], str]) -> None:
+    if isinstance(x, list):
+        x[0]() # E: "int" not callable
+    else:
+        reveal_type(x) # E: Revealed type is 'builtins.str'
+    reveal_type(x) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.str]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceOrIsinstance]
+class A: pass
+
+class B(A):
+    flag = 1
+
+class C(A):
+    flag = 2
+
+x1 = A()
+if isinstance(x1, B) or isinstance(x1, C):
+    reveal_type(x1) # E: Revealed type is 'Union[__main__.B, __main__.C]'
+    f = x1.flag  # type: int
+else:
+    reveal_type(x1) # E: Revealed type is '__main__.A'
+    f = 0
+reveal_type(x1) # E: Revealed type is '__main__.A'
+x2 = A()
+if isinstance(x2, A) or isinstance(x2, C):
+    reveal_type(x2) # E: Revealed type is '__main__.A'
+    f = x2.flag # E: "A" has no attribute "flag"
+else:
+    # unreachable
+    1()
+reveal_type(x2) # E: Revealed type is '__main__.A'
+[builtins fixtures/isinstance.pyi]
+
+[case testComprehensionIsInstance]
+from typing import List, Union
+a = [] # type: List[Union[int, str]]
+l = [x for x in a if isinstance(x, int)]
+g = (x for x in a if isinstance(x, int))
+d = {0: x for x in a if isinstance(x, int)}
+reveal_type(l) # E: Revealed type is 'builtins.list[builtins.int*]'
+reveal_type(g) # E: Revealed type is 'typing.Iterator[builtins.int*]'
+reveal_type(d) # E: Revealed type is 'builtins.dict[builtins.int*, builtins.int*]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceInWrongOrderInBooleanOp]
+class A:
+    m = 1
+
+def f(x: object) -> None:
+    if x.m and isinstance(x, A) or False:  # E: "object" has no attribute "m"
+        pass
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceAndOr]
+class A:
+    a = None  # type: A
+
+def f(x: object) -> None:
+    b = isinstance(x, A) and x.a or A()
+    reveal_type(b)  # E: Revealed type is '__main__.A'
+[builtins fixtures/isinstance.pyi]
+
+[case testIsInstanceWithUnknownType]
+from typing import Union
+
+def f(x: Union[int, str], typ: type) -> None:
+    if isinstance(x, (typ, int)):
+        x + 1  # E: Unsupported operand types for + (likely involving Union)
+        reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+    else:
+        reveal_type(x)  # E: Revealed type is 'builtins.str'
+    reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceWithBoundedType]
+from typing import Union, Type
+
+class A: pass
+
+def f(x: Union[int, A], a: Type[A]) -> None:
+    if isinstance(x, (a, int)):
+        reveal_type(x)  # E: Revealed type is 'Union[builtins.int, __main__.A]'
+    else:
+        reveal_type(x)  # E: Revealed type is '__main__.A'
+    reveal_type(x) # E: Revealed type is 'Union[builtins.int, __main__.A]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceWithEmtpy2ndArg]
+from typing import Union
+
+def f(x: Union[int, str]) -> None:
+    if isinstance(x, ()):
+        reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+    else:
+        reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceWithTypeObject]
+from typing import Union, Type
+
+class A: pass
+
+def f(x: Union[int, A], a: Type[A]) -> None:
+    if isinstance(x, a):
+        reveal_type(x)  # E: Revealed type is '__main__.A'
+    elif isinstance(x, int):
+        reveal_type(x)  # E: Revealed type is 'builtins.int'
+    else:
+        reveal_type(x)  # E: Revealed type is '__main__.A'
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.int, __main__.A]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIssubclassUnreachable]
+from typing import Type, Sequence, Union
+
+x: Type[str]
+if issubclass(x, int):
+    reveal_type(x)  # unreachable block
+
+class X: pass
+class Y(X): pass
+class Z(X): pass
+
+a: Union[Type[Y], Type[Z]]
+if issubclass(a, X):
+    reveal_type(a)  # E: Revealed type is 'Union[Type[__main__.Y], Type[__main__.Z]]'
+else:
+    reveal_type(a)  # unreachable block
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIssubclasDestructuringUnions1]
+from typing import Union, List, Tuple, Dict, Type
+def f(x: Union[Type[int], Type[str], Type[List]]) -> None:
+    if issubclass(x, (str, (int,))):
+        reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str]]'
+        reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+        x()[1]  # E: Value of type "Union[int, str]" is not indexable
+    else:
+        reveal_type(x)  # E: Revealed type is 'Type[builtins.list]'
+        reveal_type(x())  # E: Revealed type is 'builtins.list[<nothing>]'
+        x()[1]
+    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list]]'
+    reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+    if issubclass(x, (str, (list,))):
+        reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]'
+        reveal_type(x())  # E: Revealed type is 'Union[builtins.str, builtins.list[<nothing>]]'
+        x()[1]
+    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
+    reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIssubclasDestructuringUnions2]
+from typing import Union, List, Tuple, Dict, Type
+
+def f(x: Type[Union[int, str, List]]) -> None:
+    if issubclass(x, (str, (int,))):
+        reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str]]'
+        reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+        x()[1]  # E: Value of type "Union[int, str]" is not indexable
+    else:
+        reveal_type(x)  # E: Revealed type is 'Type[builtins.list]'
+        reveal_type(x())  # E: Revealed type is 'builtins.list[<nothing>]'
+        x()[1]
+    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list]]'
+    reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+    if issubclass(x, (str, (list,))):
+        reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]'
+        reveal_type(x())  # E: Revealed type is 'Union[builtins.str, builtins.list[<nothing>]]'
+        x()[1]
+    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
+    reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIssubclasDestructuringUnions3]
+from typing import Union, List, Tuple, Dict, Type
+
+def f(x: Type[Union[int, str, List]]) -> None:
+    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list]]'
+    reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+    if issubclass(x, (str, (int,))):
+        reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str]]'
+        reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+        x()[1]  # E: Value of type "Union[int, str]" is not indexable
+    else:
+        reveal_type(x)  # E: Revealed type is 'Type[builtins.list]'
+        reveal_type(x())  # E: Revealed type is 'builtins.list[<nothing>]'
+        x()[1]
+    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list]]'
+    reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+    if issubclass(x, (str, (list,))):
+        reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.str], Type[builtins.list[Any]]]'
+        reveal_type(x())  # E: Revealed type is 'Union[builtins.str, builtins.list[<nothing>]]'
+        x()[1]
+    reveal_type(x)  # E: Revealed type is 'Union[Type[builtins.int], Type[builtins.str], Type[builtins.list[Any]]]'
+    reveal_type(x())  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.list[<nothing>]]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIssubclass]
+from typing import Type, ClassVar
+
+class Goblin:
+    level: int
+
+class GoblinAmbusher(Goblin):
+    job: ClassVar[str] = 'Ranger'
+
+def test_issubclass(cls: Type[Goblin]) -> None:
+    if issubclass(cls, GoblinAmbusher):
+        reveal_type(cls)  # E: Revealed type is 'Type[__main__.GoblinAmbusher]'
+        cls.level
+        cls.job
+        ga = cls()
+        ga.level = 15
+        ga.job
+        ga.job = "Warrior"  # E: Cannot assign to class variable "job" via instance
+    else:
+        reveal_type(cls)  # E: Revealed type is 'Type[__main__.Goblin]'
+        cls.level
+        cls.job  # E: Type[Goblin] has no attribute "job"
+        g = cls()
+        g.level = 15
+        g.job  # E: "Goblin" has no attribute "job"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIssubclassDeepHierarchy]
+from typing import Type, ClassVar
+
+class Mob: pass
+
+class Goblin(Mob):
+    level: int
+
+class GoblinAmbusher(Goblin):
+    job: ClassVar[str] = 'Ranger'
+
+def test_issubclass(cls: Type[Mob]) -> None:
+    if issubclass(cls, Goblin):
+        reveal_type(cls)  # E: Revealed type is 'Type[__main__.Goblin]'
+        cls.level
+        cls.job  # E: Type[Goblin] has no attribute "job"
+        g = cls()
+        g.level = 15
+        g.job  # E: "Goblin" has no attribute "job"
+        if issubclass(cls, GoblinAmbusher):
+            reveal_type(cls)  # E: Revealed type is 'Type[__main__.GoblinAmbusher]'
+            cls.level
+            cls.job
+            g = cls()
+            g.level = 15
+            g.job
+            g.job = 'Warrior' # E: Cannot assign to class variable "job" via instance
+    else:
+        reveal_type(cls)  # E: Revealed type is 'Type[__main__.Mob]'
+        cls.job  # E: Type[Mob] has no attribute "job"
+        cls.level  # E: Type[Mob] has no attribute "level"
+        m = cls()
+        m.level = 15  # E: "Mob" has no attribute "level"
+        m.job  # E: "Mob" has no attribute "job"
+        if issubclass(cls, GoblinAmbusher):
+            reveal_type(cls)  # E: Revealed type is 'Type[__main__.GoblinAmbusher]'
+            cls.job
+            cls.level
+            ga = cls()
+            ga.level = 15
+            ga.job
+            ga.job = 'Warrior' # E: Cannot assign to class variable "job" via instance
+
+    if issubclass(cls, GoblinAmbusher):
+        reveal_type(cls)  # E: Revealed type is 'Type[__main__.GoblinAmbusher]'
+        cls.level
+        cls.job
+        ga = cls()
+        ga.level = 15
+        ga.job
+        ga.job = "Warrior"  # E: Cannot assign to class variable "job" via instance
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIssubclassTuple]
+from typing import Type, ClassVar
+
+class Mob: pass
+
+class Goblin(Mob):
+    level: int
+
+class GoblinAmbusher(Goblin):
+    job: ClassVar[str] = 'Ranger'
+
+class GoblinDigger(Goblin):
+    job: ClassVar[str] = 'Thief'
+
+def test_issubclass(cls: Type[Mob]) -> None:
+    if issubclass(cls, (Goblin, GoblinAmbusher)):
+        reveal_type(cls)  # E: Revealed type is 'Type[__main__.Goblin]'
+        cls.level
+        cls.job  # E: Type[Goblin] has no attribute "job"
+        g = cls()
+        g.level = 15
+        g.job  # E: "Goblin" has no attribute "job"
+        if issubclass(cls, GoblinAmbusher):
+            cls.level
+            reveal_type(cls)  # E: Revealed type is 'Type[__main__.GoblinAmbusher]'
+            cls.job
+            ga = cls()
+            ga.level = 15
+            ga.job
+            ga.job = "Warrior"  # E: Cannot assign to class variable "job" via instance
+    else:
+        reveal_type(cls)  # E: Revealed type is 'Type[__main__.Mob]'
+        cls.job  # E: Type[Mob] has no attribute "job"
+        cls.level  # E: Type[Mob] has no attribute "level"
+        m = cls()
+        m.level = 15  # E: "Mob" has no attribute "level"
+        m.job  # E: "Mob" has no attribute "job"
+        if issubclass(cls, GoblinAmbusher):
+            reveal_type(cls)  # E: Revealed type is 'Type[__main__.GoblinAmbusher]'
+            cls.job
+            cls.level
+            ga = cls()
+            ga.level = 15
+            ga.job
+            ga.job = "Warrior"  # E: Cannot assign to class variable "job" via instance
+
+    if issubclass(cls, (GoblinDigger, GoblinAmbusher)):
+        reveal_type(cls)  # E: Revealed type is 'Union[Type[__main__.GoblinDigger], Type[__main__.GoblinAmbusher]]'
+        cls.level
+        cls.job
+        g = cls()
+        g.level = 15
+        g.job
+        g.job = "Warrior" # E: Cannot assign to class variable "job" via instance
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIssubclassBuiltins]
+from typing import List, Type
+
+class MyList(List): pass
+class MyIntList(List[int]): pass
+
+def f(cls: Type[object]) -> None:
+    if issubclass(cls, MyList):
+        reveal_type(cls)  # E: Revealed type is 'Type[__main__.MyList]'
+        cls()[0]
+    else:
+        reveal_type(cls)  # E: Revealed type is 'Type[builtins.object]'
+        cls()[0]  # E: Value of type "object" is not indexable
+
+    if issubclass(cls, MyIntList):
+        reveal_type(cls)  # E: Revealed type is 'Type[__main__.MyIntList]'
+        cls()[0] + 1
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceTypeArgs]
+from typing import Iterable, TypeVar
+x = 1
+T = TypeVar('T')
+
+isinstance(x, Iterable)
+isinstance(x, Iterable[int])  # E: Parameterized generics cannot be used with class or instance checks
+isinstance(x, Iterable[T])  # E: Parameterized generics cannot be used with class or instance checks
+isinstance(x, (int, Iterable[int]))  # E: Parameterized generics cannot be used with class or instance checks
+isinstance(x, (int, (str, Iterable[int])))  # E: Parameterized generics cannot be used with class or instance checks
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceTypeArgsAliases]
+from typing import Iterable, TypeVar
+x = 1
+T = TypeVar('T')
+It = Iterable
+It2 = Iterable[T]
+
+isinstance(x, It[int])  # E: Parameterized generics cannot be used with class or instance checks
+isinstance(x, It)
+isinstance(x, It2[int])  # E: Parameterized generics cannot be used with class or instance checks
+isinstance(x, It2)  # E: Parameterized generics cannot be used with class or instance checks
+[builtins fixtures/isinstance.pyi]
+
+[case testIssubclassTypeArgs]
+from typing import Iterable, TypeVar
+x = int
+T = TypeVar('T')
+issubclass(x, Iterable)
+issubclass(x, Iterable[int])  # E: Parameterized generics cannot be used with class or instance checks
+issubclass(x, Iterable[T])  # E: Parameterized generics cannot be used with class or instance checks
+issubclass(x, (int, Iterable[int]))  # E: Parameterized generics cannot be used with class or instance checks
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceAndNarrowTypeVariable]
+from typing import TypeVar
+
+class A: pass
+class B(A): pass
+
+T = TypeVar('T', bound=A)
+
+def f(x: T) -> None:
+    if isinstance(x, B):
+        reveal_type(x) # E: Revealed type is '__main__.B'
+    else:
+        reveal_type(x) # E: Revealed type is 'T`-1'
+    reveal_type(x) # E: Revealed type is 'T`-1'
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceAndTypeType]
+from typing import Type
+def f(x: Type[int]) -> None:
+    if isinstance(x, type):
+        reveal_type(x) # E: Revealed type is 'Type[builtins.int]'
+    else:
+        reveal_type(x)  # Unreachable
+    reveal_type(x) # E: Revealed type is 'Type[builtins.int]'
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceVariableSubstitution]
+T = (int, str)
+U = (list, T)
+x: object = None
+
+if isinstance(x, T):
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+
+if isinstance(x, U):
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.list[Any], builtins.int, builtins.str]'
+
+if isinstance(x, (set, (list, T))):
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.set[Any], builtins.list[Any], builtins.int, builtins.str]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceTooFewArgs]
+isinstance() # E: Too few arguments for "isinstance"
+x: object
+if isinstance(): # E: Too few arguments for "isinstance"
+    x = 1
+    reveal_type(x) # E: Revealed type is 'builtins.int'
+if isinstance(x): # E: Too few arguments for "isinstance"
+    x = 1
+    reveal_type(x) # E: Revealed type is 'builtins.int'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceTooManyArgs]
+isinstance(1, 1, 1) # E: Too many arguments for "isinstance" \
+         # E: Argument 2 to "isinstance" has incompatible type "int"; expected "Union[type, tuple]"
+x: object
+if isinstance(x, str, 1): # E: Too many arguments for "isinstance"
+    reveal_type(x) # E: Revealed type is 'builtins.object'
+    x = 1
+    reveal_type(x) # E: Revealed type is 'builtins.int'
+[builtins fixtures/isinstancelist.pyi]
diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test
new file mode 100644
index 0000000..6d921b8
--- /dev/null
+++ b/test-data/unit/check-kwargs.test
@@ -0,0 +1,397 @@
+-- Test cases for keyword arguments.
+
+
+[case testTypeErrorInKeywordArgument]
+import typing
+def f(o: object) -> None: pass
+f(o=None()) # E: None not callable
+
+[case testSimpleKeywordArgument]
+import typing
+def f(a: 'A') -> None: pass
+f(a=A())
+f(a=object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A"
+class A: pass
+
+[case testTwoKeywordArgumentsNotInOrder]
+import typing
+def f(a: 'A', b: 'B') -> None: pass
+f(b=A(), a=A()) # E: Argument 1 to "f" has incompatible type "A"; expected "B"
+f(b=B(), a=B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A"
+f(a=A(), b=B())
+f(b=B(), a=A())
+class A: pass
+class B: pass
+
+[case testOneOfSeveralOptionalKeywordArguments]
+import typing
+def f(a: 'A' = None, b: 'B' = None, c: 'C' = None) -> None: pass
+f(a=A())
+f(b=B())
+f(c=C())
+f(b=B(), c=C())
+f(a=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "Optional[A]"
+f(b=A()) # E: Argument 1 to "f" has incompatible type "A"; expected "Optional[B]"
+f(c=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "Optional[C]"
+f(b=B(), c=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "Optional[C]"
+class A: pass
+class B: pass
+class C: pass
+
+[case testBothPositionalAndKeywordArguments]
+import typing
+def f(a: 'A', b: 'B') -> None: pass
+f(A(), b=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "B"
+f(A(), b=B())
+class A: pass
+class B: pass
+
+[case testContextSensitiveTypeInferenceForKeywordArg]
+from typing import List
+def f(a: 'A', b: 'List[A]') -> None: pass
+f(b=[], a=A())
+class A: pass
+[builtins fixtures/list.pyi]
+
+[case testGivingSameKeywordArgumentTwice]
+import typing
+def f(a: 'A', b: 'B') -> None: pass
+f(a=A(), b=B(), a=A()) # E: keyword argument repeated
+class A: pass
+class B: pass
+
+[case testGivingArgumentAsPositionalAndKeywordArg]
+import typing
+def f(a: 'A', b: 'B' = None) -> None: pass
+f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a"
+class A: pass
+class B: pass
+
+[case testGivingArgumentAsPositionalAndKeywordArg2]
+import typing
+def f(a: 'A' = None, b: 'B' = None) -> None: pass
+f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a"
+class A: pass
+class B: pass
+
+[case testPositionalAndKeywordForSameArg]
+# This used to crash in check_argument_count(). See #1095.
+def f(a: int): pass
+def g(): f(0, a=1)
+[out]
+
+[case testInvalidKeywordArgument]
+import typing
+def f(a: 'A') -> None: pass # N: "f" defined here
+f(b=object()) # E: Unexpected keyword argument "b" for "f"
+class A: pass
+
+[case testKeywordArgumentsWithDynamicallyTypedCallable]
+from typing import Any
+f = None # type: Any
+f(x=f(), z=None()) # E: None not callable
+f(f, zz=None()) # E: None not callable
+f(x=None)
+
+[case testKeywordArgumentWithFunctionObject]
+from typing import Callable
+f = None # type: Callable[[A, B], None]
+f(a=A(), b=B())
+f(A(), b=B())
+class A: pass
+class B: pass
+[out]
+main:3: error: Unexpected keyword argument "a"
+main:3: error: Unexpected keyword argument "b"
+main:4: error: Unexpected keyword argument "b"
+
+[case testKeywordOnlyArguments]
+import typing
+def f(a: 'A', *, b: 'B' = None) -> None: pass
+def g(a: 'A', *, b: 'B') -> None: pass
+def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass
+def i(a: 'A', *, b: 'B', aa: 'A' = None) -> None: pass
+f(A(), b=B())
+f(b=B(), a=A())
+f(A())
+f(A(), B()) # E: Too many positional arguments for "f"
+g(A(), b=B())
+g(b=B(), a=A())
+g(A()) # E: Missing named argument "b" for function "g"
+g(A(), B()) # E: Too many positional arguments for "g"
+h(A()) # E: Missing named argument "b" for function "h" # E: Missing named argument "aa" for function "h"
+h(A(), b=B()) # E: Missing named argument "aa" for function "h"
+h(A(), aa=A()) # E: Missing named argument "b" for function "h"
+h(A(), b=B(), aa=A())
+h(A(), aa=A(), b=B())
+i(A()) # E: Missing named argument "b" for function "i"
+i(A(), b=B())
+i(A(), aa=A()) # E: Missing named argument "b" for function "i"
+i(A(), b=B(), aa=A())
+i(A(), aa=A(), b=B())
+
+class A: pass
+class B: pass
+
+[case testKeywordOnlyArgumentsFastparse]
+
+import typing
+def f(a: 'A', *, b: 'B' = None) -> None: pass
+def g(a: 'A', *, b: 'B') -> None: pass
+def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass
+def i(a: 'A', *, b: 'B', aa: 'A' = None) -> None: pass
+f(A(), b=B())
+f(b=B(), a=A())
+f(A())
+f(A(), B()) # E: Too many positional arguments for "f"
+g(A(), b=B())
+g(b=B(), a=A())
+g(A()) # E: Missing named argument "b" for function "g"
+g(A(), B()) # E: Too many positional arguments for "g"
+h(A()) # E: Missing named argument "b" for function "h" # E: Missing named argument "aa" for function "h"
+h(A(), b=B()) # E: Missing named argument "aa" for function "h"
+h(A(), aa=A()) # E: Missing named argument "b" for function "h"
+h(A(), b=B(), aa=A())
+h(A(), aa=A(), b=B())
+i(A()) # E: Missing named argument "b" for function "i"
+i(A(), b=B())
+i(A(), aa=A()) # E: Missing named argument "b" for function "i"
+i(A(), b=B(), aa=A())
+i(A(), aa=A(), b=B())
+
+class A: pass
+class B: pass
+
+[case testKwargsAfterBareArgs]
+from typing import Tuple, Any
+def f(a, *, b=None) -> None: pass
+a = None  # type: Any
+b = None  # type: Any
+f(a, **b)
+
+[builtins fixtures/dict.pyi]
+
+[case testKeywordArgAfterVarArgs]
+import typing
+def f(*a: 'A', b: 'B' = None) -> None: pass
+f()
+f(A())
+f(A(), A())
+f(b=B())
+f(A(), b=B())
+f(A(), A(), b=B())
+f(B())      # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A"
+f(b=A())    # E: Argument 1 to "f" has incompatible type "A"; expected "Optional[B]"
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testKeywordArgAfterVarArgsWithBothCallerAndCalleeVarArgs]
+from typing import List
+def f(*a: 'A', b: 'B' = None) -> None: pass
+a = None # type: List[A]
+f(*a)
+f(A(), *a)
+f(b=B())
+f(*a, b=B())
+f(A(), *a, b=B())
+f(A(), B())   # E: Argument 2 to "f" has incompatible type "B"; expected "A"
+f(A(), b=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "Optional[B]"
+f(*a, b=A())  # E: Argument 2 to "f" has incompatible type "A"; expected "Optional[B]"
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testCallingDynamicallyTypedFunctionWithKeywordArgs]
+import typing
+def f(x, y=A()): pass
+f(x=A(), y=A())
+f(y=A(), x=A())
+f(y=A())      # E: Missing positional argument "x" in call to "f"
+f(A(), z=A()) # E: Unexpected keyword argument "z" for "f"
+class A: pass
+
+[case testKwargsArgumentInFunctionBody]
+from typing import Dict, Any
+def f( **kwargs: 'A') -> None:
+    d1 = kwargs # type: Dict[str, A]
+    d2 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type Dict[str, A], variable has type Dict[A, Any])
+    d3 = kwargs # type: Dict[Any, str] # E: Incompatible types in assignment (expression has type Dict[str, A], variable has type Dict[Any, str])
+class A: pass
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testKwargsArgumentInFunctionBodyWithImplicitAny]
+from typing import Dict, Any
+def f(**kwargs) -> None:
+    d1 = kwargs # type: Dict[str, A]
+    d2 = kwargs # type: Dict[str, str]
+    d3 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type Dict[str, Any], variable has type Dict[A, Any])
+class A: pass
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testCallingFunctionThatAcceptsVarKwargs]
+import typing
+def f( **kwargs: 'A') -> None: pass
+f()
+f(x=A())
+f(y=A(), z=A())
+f(x=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+f(A())   # E: Too many arguments for "f"
+# Perhaps a better message would be "Too many *positional* arguments..."
+class A: pass
+class B: pass
+[builtins fixtures/dict.pyi]
+
+[case testCallingFunctionWithKeywordVarArgs]
+from typing import Dict
+def f( **kwargs: 'A') -> None: pass
+d = None # type: Dict[str, A]
+f(**d)
+f(x=A(), **d)
+d2 = None # type: Dict[str, B]
+f(**d2)        # E: Argument 1 to "f" has incompatible type **Dict[str, B]; expected "A"
+f(x=A(), **d2) # E: Argument 2 to "f" has incompatible type **Dict[str, B]; expected "A"
+class A: pass
+class B: pass
+[builtins fixtures/dict.pyi]
+
+[case testKwargsAllowedInDunderCall]
+class Formatter:
+    def __call__(self, message: str, bold: bool = False) -> str:
+        pass
+
+formatter = Formatter()
+formatter("test", bold=True)
+reveal_type(formatter.__call__)  # E: Revealed type is 'def (message: builtins.str, bold: builtins.bool =) -> builtins.str'
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testKwargsAllowedInDunderCallKwOnly]
+class Formatter:
+    def __call__(self, message: str, *, bold: bool = False) -> str:
+        pass
+
+formatter = Formatter()
+formatter("test", bold=True)
+reveal_type(formatter.__call__)  # E: Revealed type is 'def (message: builtins.str, *, bold: builtins.bool =) -> builtins.str'
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testPassingMappingForKeywordVarArg]
+from typing import Mapping
+def f(**kwargs: 'A') -> None: pass
+b = None # type: Mapping
+d = None # type: Mapping[A, A]
+m = None # type: Mapping[str, A]
+f(**d)         # E: Keywords must be strings
+f(**m)
+f(**b)
+class A: pass
+[builtins fixtures/dict.pyi]
+
+[case testPassingMappingSubclassForKeywordVarArg]
+from typing import Mapping
+class MappingSubclass(Mapping[str, str]): pass
+def f(**kwargs: 'A') -> None: pass
+d = None # type: MappingSubclass
+f(**d)
+class A: pass
+[builtins fixtures/dict.pyi]
+
+[case testInvalidTypeForKeywordVarArg]
+from typing import Dict
+def f(**kwargs: 'A') -> None: pass
+d = None # type: Dict[A, A]
+f(**d)         # E: Keywords must be strings
+f(**A())       # E: Argument after ** must be a mapping, not "A"
+class A: pass
+[builtins fixtures/dict.pyi]
+
+[case testPassingKeywordVarArgsToNonVarArgsFunction]
+from typing import Any, Dict
+def f(a: 'A', b: 'B') -> None: pass
+d = None # type: Dict[str, Any]
+f(**d)
+d2 = None # type: Dict[str, A]
+f(**d2) # E: Argument 1 to "f" has incompatible type **Dict[str, A]; expected "B"
+class A: pass
+class B: pass
+[builtins fixtures/dict.pyi]
+
+[case testBothKindsOfVarArgs]
+from typing import Any, List, Dict
+def f(a: 'A', b: 'A') -> None: pass
+l = None # type: List[Any]
+d = None # type: Dict[Any, Any]
+f(*l, **d)
+class A: pass
+[builtins fixtures/dict.pyi]
+
+[case testKeywordArgumentAndCommentSignature]
+import typing
+def f(x): # type: (int) -> str # N: "f" defined here
+    pass
+f(x='') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+f(x=0)
+f(y=0) # E: Unexpected keyword argument "y" for "f"
+
+[case testKeywordArgumentAndCommentSignature2]
+import typing
+class A:
+    def f(self, x): # type: (int) -> str  # N: "f" of "A" defined here
+        pass
+A().f(x='') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+A().f(x=0)
+A().f(y=0) # E: Unexpected keyword argument "y" for "f" of "A"
+
+[case testKeywordVarArgsAndCommentSignature]
+import typing
+def f(**kwargs): # type: (**int) -> None
+    pass
+f(z=1)
+f(x=1, y=1)
+f(x='', y=1) # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+f(x=1, y='') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testCallsWithStars]
+def f(a: int) -> None:
+    pass
+
+s = ('',)
+f(*s) # E: Argument 1 to "f" has incompatible type *"Tuple[str]"; expected "int"
+
+a = {'': 0}
+f(a) # E: Argument 1 to "f" has incompatible type Dict[str, int]; expected "int"
+f(**a) # okay
+
+b = {'': ''}
+f(b) # E: Argument 1 to "f" has incompatible type Dict[str, str]; expected "int"
+f(**b) # E: Argument 1 to "f" has incompatible type **Dict[str, str]; expected "int"
+
+c = {0: 0}
+f(**c) # E: Keywords must be strings
+[builtins fixtures/dict.pyi]
+
+[case testCallStar2WithStar]
+def f(**k): pass
+f(*(1, 2))  # E: Too many arguments for "f"
+[builtins fixtures/dict.pyi]
+
+[case testUnexpectedMethodKwargInNestedClass]
+class A:
+    class B:
+        def __init__(self) -> None:  # N: "B" defined here
+            pass
+A.B(x=1)  # E: Unexpected keyword argument "x" for "B"
+
+[case testUnexpectedMethodKwargFromOtherModule]
+import m
+m.A(x=1)  # E: Unexpected keyword argument "x" for "A"
+[file m.py]
+class A:
+    def __init__(self) -> None:  # N: "A" defined here
+        pass
diff --git a/test-data/unit/check-lists.test b/test-data/unit/check-lists.test
new file mode 100644
index 0000000..c9c67e8
--- /dev/null
+++ b/test-data/unit/check-lists.test
@@ -0,0 +1,72 @@
+-- Nested list assignment
+-- -----------------------------
+
+[case testNestedListAssignment]
+from typing import List
+a1, b1, c1 = None, None, None # type: (A, B, C)
+a2, b2, c2 = None, None, None # type: (A, B, C)
+
+a1, [b1, c1] = a2, [b2, c2]
+a1, [a1, [b1, c1]] = a2, [a2, [b2, c2]]
+a1, [a1, [a1, b1]] = a1, [a1, [a1, c1]]  # E: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+class A: pass
+class B: pass
+class C: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNestedListAssignmentToTuple]
+from typing import List
+a, b, c = None, None, None # type: (A, B, C)
+
+a, b = [a, b]
+a, b = [a]  # E: Need more than 1 value to unpack (2 expected)
+a, b = [a, b, c]  # E: Too many values to unpack (2 expected, 3 provided)
+
+class A: pass
+class B: pass
+class C: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testListAssignmentFromTuple]
+from typing import List
+a, b, c = None, None, None # type: (A, B, C)
+t = a, b
+
+[a, b], c = t, c
+[a, c], c = t, c  # E: Incompatible types in assignment (expression has type "B", variable has type "C")
+[a, a, a], c = t, c  # E: Need more than 2 values to unpack (3 expected)
+[a], c = t, c  # E: Too many values to unpack (1 expected, 2 provided)
+
+class A: pass
+class B: pass
+class C: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testListAssignmentUnequalAmountToUnpack]
+from typing import List
+a, b, c = None, None, None # type: (A, B, C)
+
+def f() -> None: # needed because test parser tries to parse [a, b] as section header
+    [a, b] = [a, b]
+    [a, b] = [a]  # E: Need more than 1 value to unpack (2 expected)
+    [a, b] = [a, b, c]  # E: Too many values to unpack (2 expected, 3 provided)
+
+class A: pass
+class B: pass
+class C: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testListWithStarExpr]
+(x, *a) = [1, 2, 3]
+a = [1, *[2, 3]]
+reveal_type(a)  # E: Revealed type is 'builtins.list[builtins.int]'
+b = [0, *a]
+reveal_type(b)  # E: Revealed type is 'builtins.list[builtins.int*]'
+c = [*a, 0]
+reveal_type(c)  # E: Revealed type is 'builtins.list[builtins.int*]'
+[builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
new file mode 100644
index 0000000..b1b6857
--- /dev/null
+++ b/test-data/unit/check-modules.test
@@ -0,0 +1,1642 @@
+-- Type checker test cases dealing with modules and imports.
+
+[case testAccessImportedDefinitions]
+import m
+import typing
+m.f()           # E: Too few arguments for "f"
+m.f(object())   # E: Argument 1 to "f" has incompatible type "object"; expected "A"
+m.x = object()  # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+m.f(m.A())
+m.x = m.A()
+[file m.py]
+class A: pass
+def f(a: A) -> None: pass
+x = A()
+
+[case testAccessImportedDefinitions]
+import m
+import typing
+m.f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A"
+m.f(m.A())
+[file m.py]
+class A: pass
+def f(a: A) -> None: pass
+
+[case testAccessImportedDefinitions2]
+from m import f, A
+import typing
+f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A"
+f(A())
+[file m.py]
+class A: pass
+def f(a: A) -> None: pass
+
+[case testImportedExceptionType]
+import m
+import typing
+try:
+    pass
+except m.Err:
+    pass
+except m.Bad: # E: Exception type must be derived from BaseException
+    pass
+[file m.py]
+class Err(BaseException): pass
+class Bad: pass
+[builtins fixtures/exception.pyi]
+
+[case testImportedExceptionType2]
+from m import Err, Bad
+import typing
+try:
+    pass
+except Err:
+    pass
+except Bad: # E: Exception type must be derived from BaseException
+    pass
+[file m.py]
+class Err(BaseException): pass
+class Bad: pass
+[builtins fixtures/exception.pyi]
+
+[case testImportWithinBlock]
+import typing
+if 1:
+    import m
+    m.a = m.b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    m.a = m.a
+    m.f()
+    m.f(m.a)    # E: Too many arguments for "f"
+    m.a = m.A()
+    m.a = m.B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[file m.py]
+class A: pass
+class B: pass
+a = A()
+b = B()
+def f() -> None: pass
+
+[case testImportWithinFunction]
+import typing
+def f() -> None:
+    from m import a, b, f, A, B
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = a
+    f()
+    f(a)    # E: Too many arguments for "f"
+    a = A()
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[file m.py]
+class A: pass
+class B: pass
+a = A()
+b = B()
+def f() -> None: pass
+[out]
+
+[case testImportWithinMethod]
+import typing
+class C:
+    def f(self) -> None:
+        from m import *
+        a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+        a = a
+        f()
+        f(a)    # E: Too many arguments for "f"
+        a = A()
+        a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[file m.py]
+class A: pass
+class B: pass
+a = A()
+b = B()
+def f() -> None: pass
+[out]
+
+[case testImportWithinClassBody]
+import typing
+class C:
+    import m
+    m.f()
+    m.f(C) # E: Too many arguments for "f"
+[file m.py]
+def f() -> None: pass
+[out]
+
+[case testImportWithinClassBody2]
+import typing
+class C:
+    from m import f
+    f()
+    f(C) # E: Too many arguments for "f"
+[file m.py]
+def f() -> None: pass
+[out]
+
+[case testImportWithStub]
+import _m
+_m.f("hola")
+[file _m.pyi]
+def f(c:str) -> None: pass
+[out]
+
+[case testImportWithStubIncompatibleType]
+import _m
+_m.f("hola")
+_m.f(12)  # E: Argument 1 to "f" has incompatible type "int"; expected "str"
+[file _m.py]
+def f(c):
+  print(c)
+[file _m.pyi]
+def f(c:str) -> None: pass
+
+[case testInvalidOperationsOnModules]
+import m
+import typing
+
+class A: pass
+m()      # E: Module not callable
+a = m # type: A  # E: Incompatible types in assignment (expression has type Module, variable has type "A")
+m + None # E: Unsupported left operand type for + (Module)
+[file m.py]
+[builtins fixtures/module.pyi]
+
+[case testNameDefinedInDifferentModule]
+import m, n
+import typing
+m.x # E: Module has no attribute "x"
+[file m.py]
+y = object()
+[file n.py]
+x = object()
+[builtins fixtures/module.pyi]
+
+[case testChainedAssignmentAndImports]
+import m
+
+i, s = None, None # type: (int, str)
+i = m.x
+i = m.y
+s = m.x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+s = m.y # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[file m.py]
+x = y = 1
+[builtins fixtures/primitives.pyi]
+
+[case testConditionalFunctionDefinitionAndImports]
+import m
+import typing
+m.f(1)
+m.f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[file m.py]
+x = object()
+if x:
+    def f(x: int) -> None: pass
+else:
+    def f(x: int) -> None: pass
+
+[case testTypeCheckWithUnknownModule]
+import nonexistent
+None + ''
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckWithUnknownModule2]
+import m, nonexistent
+None + ''
+m.x = 1
+m.x = ''
+[file m.py]
+x = 1
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testTypeCheckWithUnknownModule3]
+import nonexistent, m
+None + ''
+m.x = 1
+m.x = ''
+[file m.py]
+x = 1
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testTypeCheckWithUnknownModule4]
+import nonexistent, another
+None + ''
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:1: error: Cannot find module named 'another'
+main:2: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckWithUnknownModule5]
+import nonexistent as x
+None + ''
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckWithUnknownModuleUsingFromImport]
+from nonexistent import x
+None + ''
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckWithUnknownModuleUsingImportStar]
+from nonexistent import *
+None + ''
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+
+[case testAccessingUnknownModule]
+import xyz
+xyz.foo()
+xyz()
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testAccessingUnknownModule2]
+import xyz, bar
+xyz.foo()
+bar()
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:1: error: Cannot find module named 'bar'
+
+[case testAccessingUnknownModule3]
+import xyz as z
+xyz.foo()
+z()
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Name 'xyz' is not defined
+
+[case testAccessingNameImportedFromUnknownModule]
+from xyz import y, z
+y.foo()
+z()
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testAccessingNameImportedFromUnknownModule2]
+from xyz import *
+y
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Name 'y' is not defined
+
+[case testAccessingNameImportedFromUnknownModule3]
+from xyz import y as z
+y
+z
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Name 'y' is not defined
+
+[case testUnknownModuleRedefinition]
+import xab
+def xab(): pass
+[out]
+main:1: error: Cannot find module named 'xab'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testAccessingUnknownModuleFromOtherModule]
+import x
+x.nonexistent.foo
+x.z
+[file x.py]
+import nonexistent
+[builtins fixtures/module.pyi]
+[out]
+tmp/x.py:1: error: Cannot find module named 'nonexistent'
+tmp/x.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:3: error: Module has no attribute "z"
+
+[case testUnknownModuleImportedWithinFunction]
+def f():
+    import foobar
+def foobar(): pass
+foobar('')
+[out]
+main:2: error: Cannot find module named 'foobar'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:4: error: Too many arguments for "foobar"
+
+[case testUnknownModuleImportedWithinFunction2]
+def f():
+    from foobar import x
+def x(): pass
+x('')
+[out]
+main:2: error: Cannot find module named 'foobar'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:4: error: Too many arguments for "x"
+
+[case testRelativeImports]
+import typing
+import m.a
+m.a.x = m.a.y # Error
+[file m/__init__.py]
+[file m/a.py]
+import typing
+from .b import A, B, x, y
+z = x
+z = y # Error
+[file m/b.py]
+import typing
+class A: pass
+class B: pass
+x = A()
+y = B()
+[out]
+tmp/m/a.py:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testRelativeImports2]
+import typing
+import m.a
+m.a.x = m.a.y # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[file m/__init__.py]
+[file m/a.py]
+import typing
+from .b import A, B, x, y
+[file m/b.py]
+import typing
+class A: pass
+class B: pass
+x = A()
+y = B()
+
+[case testExportedValuesInImportAll]
+import typing
+from m import *
+_ = a
+_ = b
+_ = c
+_ = d
+_ = e
+_ = f # E: Name 'f' is not defined
+_ = _g # E: Name '_g' is not defined
+[file m.py]
+__all__ = ['a']
+__all__ += ('b',)
+__all__.append('c')
+__all__.extend(('d', 'e'))
+
+a = b = c = d = e = f = _g = 1
+[builtins fixtures/module_all.pyi]
+
+[case testAllMustBeSequenceStr]
+import typing
+__all__ = [1, 2, 3]
+[builtins fixtures/module_all.pyi]
+[out]
+main:2: error: Type of __all__ must be Sequence[str], not List[int]
+
+[case testAllMustBeSequenceStr_python2]
+import typing
+__all__ = [1, 2, 3]
+[builtins_py2 fixtures/module_all_python2.pyi]
+[out]
+main:2: error: Type of __all__ must be Sequence[unicode], not List[int]
+
+[case testAllUnicodeSequenceOK_python2]
+import typing
+__all__ = [u'a', u'b', u'c']
+[builtins_py2 fixtures/module_all_python2.pyi]
+
+[out]
+
+[case testEllipsisInitializerInStubFileWithType]
+import m
+m.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[file m.pyi]
+x = ... # type: int
+
+[case testEllipsisInitializerInStubFileWithoutType]
+import m
+m.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "ellipsis")
+[file m.pyi]
+# Ellipsis is only special with a # type: comment (though it's unclear whether this is ideal)
+x = ...
+
+[case testEllipsisInitializerInModule]
+x = ... # type: int # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "int")
+
+[case testEllipsisDefaultArgValueInStub]
+import m
+m.f(1)
+m.f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[file m.pyi]
+def f(x: int = ...) -> None: pass
+
+[case testEllipsisDefaultArgValueInStub2]
+import m
+def f(x: int = ...) -> None: pass
+[file m.pyi]
+def g(x: int = '') -> None: pass
+[out]
+tmp/m.pyi:1: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:2: error: Incompatible types in assignment (expression has type "ellipsis", variable has type "int")
+
+[case testEllipsisDefaultArgValueInNonStub]
+def f(x: int = ...) -> None: pass # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "int")
+[out]
+
+[case testStarImportOverlapping]
+from m1 import *
+from m2 import *
+j = ''
+[file m1.py]
+x = 1
+[file m2.py]
+x = 1
+
+[case testStarImportOverlappingMismatch]
+from m1 import *
+from m2 import * # E: Incompatible import of "x" (imported name has type "int", local name has type "str")
+j = ''
+[file m1.py]
+x = ''
+[file m2.py]
+x = 1
+
+[case testStarImportOverridingLocalImports-skip]
+from m1 import *
+from m2 import *
+x = '' # E: TODO (cannot assign str to int)
+[file m1.py]
+x = 1
+[file m2.py]
+x = 1
+
+[case testAssignToFuncDefViaImport]
+from m import *  # E: Incompatible import of "x" (imported name has type "int", local name has type "str")
+f = None
+x = ''
+[file m.py]
+def f(): pass
+x = 1+0
+[out]
+
+
+-- Conditional definitions and function redefinitions via module object
+-- --------------------------------------------------------------------
+
+
+[case testConditionalImportAndAssign]
+try:
+    from m import x
+except:
+    x = None
+try:
+    from m import x as y
+except:
+    y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[file m.py]
+x = ''
+
+[case testAssignAndConditionalImport]
+x = ''
+try:
+    from m import x
+except:
+    pass
+y = 1
+try:
+    from m import x as y  # E: Incompatible import of "y" (imported name has type "str", local name has type "int")
+except:
+    pass
+[file m.py]
+x = ''
+
+[case testAssignAndConditionalStarImport]
+x = ''
+y = 1
+try:
+    from m import * # E: Incompatible import of "y" (imported name has type "str", local name has type "int")
+except:
+    pass
+[file m.py]
+x = ''
+y = ''
+
+[case testRedefineImportedFunctionViaImport]
+try:
+    from m import f, g
+except:
+    def f(x): pass
+    def g(x): pass # E: All conditional function variants must have identical signatures
+[file m.py]
+def f(x): pass
+def g(x, y): pass
+
+[case testImportedVariableViaImport]
+try:
+    from m import x
+except:
+    from n import x # E: Incompatible import of "x" (imported name has type "str", local name has type "int")
+[file m.py]
+x = 1
+[file n.py]
+x = ''
+
+[case testRedefineFunctionViaImport]
+def f(x): pass
+def g(x): pass
+try:
+    from m import f, g # E: Incompatible import of "g" (imported name has type Callable[[Any, Any], Any], local name has type Callable[[Any], Any])
+except:
+    pass
+[file m.py]
+def f(x): pass
+def g(x, y): pass
+
+[case testImportVariableAndAssignNone]
+try:
+    from m import x
+except:
+    x = None
+[file m.py]
+x = 1
+
+[case testImportFunctionAndAssignNone]
+try:
+    from m import f
+except:
+    f = None
+[file m.py]
+def f(): pass
+
+[case testImportFunctionAndAssignFunction]
+def g(x): pass
+try:
+    from m import f
+except:
+    f = g
+[file m.py]
+def f(x): pass
+
+[case testImportFunctionAndAssignIncompatible]
+try:
+    from m import f
+except:
+    f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+[file m.py]
+def f(): pass
+
+[case testAssignToFuncDefViaGlobalDecl2]
+import typing
+from m import f
+def g() -> None:
+    global f
+    f = None
+    f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+[file m.py]
+def f(): pass
+[out]
+
+[case testAssignToFuncDefViaNestedModules]
+import m.n
+m.n.f = None
+m.n.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+[file m/__init__.py]
+[file m/n.py]
+def f(): pass
+[out]
+
+[case testAssignToFuncDefViaModule]
+import m
+m.f = None
+m.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+[file m.py]
+def f(): pass
+[out]
+
+[case testConditionalImportAndAssignNoneToModule]
+if object():
+    import m
+else:
+    m = None
+m.f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str"
+[file m.py]
+def f(x: str) -> None: pass
+[builtins fixtures/module.pyi]
+[out]
+
+[case testConditionalImportAndAssignInvalidToModule]
+if object():
+    import m
+else:
+    m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Module)
+[file m.py]
+[builtins fixtures/module.pyi]
+[out]
+
+[case testImportAndAssignToModule]
+import m
+m = None
+m.f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str"
+[file m.py]
+def f(x: str) -> None: pass
+[builtins fixtures/module.pyi]
+[out]
+
+
+-- Test cases that simulate 'mypy -m modname'
+--
+-- The module name to import is encoded in a comment.
+
+[case testTypeCheckNamedModule]
+# cmd: mypy -m m.a
+[file m/__init__.py]
+None + 1
+[file m/a.py]
+[out]
+tmp/m/__init__.py:1: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckNamedModule2]
+# cmd: mypy -m m.a
+[file m/__init__.py]
+[file m/a.py]
+None + 1
+[out]
+tmp/m/a.py:1: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckNamedModule3]
+# cmd: mypy -m m
+[file m/__init__.py]
+None + 1
+[file m/a.py]
+[out]
+tmp/m/__init__.py:1: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckNamedModule4]
+# cmd: mypy -m m
+[file m/__init__.py]
+[file m/a.py]
+None + 1  # Not analyzed.
+[out]
+
+[case testTypeCheckNamedModule5]
+# cmd: mypy -m m
+None + ''  # Not analyzed.
+[file m.py]
+None + 1
+[out]
+tmp/m.py:1: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckNamedModuleWithImportCycle]
+# cmd: mypy -m m.a
+None + 1  # Does not generate error, as this file won't be analyzed.
+[file m/__init__.py]
+import m.a
+[file m/a.py]
+[out]
+
+
+-- Checks dealing with submodules and different kinds of imports
+-- -------------------------------------------------------------
+
+[case testSubmoduleRegularImportAddsAllParents]
+import a.b.c
+reveal_type(a.value)  # E: Revealed type is 'builtins.int'
+reveal_type(a.b.value)  # E: Revealed type is 'builtins.str'
+reveal_type(a.b.c.value)  # E: Revealed type is 'builtins.float'
+b.value  # E: Name 'b' is not defined
+c.value  # E: Name 'c' is not defined
+
+[file a/__init__.py]
+value = 3
+[file a/b/__init__.py]
+value = "a"
+[file a/b/c.py]
+value = 3.2
+[out]
+
+[case testSubmoduleImportAsDoesNotAddParents]
+import a.b.c as foo
+reveal_type(foo.value)  # E: Revealed type is 'builtins.float'
+a.value  # E: Name 'a' is not defined
+b.value  # E: Name 'b' is not defined
+c.value  # E: Name 'c' is not defined
+
+[file a/__init__.py]
+value = 3
+[file a/b/__init__.py]
+value = "a"
+[file a/b/c.py]
+value = 3.2
+[out]
+
+[case testSubmoduleImportFromDoesNotAddParents]
+from a import b
+reveal_type(b.value)  # E: Revealed type is 'builtins.str'
+b.c.value  # E: Module has no attribute "c"
+a.value  # E: Name 'a' is not defined
+
+[file a/__init__.py]
+value = 3
+[file a/b/__init__.py]
+value = "a"
+[file a/b/c.py]
+value = 3.2
+[builtins fixtures/module.pyi]
+[out]
+
+[case testSubmoduleImportFromDoesNotAddParents2]
+from a.b import c
+reveal_type(c.value)  # E: Revealed type is 'builtins.float'
+a.value  # E: Name 'a' is not defined
+b.value  # E: Name 'b' is not defined
+
+[file a/__init__.py]
+value = 3
+[file a/b/__init__.py]
+value = "a"
+[file a/b/c.py]
+value = 3.2
+[out]
+
+[case testSubmoduleRegularImportNotDirectlyAddedToParent]
+import a.b.c
+def accept_float(x: float) -> None: pass
+accept_float(a.b.c.value)
+
+[file a/__init__.py]
+value = 3
+b.value
+a.b.value
+
+[file a/b/__init__.py]
+value = "a"
+c.value
+a.b.c.value
+
+[file a/b/c.py]
+value = 3.2
+[out]
+tmp/a/b/__init__.py:2: error: Name 'c' is not defined
+tmp/a/b/__init__.py:3: error: Name 'a' is not defined
+tmp/a/__init__.py:2: error: Name 'b' is not defined
+tmp/a/__init__.py:3: error: Name 'a' is not defined
+
+[case testSubmoduleMixingLocalAndQualifiedNames]
+from a.b import MyClass
+val1 = None  # type: a.b.MyClass  # E: Name 'a' is not defined
+val2 = None  # type: MyClass
+
+[file a/__init__.py]
+[file a/b.py]
+class MyClass: pass
+[out]
+
+[case testSubmoduleMixingImportFrom]
+import parent.child
+
+[file parent/__init__.py]
+
+[file parent/common.py]
+class SomeClass: pass
+
+[file parent/child.py]
+from parent.common import SomeClass
+from parent import common
+foo = parent.common.SomeClass()
+
+[builtins fixtures/module.pyi]
+[out]
+tmp/parent/child.py:3: error: Name 'parent' is not defined
+
+[case testSubmoduleMixingImportFromAndImport]
+import parent.child
+
+[file parent/__init__.py]
+
+[file parent/common.py]
+class SomeClass: pass
+
+[file parent/unrelated.py]
+class ShouldNotLoad: pass
+
+[file parent/child.py]
+from parent.common import SomeClass
+import parent
+
+# Note: this might be unintuitive, but when `parent.common` is loaded in any way,
+# shape, or form, it's added to `parent`'s namespace, which is why the line below
+# succeeds.
+foo = parent.common.SomeClass()
+reveal_type(foo)
+bar = parent.unrelated.ShouldNotLoad()
+
+[builtins fixtures/module.pyi]
+[out]
+tmp/parent/child.py:8: error: Revealed type is 'parent.common.SomeClass'
+tmp/parent/child.py:9: error: Module has no attribute "unrelated"
+
+[case testSubmoduleMixingImportFromAndImport2]
+import parent.child
+
+[file parent/__init__.py]
+
+[file parent/common.py]
+class SomeClass: pass
+
+[file parent/child.py]
+from parent import common
+import parent
+foo = parent.common.SomeClass()
+reveal_type(foo)
+
+[builtins fixtures/module.pyi]
+[out]
+tmp/parent/child.py:4: error: Revealed type is 'parent.common.SomeClass'
+
+-- Tests repeated imports
+
+[case testIdenticalImportFromTwice]
+from a import x, y, z
+from b import x, y, z
+[file a.py]
+from common import x, y, z
+[file b.py]
+from common import x, y, z
+[file common.py]
+x = 3
+def y() -> int: return 3
+class z: pass
+[out]
+
+[case testIdenticalImportStarTwice]
+from a import *
+from b import *
+[file a.py]
+from common import x, y, z
+[file b.py]
+from common import x, y, z
+[file common.py]
+x = 3
+def y() -> int: return 3
+class z: pass
+[out]
+
+[case testDifferentImportSameNameTwice]
+from a import x, y, z
+from b import x, y, z
+[file a.py]
+x = 3
+def y() -> int: return 1
+class z: pass
+[file b.py]
+x = "foo"
+def y() -> str: return "foo"
+class z: pass
+[out]
+main:2: error: Incompatible import of "x" (imported name has type "str", local name has type "int")
+main:2: error: Incompatible import of "y" (imported name has type Callable[[], str], local name has type Callable[[], int])
+main:2: error: Incompatible import of "z" (imported name has type Type[b.z], local name has type Type[a.z])
+
+-- Misc
+
+[case testInheritFromBadImport]
+# cmd: mypy -m bar
+[file foo.py]
+pass
+[file bar.py]
+from foo import B
+class C(B):
+    pass
+[out]
+tmp/bar.py:1: error: Module 'foo' has no attribute 'B'
+
+[case testImportSuppressedWhileAlmostSilent]
+# cmd: mypy -m main
+# flags: --follow-imports=error
+[file main.py]
+import mod
+[file mod.py]
+[builtins fixtures/module.pyi]
+[out]
+tmp/main.py:1: note: Import of 'mod' ignored
+tmp/main.py:1: note: (Using --follow-imports=error, module not passed on command line)
+
+[case testAncestorSuppressedWhileAlmostSilent]
+# cmd: mypy -m foo.bar
+# flags: --follow-imports=error
+[file foo/bar.py]
+[file foo/__init__.py]
+[builtins fixtures/module.pyi]
+[out]
+tmp/foo/bar.py: note: Ancestor package 'foo' ignored
+tmp/foo/bar.py: note: (Using --follow-imports=error, submodule passed on command line)
+
+[case testStubImportNonStubWhileSilent]
+# cmd: mypy -m main
+# flags: --follow-imports=skip
+[file main.py]
+from stub import x # Permitted
+from other import y # Disallowed
+x + '' # Error here
+y + '' # But not here
+[file stub.pyi]
+from non_stub import x
+[file non_stub.py]
+x = 42
+[file other.py]
+y = 42
+[builtins fixtures/module.pyi]
+[out]
+tmp/main.py:3: error: Unsupported left operand type for + ("int")
+
+[case testSilentSubmoduleImport]
+# cmd: mypy -m foo
+# flags: --follow-imports=skip
+[file foo/__init__.py]
+from foo import bar
+[file foo/bar.py]
+pass
+
+[case testSuperclassInImportCycle]
+import a
+import d
+a.A().f(d.D())
+[file a.py]
+if 0:
+    import d
+class B: pass
+class C(B): pass
+class A:
+    def f(self, x: B) -> None: pass
+[file d.py]
+import a
+class D(a.C): pass
+
+[case testSuperclassInImportCycleReversedImports]
+import d
+import a
+a.A().f(d.D())
+[file a.py]
+if 0:
+    import d
+class B: pass
+class C(B): pass
+class A:
+    def f(self, x: B) -> None: pass
+[file d.py]
+import a
+class D(a.C): pass
+
+[case testPreferPackageOverFile]
+import a
+[file a.py]
+/  # intentional syntax error -- this file shouldn't be parsed
+[file a/__init__.py]
+pass
+[out]
+
+[case testPreferPackageOverFile2]
+from a import x
+[file a.py]
+/  # intentional syntax error -- this file shouldn't be parsed
+[file a/__init__.py]
+x = 0
+[out]
+
+[case testImportInClass]
+class C:
+    import foo
+reveal_type(C.foo.bar)  # E: Revealed type is 'builtins.int'
+[file foo.py]
+bar = 0
+[builtins fixtures/module.pyi]
+[out]
+
+[case testIfFalseImport]
+if False:
+    import a
+def f(x: 'a.A') -> int:
+    return x.f()
+[file a.py]
+class A:
+    def f(self) -> int:
+        return 0
+[builtins fixtures/bool.pyi]
+
+
+-- Test stability under import cycles
+-- ----------------------------------
+
+-- The first two tests are identical except one main has 'import x'
+-- and the other 'import y'.  Previously (before build.order_ascc()
+-- was added) one of these would fail because the imports were
+-- processed in the (reverse) order in which the files were
+-- encountered.
+
+[case testImportCycleStability1]
+import x
+[file x.py]
+def f() -> str: return ''
+class Base:
+    attr = f()
+def foo():
+    import y
+[file y.py]
+import x
+class Sub(x.Base):
+    attr = x.Base.attr
+[out]
+
+[case testImportCycleStability2]
+import y
+[file x.py]
+def f() -> str: return ''
+class Base:
+    attr = f()
+def foo():
+    import y
+[file y.py]
+import x
+class Sub(x.Base):
+    attr = x.Base.attr
+[out]
+
+-- This case isn't fixed by order_ascc(), but is fixed by the
+-- lightweight type inference added to semanal.py
+-- (analyze_simple_literal_type()).
+
+[case testImportCycleStability3]
+import y
+[file x.py]
+class Base:
+    pass
+def foo() -> int:
+    import y
+    reveal_type(y.Sub.attr)
+    return y.Sub.attr
+[file y.py]
+import x
+class Sub(x.Base):
+    attr = 0
+[out]
+tmp/x.py:5: error: Revealed type is 'builtins.int'
+
+-- This case has a symmetrical cycle, so it doesn't matter in what
+-- order the files are processed.  It depends on the lightweight type
+-- inference.
+
+[case testImportCycleStability4]
+import x
+[file x.py]
+import y
+class C:
+    attr = ''
+def foo() -> int:
+    return y.D.attr
+[file y.py]
+import x
+class D:
+    attr = 0
+def bar() -> str:
+    return x.C.attr
+
+-- These cases test all supported literal types.
+
+[case testImportCycleStability5]
+import y
+[file x.py]
+class Base:
+    pass
+def foo() -> None:
+    import y
+    i = y.Sub.iattr  # type: int
+    f = y.Sub.fattr  # type: float
+    s = y.Sub.sattr  # type: str
+    b = y.Sub.battr  # type: bytes
+[file y.py]
+import x
+class Sub(x.Base):
+    iattr = 0
+    fattr = 0.0
+    sattr = ''
+    battr = b''
+[out]
+
+[case testImportCycleStability6_python2]
+import y
+[file x.py]
+class Base:
+    pass
+def foo():
+    # type: () -> None
+    import y
+    i = y.Sub.iattr  # type: int
+    f = y.Sub.fattr  # type: float
+    s = y.Sub.sattr  # type: str
+    u = y.Sub.uattr  # type: unicode
+[file y.py]
+import x
+class Sub(x.Base):
+    iattr = 0
+    fattr = 0.0
+    sattr = ''
+    uattr = u''
+[out]
+
+-- This case tests module-level variables.
+
+[case testImportCycleStability7]
+import x
+[file x.py]
+def foo() -> int:
+    import y
+    reveal_type(y.value)
+    return y.value
+[file y.py]
+import x
+value = 12
+[out]
+tmp/x.py:3: error: Revealed type is 'builtins.int'
+
+-- This is not really cycle-related but still about the lightweight
+-- type checker.
+
+[case testImportCycleStability8]
+x = 1  # type: str
+reveal_type(x)
+[out]
+main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+main:2: error: Revealed type is 'builtins.str'
+
+-- Tests for cross-module second_pass checking.
+
+[case testSymmetricImportCycle1]
+import a
+[file a.py]
+import b
+def f() -> int:
+    return b.x
+y = 0 + 0
+[file b.py]
+import a
+def g() -> int:
+    reveal_type(a.y)
+    return a.y
+x = 1 + 1
+[out]
+tmp/b.py:3: error: Revealed type is 'builtins.int'
+
+[case testSymmetricImportCycle2]
+import b
+[file a.py]
+import b
+def f() -> int:
+    reveal_type(b.x)
+    return b.x
+y = 0 + 0
+[file b.py]
+import a
+def g() -> int:
+    return a.y
+x = 1 + 1
+[out]
+tmp/a.py:3: error: Revealed type is 'builtins.int'
+
+[case testThreePassesRequired]
+import b
+[file a.py]
+import b
+class C:
+    def f1(self) -> None:
+        self.x2
+    def f2(self) -> None:
+        self.x2 = b.b
+[file b.py]
+import a
+b = 1 + 1
+[out]
+tmp/a.py:4: error: Cannot determine type of 'x2'
+
+[case testErrorInPassTwo1]
+import b
+[file a.py]
+import b
+def f() -> None:
+    a = b.x + 1
+    a + ''
+[file b.py]
+import a
+x = 1 + 1
+[out]
+tmp/a.py:4: error: Unsupported operand types for + ("int" and "str")
+
+[case testErrorInPassTwo2]
+import a
+[file a.py]
+import b
+def f() -> None:
+    a = b.x + 1
+    a + ''
+[file b.py]
+import a
+x = 1 + 1
+[out]
+tmp/a.py:4: error: Unsupported operand types for + ("int" and "str")
+
+[case testDeferredDecorator]
+import a
+[file a.py]
+import b
+def g() -> None:
+    f('')
+@b.deco
+def f(a: str) -> int: pass
+reveal_type(f)
+x = 1 + 1
+[file b.py]
+from typing import Callable, TypeVar
+import a
+T = TypeVar('T')
+def deco(f: Callable[[T], int]) -> Callable[[T], int]:
+    a.x
+    return f
+[out]
+tmp/a.py:6: error: Revealed type is 'def (builtins.str*) -> builtins.int'
+
+[case testDeferredClassContext]
+class A:
+    def f(self) -> str: return 'foo'
+class B(A):
+    def f(self) -> str: return self.x
+    def initialize(self): self.x = 'bar'
+[out]
+
+
+-- Scripts and __main__
+
+[case testScriptsAreModules]
+# flags: --scripts-are-modules
+[file a]
+pass
+[file b]
+pass
+
+[case testScriptsAreNotModules]
+# cmd: mypy a b
+[file a]
+pass
+[file b]
+pass
+[out]
+
+[case testTypeCheckPrio]
+# cmd: mypy -m part1 part2 part3 part4
+
+[file part1.py]
+from part3 import Thing
+class FirstThing: pass
+
+[file part2.py]
+from part4 import part4_thing as Thing
+
+[file part3.py]
+from part2 import Thing
+reveal_type(Thing)
+
+[file part4.py]
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+    from part1 import FirstThing
+def part4_thing(a: int) -> str: pass
+
+[builtins fixtures/bool.pyi]
+[out]
+tmp/part3.py:2: error: Revealed type is 'def (a: builtins.int) -> builtins.str'
+
+[case testImportStarAliasAnyList]
+import bar
+
+[file bar.py]
+from foo import *
+def bar(y: AnyAlias) -> None:  pass
+
+l = None # type: ListAlias[int]
+reveal_type(l)
+
+[file foo.py]
+from typing import Any, List
+AnyAlias = Any
+ListAlias = List
+[builtins fixtures/list.pyi]
+[out]
+tmp/bar.py:5: error: Revealed type is 'builtins.list[builtins.int]'
+
+[case testImportStarAliasSimpleGeneric]
+from ex2a import *
+
+def do_something(dic: Row) -> None:
+    pass
+
+def do_another() -> Row:
+    return {}
+
+do_something({'good': 'bad'}) # E: Dict entry 0 has incompatible type "str": "str"
+reveal_type(do_another()) # E: Revealed type is 'builtins.dict[builtins.str, builtins.int]'
+
+[file ex2a.py]
+from typing import Dict
+Row = Dict[str, int]
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testImportStarAliasGeneric]
+from y import *
+notes = None  # type: G[X]
+another = G[X]()
+second = XT[str]()
+last = XT[G]()
+
+reveal_type(notes) # E: Revealed type is 'y.G[y.G[builtins.int]]'
+reveal_type(another) # E: Revealed type is 'y.G[y.G*[builtins.int]]'
+reveal_type(second) # E: Revealed type is 'y.G[builtins.str*]'
+reveal_type(last) # E: Revealed type is 'y.G[y.G*]'
+
+[file y.py]
+from typing import Generic, TypeVar
+
+T = TypeVar('T')
+
+class G(Generic[T]):
+    pass
+
+X = G[int]
+XT = G[T]
+[out]
+
+[case testImportStarAliasCallable]
+from foo import *
+from typing import Any
+
+def bar(x: Any, y: AnyCallable) -> Any:
+    return 'foo'
+
+cb = None # type: AnyCallable
+reveal_type(cb) # E: Revealed type is 'def (*Any, **Any) -> Any'
+
+[file foo.py]
+from typing import Callable, Any
+AnyCallable = Callable[..., Any]
+[out]
+
+[case testRevealType]
+import types
+def f() -> types.ModuleType:
+    return types
+reveal_type(f())  # E: Revealed type is 'types.ModuleType'
+reveal_type(types)  # E: Revealed type is 'types.ModuleType'
+
+[builtins fixtures/module.pyi]
+
+[case testClassImportAccessedInMethod]
+class C:
+    import m
+    def foo(self) -> None:
+        x = self.m.a
+        reveal_type(x)  # E: Revealed type is 'builtins.str'
+        # ensure we distinguish self from other variables
+        y = 'hello'
+        z = y.m.a  # E: "str" has no attribute "m"
+    @classmethod
+    def cmethod(cls) -> None:
+        y = cls.m.a
+        reveal_type(y)  # E: Revealed type is 'builtins.str'
+    @staticmethod
+    def smethod(foo: int) -> None:
+        # we aren't confused by first arg of a staticmethod
+        y = foo.m.a  # E: "int" has no attribute "m"
+
+[file m.py]
+a = 'foo'
+
+[builtins fixtures/module.pyi]
+
+[case testModuleAlias]
+import m
+m2 = m
+reveal_type(m2.a)  # E: Revealed type is 'builtins.str'
+m2.b  # E: Module has no attribute "b"
+m2.c = 'bar'  # E: Module has no attribute "c"
+
+[file m.py]
+a = 'foo'
+
+[builtins fixtures/module.pyi]
+
+[case testClassModuleAlias]
+import m
+
+class C:
+    x = m
+    def foo(self) -> None:
+        reveal_type(self.x.a)  # E: Revealed type is 'builtins.str'
+
+[file m.py]
+a = 'foo'
+
+[builtins fixtures/module.pyi]
+
+[case testLocalModuleAlias]
+import m
+
+def foo() -> None:
+    x = m
+    reveal_type(x.a)  # E: Revealed type is 'builtins.str'
+
+class C:
+    def foo(self) -> None:
+        x = m
+        reveal_type(x.a)  # E: Revealed type is 'builtins.str'
+
+[file m.py]
+a = 'foo'
+
+[builtins fixtures/module.pyi]
+
+[case testChainedModuleAlias]
+import m
+m3 = m2 = m
+m4 = m3
+m5 = m4
+reveal_type(m2.a)  # E: Revealed type is 'builtins.str'
+reveal_type(m3.a)  # E: Revealed type is 'builtins.str'
+reveal_type(m4.a)  # E: Revealed type is 'builtins.str'
+reveal_type(m5.a)  # E: Revealed type is 'builtins.str'
+
+[file m.py]
+a = 'foo'
+
+[builtins fixtures/module.pyi]
+
+[case testMultiModuleAlias]
+import m, n
+m2, n2, (m3, n3) = m, n, [m, n]
+reveal_type(m2.a)  # E: Revealed type is 'builtins.str'
+reveal_type(n2.b)  # E: Revealed type is 'builtins.str'
+reveal_type(m3.a)  # E: Revealed type is 'builtins.str'
+reveal_type(n3.b)  # E: Revealed type is 'builtins.str'
+
+x, y = m  # E: 'types.ModuleType' object is not iterable
+x, y, z = m, n  # E: Need more than 2 values to unpack (3 expected)
+x, y = m, m, m  # E: Too many values to unpack (2 expected, 3 provided)
+x, (y, z) = m, n  # E: 'types.ModuleType' object is not iterable
+x, (y, z) = m, (n, n, n)  # E: Too many values to unpack (2 expected, 3 provided)
+
+[file m.py]
+a = 'foo'
+
+[file n.py]
+b = 'bar'
+
+[builtins fixtures/module.pyi]
+
+[case testModuleAliasWithExplicitAnnotation]
+from typing import Any
+import types
+import m
+mod_mod: types.ModuleType = m
+mod_mod2: types.ModuleType
+mod_mod2 = m
+mod_mod3 = m  # type: types.ModuleType
+mod_any: Any = m
+mod_int: int = m  # E: Incompatible types in assignment (expression has type Module, variable has type "int")
+
+reveal_type(mod_mod)  # E: Revealed type is 'types.ModuleType'
+mod_mod.a  # E: Module has no attribute "a"
+reveal_type(mod_mod2)  # E: Revealed type is 'types.ModuleType'
+mod_mod2.a  # E: Module has no attribute "a"
+reveal_type(mod_mod3)  # E: Revealed type is 'types.ModuleType'
+mod_mod3.a  # E: Module has no attribute "a"
+reveal_type(mod_any)  # E: Revealed type is 'Any'
+
+[file m.py]
+a = 'foo'
+
+[builtins fixtures/module.pyi]
+
+[case testModuleAliasPassedToFunction]
+import types
+import m
+
+def takes_module(x: types.ModuleType):
+    reveal_type(x.__file__)  # E: Revealed type is 'builtins.str'
+
+n = m
+takes_module(m)
+takes_module(n)
+
+[file m.py]
+a = 'foo'
+
+[builtins fixtures/module.pyi]
+
+[case testModuleAliasRepeated]
+import m, n
+
+if bool():
+    x = m
+else:
+    x = 3  # E: Incompatible types in assignment (expression has type "int", variable has type Module)
+
+if bool():
+    y = 3
+else:
+    y = m  # E: Incompatible types in assignment (expression has type Module, variable has type "int")
+
+if bool():
+    z = m
+else:
+    z = n  # E: Cannot assign multiple modules to name 'z' without explicit 'types.ModuleType' annotation
+
+[file m.py]
+a = 'foo'
+
+[file n.py]
+a = 3
+
+[builtins fixtures/module.pyi]
+
+[case testModuleAliasRepeatedWithAnnotation]
+import types
+import m, n
+
+x: types.ModuleType
+if bool():
+    x = m
+else:
+    x = n
+
+x.a  # E: Module has no attribute "a"
+reveal_type(x.__file__)  # E: Revealed type is 'builtins.str'
+
+[file m.py]
+a = 'foo'
+
+[file n.py]
+a = 3
+
+[builtins fixtures/module.pyi]
+
+[case testModuleAliasRepeatedComplex]
+import m, n, o
+
+x = m
+x = n  # E: Cannot assign multiple modules to name 'x' without explicit 'types.ModuleType' annotation
+x = o  # E: Cannot assign multiple modules to name 'x' without explicit 'types.ModuleType' annotation
+
+y = o
+y, z = m, n  # E: Cannot assign multiple modules to name 'y' without explicit 'types.ModuleType' annotation
+
+xx = m
+xx = m
+reveal_type(xx.a)  # E: Revealed type is 'builtins.str'
+
+[file m.py]
+a = 'foo'
+
+[file n.py]
+a = 3
+
+[file o.py]
+a = 'bar'
+
+[builtins fixtures/module.pyi]
+
+[case testModuleAliasToOtherModule]
+import m, n
+m = n  # E: Cannot assign multiple modules to name 'm' without explicit 'types.ModuleType' annotation
+
+[file m.py]
+
+[file n.py]
+
+[builtins fixtures/module.pyi]
diff --git a/test-data/unit/check-multiple-inheritance.test b/test-data/unit/check-multiple-inheritance.test
new file mode 100644
index 0000000..678ccad
--- /dev/null
+++ b/test-data/unit/check-multiple-inheritance.test
@@ -0,0 +1,242 @@
+-- Test cases for multiple inheritance.
+--
+-- Related: check-abstract.test
+
+
+-- No name collisions
+-- ------------------
+
+
+[case testSimpleMultipleInheritanceAndMethods]
+import typing
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def g(self, x: str) -> None: pass
+class C(A, B): pass
+c = C()
+c.f(1)
+c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+c.g('')
+c.g(1)  # E: Argument 1 to "g" of "B" has incompatible type "int"; expected "str"
+
+[case testSimpleMultipleInheritanceAndMethods2]
+import typing
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def g(self, x): pass
+class C(A, B): pass
+c = C()
+c.f(1)
+c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+c.g('')
+c.g(1)
+
+[case testSimpleMultipleInheritanceAndInstanceVariables]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 1
+class B:
+    def g(self) -> None:
+        self.y = ''
+class C(A, B): pass
+c = C()
+c.x = 1
+c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+c.y = ''
+c.y = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testSimpleMultipleInheritanceAndInstanceVariableInClassBody]
+import typing
+class A:
+    x = 1
+class B:
+    y = ''
+class C(A, B): pass
+c = C()
+c.x = 1
+c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+c.y = ''
+c.y = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testSimpleMultipleInheritanceAndClassVariable]
+import typing
+class A:
+    x = 1
+class B:
+    y = ''
+class C(A, B): pass
+C.x = 1
+C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+C.y = ''
+C.y = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+
+-- Name collisions
+-- ---------------
+
+
+[case testMethodNameCollisionInMultipleInheritanceWithValidSigs]
+import typing
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def f(self, x: int) -> None: pass
+class C(A, B): pass
+c = C()
+c.f(1)
+c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testInstanceVarNameOverlapInMultipleInheritanceWithCompatibleTypes]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 1
+class B:
+    def g(self) -> None:
+        self.x = 1
+class C(A, B): pass
+c = C()
+c.x = 1
+c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testClassVarNameOverlapInMultipleInheritanceWithCompatibleTypes]
+import typing
+class A:
+    x = 1
+class B:
+    x = 1
+class C(A, B): pass
+c = C()
+c.x = 1
+c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+C.x = 1
+C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testMethodNameCollisionInMultipleInheritanceWithIncompatibleSigs]
+import typing
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def f(self, x: str) -> None: pass
+class C(A, B): pass
+[out]
+main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
+
+[case testMethodNameCollisionInMultipleInheritanceWithIncompatibleSigs2]
+import typing
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def f(self, x, y): pass
+class C(A, B): pass
+class D(B, A): pass
+[out]
+main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
+main:7: error: Definition of "f" in base class "B" is incompatible with definition in base class "A"
+
+
+[case testMethodOverridingWithBothDynamicallyAndStaticallyTypedMethods]
+class A:
+    def f(self) -> int: pass
+class B:
+    def f(self): pass
+class C(B, A): pass
+class D(A, B): pass
+[out]
+
+[case testInstanceVarNameOverlapInMultipleInheritanceWithInvalidTypes]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 1
+class B:
+    def g(self) -> None:
+        self.x = ''
+class C(A, B): pass
+[out]
+main:8: error: Definition of "x" in base class "A" is incompatible with definition in base class "B"
+
+[case testClassVarNameOverlapInMultipleInheritanceWithInvalidTypes]
+import typing
+class A:
+    x = 1
+class B:
+    x = ''
+class C(A, B): pass
+[out]
+main:6: error: Definition of "x" in base class "A" is incompatible with definition in base class "B"
+
+[case testMethodOverlapsWithClassVariableInMultipleInheritance]
+from typing import Callable
+class A:
+    def f(self) -> None: pass
+class B:
+    f = ''
+class C(A, B): pass
+[out]
+main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
+
+[case testMethodOverlapsWithInstanceVariableInMultipleInheritance]
+from typing import Callable
+class A:
+    def f(self) -> None: pass
+class B:
+    def g(self) -> None:
+        self.f = ''
+class C(A, B): pass
+[out]
+main:7: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
+
+[case testMultipleInheritanceAndInit]
+import typing
+class A:
+    def __init__(self, x: int) -> None: pass
+class B:
+    def __init__(self) -> None: pass
+class C(A, B): pass
+
+[case testMultipleInheritanceAndDifferentButCompatibleSignatures]
+class A:
+    def clear(self): pass
+
+class B:
+    def clear(self, x=None): pass
+
+class C(B, A): pass
+class D(A, B): pass
+[out]
+main:8: error: Definition of "clear" in base class "A" is incompatible with definition in base class "B"
+
+
+-- Special cases
+-- -------------
+
+
+[case testGenericInheritanceAndOverridingWithMultipleInheritance]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+class G(Generic[T]):
+    def f(self, s: int) -> 'G[T]': pass
+class A(G[int]):
+    def f(self, s: int) -> 'A': pass
+class B(A, int): pass
+
+[case testCannotDetermineTypeInMultipleInheritance]
+from typing import Callable, TypeVar
+T = TypeVar('T')
+class A(B, C):
+    def f(self): pass
+class B:
+    @dec
+    def f(self): pass
+class C:
+    @dec
+    def f(self): pass
+def dec(f: Callable[..., T]) -> Callable[..., T]:
+    return f
+[out]
+main:3: error: Cannot determine type of 'f' in base class 'B'
+main:3: error: Cannot determine type of 'f' in base class 'C'
diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test
new file mode 100644
index 0000000..7d313da
--- /dev/null
+++ b/test-data/unit/check-namedtuple.test
@@ -0,0 +1,467 @@
+[case testNamedTupleUsedAsTuple]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+a, b = x
+b = x[0]
+a = x[1]
+a, b, c = x # E: Need more than 2 values to unpack (3 expected)
+x[2] # E: Tuple index out of range
+
+[case testNamedTupleWithTupleFieldNamesUsedAsTuple]
+from collections import namedtuple
+
+X = namedtuple('X', ('x', 'y'))
+x = None  # type: X
+a, b = x
+b = x[0]
+a = x[1]
+a, b, c = x # E: Need more than 2 values to unpack (3 expected)
+x[2] # E: Tuple index out of range
+
+[case testNamedTupleNoUnderscoreFields]
+from collections import namedtuple
+
+X = namedtuple('X', 'x, _y, _z')  # E: namedtuple() field names cannot start with an underscore: _y, _z
+
+[case testNamedTupleAccessingAttributes]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+x.x
+x.y
+x.z # E: "X" has no attribute "z"
+
+
+[case testNamedTupleAttributesAreReadOnly]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+x.x = 5 # E: Property "x" defined in "X" is read-only
+x.y = 5 # E: Property "y" defined in "X" is read-only
+x.z = 5 # E: "X" has no attribute "z"
+
+class A(X): pass
+a = None  # type: A
+a.x = 5 # E: Property "x" defined in "A" is read-only
+a.y = 5 # E: Property "y" defined in "A" is read-only
+-- a.z = 5 # not supported yet
+
+
+[case testNamedTupleCreateWithPositionalArguments]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = X(1, 'x')
+x.x
+x.z      # E: "X" has no attribute "z"
+x = X(1) # E: Too few arguments for "X"
+x = X(1, 2, 3)  # E: Too many arguments for "X"
+
+[case testCreateNamedTupleWithKeywordArguments]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = X(x=1, y='x')
+x = X(1, y='x')
+x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X"
+x = X(y=1) # E: Missing positional argument "x" in call to "X"
+
+
+[case testNamedTupleCreateAndUseAsTuple]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = X(1, 'x')
+a, b = x
+a, b, c = x  # E: Need more than 2 values to unpack (3 expected)
+
+
+[case testNamedTupleWithItemTypes]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int),
+                     ('b', str)])
+n = N(1, 'x')
+s = n.a # type: str  # E: Incompatible types in assignment (expression has type "int", \
+                          variable has type "str")
+i = n.b # type: int  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+x, y = n
+x = y  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+
+[case testNamedTupleWithTupleFieldNamesWithItemTypes]
+from typing import NamedTuple
+N = NamedTuple('N', (('a', int),
+                     ('b', str)))
+n = N(1, 'x')
+s = n.a # type: str  # E: Incompatible types in assignment (expression has type "int", \
+                          variable has type "str")
+i = n.b # type: int  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+x, y = n
+x = y  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+
+[case testNamedTupleConstructorArgumentTypes]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int),
+                     ('b', str)])
+n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "int"
+n = N(1, b=2)   # E: Argument 2 to "N" has incompatible type "int"; expected "str"
+N(1, 'x')
+N(b='x', a=1)
+
+[case testNamedTupleAsBaseClass]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int),
+                     ('b', str)])
+class X(N):
+    pass
+x = X(1, 2)  # E: Argument 2 to "X" has incompatible type "int"; expected "str"
+s = ''
+i = 0
+s = x.a  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+i, s = x
+s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testNamedTupleAsBaseClass2]
+from typing import NamedTuple
+class X(NamedTuple('N', [('a', int),
+                         ('b', str)])):
+    pass
+x = X(1, 2)  # E: Argument 2 to "X" has incompatible type "int"; expected "str"
+s = ''
+i = 0
+s = x.a  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+i, s = x
+s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+
+[case testNamedTuplesTwoAsBaseClasses]
+from typing import NamedTuple
+A = NamedTuple('A', [('a', int)])
+B = NamedTuple('B', [('a', int)])
+class X(A, B):  # E: Class has two incompatible bases derived from tuple
+    pass
+
+
+[case testNamedTuplesTwoAsBaseClasses2]
+from typing import NamedTuple
+A = NamedTuple('A', [('a', int)])
+class X(A, NamedTuple('B', [('a', int)])): # E: Class has two incompatible bases derived from tuple
+    pass
+
+
+[case testNamedTupleSelfTypeWithNamedTupleAsBase]
+from typing import NamedTuple
+A = NamedTuple('A', [('a', int), ('b', str)])
+class B(A):
+    def f(self, x: int) -> None:
+        self.f(self.a)
+        self.f(self.b)  # E: Argument 1 to "f" of "B" has incompatible type "str"; expected "int"
+        i = 0
+        s = ''
+        i, s = self
+        i, i = self  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+
+
+[out]
+
+[case testNamedTupleTypeReferenceToClassDerivedFrom]
+from typing import NamedTuple
+A = NamedTuple('A', [('a', int), ('b', str)])
+class B(A):
+    def f(self, x: 'B') -> None:
+        i = 0
+        s = ''
+        self = x
+        i, s = x
+        i, s = x.a, x.b
+        i, s = x.a, x.a  # E: Incompatible types in assignment (expression has type "int", \
+                              variable has type "str")
+        i, i = self  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+
+[out]
+
+[case testNamedTupleSubtyping]
+from typing import NamedTuple, Tuple
+A = NamedTuple('A', [('a', int), ('b', str)])
+class B(A): pass
+a = A(1, '')
+b = B(1, '')
+t = None  # type: Tuple[int, str]
+b = a  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = t  # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "A")
+b = t  # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "B")
+t = a
+t = (1, '')
+t = b
+a = b
+
+
+[case testNamedTupleSimpleTypeInference]
+from typing import NamedTuple, Tuple
+A = NamedTuple('A', [('a', int)])
+l = [A(1), A(2)]
+a = A(1)
+a = l[0]
+(i,) = l[0]
+i, i = l[0]  # E: Need more than 1 value to unpack (2 expected)
+l = [A(1)]
+a = (1,)  # E: Incompatible types in assignment (expression has type "Tuple[int]", \
+               variable has type "A")
+[builtins fixtures/list.pyi]
+
+[case testNamedTupleMissingClassAttribute]
+import collections
+MyNamedTuple = collections.namedtuple('MyNamedTuple', ['spam', 'eggs'])
+MyNamedTuple.x # E: Type[MyNamedTuple] has no attribute "x"
+
+
+[case testNamedTupleEmptyItems]
+from typing import NamedTuple
+A = NamedTuple('A', [])
+
+
+[case testNamedTupleProperty]
+from typing import NamedTuple
+A = NamedTuple('A', [('a', int)])
+class B(A):
+    @property
+    def b(self) -> int:
+        return self.a
+class C(B): pass
+B(1).b
+C(2).b
+
+[builtins fixtures/property.pyi]
+
+[case testNamedTupleAsDict]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+reveal_type(x._asdict())  # E: Revealed type is 'builtins.dict[builtins.str, Any]'
+
+[builtins fixtures/dict.pyi]
+
+[case testNamedTupleReplace]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+reveal_type(x._replace())  # E: Revealed type is 'Tuple[Any, Any, fallback=__main__.X]'
+x._replace(y=5)
+x._replace(x=3)
+x._replace(x=3, y=5)
+x._replace(z=5)  # E: Unexpected keyword argument "z" for X._replace
+x._replace(5)  # E: Too many positional arguments for X._replace
+
+[case testNamedTupleReplaceAsClass]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+X._replace(x, x=1, y=2)
+X._replace(x=1, y=2)  # E: Missing positional argument "self" in call to X._replace
+
+
+[case testNamedTupleReplaceTyped]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+x = None  # type: X
+reveal_type(x._replace())  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
+x._replace(x=5)
+x._replace(y=5)  # E: Argument 1 to X._replace has incompatible type "int"; expected "str"
+
+[case testNamedTupleMake]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+reveal_type(X._make([5, 'a']))  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
+X._make('a b')  # E: Argument 1 to X._make has incompatible type "str"; expected Iterable[Any]
+
+-- # FIX: not a proper class method
+-- x = None  # type: X
+-- reveal_type(x._make([5, 'a']))  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
+-- x._make('a b')  # E: Argument 1 to X._make has incompatible type "str"; expected Iterable[Any]
+
+[builtins fixtures/list.pyi]
+
+[case testNamedTupleFields]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+reveal_type(X._fields)  # E: Revealed type is 'Tuple[builtins.str, builtins.str]'
+
+[case testNamedTupleSource]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+reveal_type(X._source)  # E: Revealed type is 'builtins.str'
+x = None  # type: X
+reveal_type(x._source)  # E: Revealed type is 'builtins.str'
+
+[case testNamedTupleUnit]
+from typing import NamedTuple
+
+X = NamedTuple('X', [])
+x = X()  # type: X
+x._replace()
+x._fields[0]  # E: Tuple index out of range
+
+[case testNamedTupleJoinNamedTuple]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+Y = NamedTuple('Y', [('x', int), ('y', str)])
+reveal_type([X(3, 'b'), Y(1, 'a')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+
+[builtins fixtures/list.pyi]
+
+[case testNamedTupleJoinTuple]
+from typing import NamedTuple, Tuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+reveal_type([(3, 'b'), X(1, 'a')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+reveal_type([X(1, 'a'), (3, 'b')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+
+[builtins fixtures/list.pyi]
+
+[case testNamedTupleFieldTypes]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+reveal_type(X._field_types)  # E: Revealed type is 'builtins.dict[builtins.str, Any]'
+x = None  # type: X
+reveal_type(x._field_types)  # E: Revealed type is 'builtins.dict[builtins.str, Any]'
+
+[builtins fixtures/dict.pyi]
+
+[case testNamedTupleAndOtherSuperclass]
+from typing import NamedTuple
+
+class A: pass
+def f(x: A) -> None: pass
+
+class B(NamedTuple('B', []), A): pass
+f(B())
+x = None  # type: A
+x = B()
+
+# Sanity check: fail if baseclass does not match
+class C: pass
+def g(x: C) -> None: pass
+class D(NamedTuple('D', []), A): pass
+
+g(D())  # E: Argument 1 to "g" has incompatible type "D"; expected "C"
+y = None  # type: C
+y = D()  # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+
+[case testNamedTupleSelfTypeMethod]
+from typing import TypeVar, NamedTuple
+
+T = TypeVar('T', bound='A')
+
+class A(NamedTuple('A', [('x', str)])):
+    def member(self: T) -> T:
+        return self
+
+class B(A):
+    pass
+
+a = None  # type: A
+a = A('').member()
+b = None  # type: B
+b = B('').member()
+a = B('')
+a = B('').member()
+
+[case testNamedTupleSelfTypeReplace]
+from typing import NamedTuple, TypeVar
+A = NamedTuple('A', [('x', str)])
+reveal_type(A('hello')._replace(x=''))  # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.A]'
+a = None  # type: A
+a = A('hello')._replace(x='')
+
+class B(A):
+    pass
+
+reveal_type(B('hello')._replace(x=''))  # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.B]'
+b = None  # type: B
+b = B('hello')._replace(x='')
+
+[case testNamedTupleSelfTypeMake]
+from typing import NamedTuple, TypeVar
+A = NamedTuple('A', [('x', str)])
+reveal_type(A._make(['']))  # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.A]'
+a = A._make([''])  # type: A
+
+class B(A):
+    pass
+
+reveal_type(B._make(['']))  # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.B]'
+b = B._make([''])  # type: B
+
+[builtins fixtures/list.pyi]
+
+[case testNamedTupleIncompatibleRedefinition]
+from typing import NamedTuple
+class Crash(NamedTuple):
+    count: int  # E: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as Callable[[Tuple[Any, ...], Any], int])
+[builtins fixtures/tuple.pyi]
+
+[case testNamedTupleInClassNamespace]
+# https://github.com/python/mypy/pull/2553#issuecomment-266474341
+from typing import NamedTuple
+class C:
+    def f(self):
+        A = NamedTuple('A', [('x', int)])
+    def g(self):
+        A = NamedTuple('A', [('y', int)])
+C.A  # E: Type[C] has no attribute "A"
+
+[case testNamedTupleInFunction]
+from typing import NamedTuple
+def f() -> None:
+    A = NamedTuple('A', [('x', int)])
+A  # E: Name 'A' is not defined
+
+[case testNamedTupleWithImportCycle]
+import a
+[file a.py]
+from collections import namedtuple
+from b import f
+
+N = namedtuple('N', 'a')
+
+class X(N): pass
+[file b.py]
+import a
+
+def f(x: a.X) -> None:
+    # The type of x is broken (https://github.com/python/mypy/issues/3016) but we need to
+    # do something reasonable here to avoid a regression.
+    reveal_type(x)
+    x = a.X(1)
+    reveal_type(x)
+[out]
+tmp/b.py:6: error: Revealed type is 'a.X'
+tmp/b.py:8: error: Revealed type is 'Tuple[Any, fallback=a.X]'
+
+[case testForwardReferenceInNamedTuple]
+from typing import NamedTuple
+
+class A(NamedTuple):
+    b: 'B'
+    x: int
+
+class B:
+    pass
diff --git a/test-data/unit/check-newsyntax.test b/test-data/unit/check-newsyntax.test
new file mode 100644
index 0000000..645fbe5
--- /dev/null
+++ b/test-data/unit/check-newsyntax.test
@@ -0,0 +1,153 @@
+[case testNewSyntaxRequire36]
+# flags: --python-version 3.5
+x: int = 5  # E: Variable annotation syntax is only supported in Python 3.6 and greater
+[out]
+
+[case testNewSyntaxSyntaxError]
+# flags: --python-version 3.6
+x: int: int  # E: invalid syntax
+[out]
+
+[case testNewSyntaxBasics]
+# flags: --python-version 3.6
+x: int
+x = 5
+y: int = 5
+
+a: str
+a = 5  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+b: str = 5  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+zzz: int
+zzz: str  # E: Name 'zzz' already defined
+[out]
+
+[case testNewSyntaxWithDict]
+# flags: --python-version 3.6
+from typing import Dict, Any
+
+d: Dict[int, str] = {}
+d[42] = 'ab'
+d[42] = 42  # E: Incompatible types in assignment (expression has type "int", target has type "str")
+d['ab'] = 'ab'  # E: Invalid index type "str" for Dict[int, str]; expected type "int"
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testNewSyntaxWithRevealType]
+# flags: --python-version 3.6
+from typing import Dict
+
+def tst_local(dct: Dict[int, T]) -> Dict[T, int]:
+    ret: Dict[T, int] = {}
+    return ret
+
+reveal_type(tst_local({1: 'a'}))  # E: Revealed type is 'builtins.dict[builtins.str*, builtins.int]'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testNewSyntaxWithInstanceVars]
+# flags: --python-version 3.6
+class TstInstance:
+    a: str
+    def __init__(self) -> None:
+        self.x: int
+
+TstInstance().x = 5
+TstInstance().x = 'ab'  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+TstInstance().a = 5  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+TstInstance().a = 'ab'
+[out]
+
+[case testNewSyntaxWithClassVars]
+# flags: --strict-optional --python-version 3.6
+class CCC:
+    a: str = None  # E: Incompatible types in assignment (expression has type None, variable has type "str")
+[out]
+
+[case testNewSyntaxWithStrictOptional]
+# flags: --strict-optional --python-version 3.6
+strict: int
+strict = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+strict2: int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+[out]
+
+[case testNewSyntaxWithStrictOptionalFunctions]
+# flags: --strict-optional --python-version 3.6
+def f() -> None:
+    x: int
+    x = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+[out]
+
+[case testNewSyntaxWithStrictOptionalClasses]
+# flags: --strict-optional --python-version 3.6
+class C:
+    def meth(self) -> None:
+        x: int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+        self.x: int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+[out]
+
+[case testNewSyntaxSpecialAssign]
+# flags: --python-version 3.6
+class X:
+    x: str
+    x[0]: int
+    x.x: int
+
+[out]
+main:4: error: Unexpected type declaration
+main:4: error: Unsupported target for indexed assignment
+main:5: error: Type cannot be declared in assignment to non-self attribute
+main:5: error: "str" has no attribute "x"
+
+[case testNewSyntaxAsyncComprehensionError]
+# flags: --python-version 3.5
+async def f():
+    results = [i async for i in aiter() if i % 2]  # E: Async comprehensions are only supported in Python 3.6 and greater
+
+
+[case testNewSyntaxFstringError]
+# flags: --python-version 3.5
+f''  # E: Format strings are only supported in Python 3.6 and greater
+
+[case testNewSyntaxFStringBasics]
+# flags: --python-version 3.6
+f'foobar'
+f'{"foobar"}'
+f'foo{"bar"}'
+f'.{1}.'
+f'{type(1)}'
+a: str
+a = f'foobar'
+a = f'{"foobar"}'
+[builtins fixtures/f_string.pyi]
+
+[case testNewSyntaxFStringExpressionsOk]
+# flags: --python-version 3.6
+f'.{1 + 1}.'
+f'.{1 + 1}.{"foo" + "bar"}'
+[builtins fixtures/f_string.pyi]
+
+[case testNewSyntaxFStringExpressionsErrors]
+# flags: --python-version 3.6
+f'{1 + ""}'
+f'.{1 + ""}'
+[builtins fixtures/f_string.pyi]
+[out]
+main:2: error: Unsupported operand types for + ("int" and "str")
+main:3: error: Unsupported operand types for + ("int" and "str")
+
+[case testNewSyntaxFStringParseFormatOptions]
+# flags: --python-version 3.6
+value = 10.5142
+width = 10
+precision = 4
+f'result: {value:{width}.{precision}}'
+[builtins fixtures/f_string.pyi]
+
+[case testNewSyntaxFStringSingleField]
+# flags: --python-version 3.6
+v = 1
+reveal_type(f'{v}') # E: Revealed type is 'builtins.str'
+reveal_type(f'{1}') # E: Revealed type is 'builtins.str'
+[builtins fixtures/f_string.pyi]
+
diff --git a/test-data/unit/check-newtype.test b/test-data/unit/check-newtype.test
new file mode 100644
index 0000000..2af1fbf
--- /dev/null
+++ b/test-data/unit/check-newtype.test
@@ -0,0 +1,346 @@
+-- Checks NewType(...)
+
+-- Checks for basic functionality
+
+[case testNewTypePEP484Example1]
+from typing import NewType
+
+UserId = NewType('UserId', int)
+
+def name_by_id(user_id: UserId) -> str:
+    return "foo"
+
+UserId('user')  # E: Argument 1 to "UserId" has incompatible type "str"; expected "int"
+name_by_id(42)  # E: Argument 1 to "name_by_id" has incompatible type "int"; expected "UserId"
+name_by_id(UserId(42))
+
+id = UserId(5)
+num = id + 1
+
+reveal_type(id)  # E: Revealed type is '__main__.UserId'
+reveal_type(num)  # E: Revealed type is 'builtins.int'
+[out]
+
+[case testNewTypePEP484Example2]
+from typing import NewType
+
+class PacketId:
+    def __init__(self, major: int, minor: int) -> None:
+        self._major = major
+        self._minor = minor
+
+TcpPacketId = NewType('TcpPacketId', PacketId)
+
+packet = PacketId(100, 100)
+tcp_packet = TcpPacketId(packet)
+tcp_packet = TcpPacketId(127, 0)
+
+[out]
+main:12: error: Too many arguments for "TcpPacketId"
+main:12: error: Argument 1 to "TcpPacketId" has incompatible type "int"; expected "PacketId"
+
+[case testNewTypeWithTuples]
+from typing import NewType, Tuple
+TwoTuple = NewType('TwoTuple', Tuple[int, str])
+a = TwoTuple((3, "a"))
+b = TwoTuple(("a", 3))  # E: Argument 1 to "TwoTuple" has incompatible type "Tuple[str, int]"; expected "Tuple[int, str]"
+
+reveal_type(a[0])  # E: Revealed type is 'builtins.int'
+reveal_type(a[1])  # E: Revealed type is 'builtins.str'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testNewTypeWithLists]
+from typing import NewType, List
+UserId = NewType('UserId', int)
+IdList = NewType('IdList', List[UserId])
+
+bad1 = IdList([1])  # E: List item 0 has incompatible type "int"
+
+foo = IdList([])
+foo.append(3)  # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "UserId"
+foo.append(UserId(3))
+foo.extend([UserId(1), UserId(2), UserId(3)])
+foo.extend(IdList([UserId(1), UserId(2), UserId(3)]))
+bar = IdList([UserId(2)])
+
+baz = foo + bar
+reveal_type(foo)  # E: Revealed type is '__main__.IdList'
+reveal_type(bar)  # E: Revealed type is '__main__.IdList'
+reveal_type(baz)  # E: Revealed type is 'builtins.list[__main__.UserId*]'
+
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNewTypeWithGenerics]
+from typing import TypeVar, Generic, NewType, Any
+
+T = TypeVar('T')
+
+class Base(Generic[T]):
+    def __init__(self, item: T) -> None:
+        self.item = item
+
+    def getter(self) -> T:
+        return self.item
+
+Derived1 = NewType('Derived1', Base[str])
+Derived2 = NewType('Derived2', Base)       # Implicit 'Any'
+Derived3 = NewType('Derived3', Base[Any])  # Explicit 'Any'
+
+Derived1(Base(1))  # E: Argument 1 to "Base" has incompatible type "int"; expected "str"
+Derived1(Base('a'))
+Derived2(Base(1))
+Derived2(Base('a'))
+Derived3(Base(1))
+Derived3(Base('a'))
+
+reveal_type(Derived1(Base('a')).getter())  # E: Revealed type is 'builtins.str*'
+reveal_type(Derived3(Base('a')).getter())  # E: Revealed type is 'Any'
+[out]
+
+[case testNewTypeWithNamedTuple]
+from collections import namedtuple
+from typing import NewType, NamedTuple
+
+Vector1 = namedtuple('Vector1', ['x', 'y'])
+Point1 = NewType('Point1', Vector1)
+p1 = Point1(Vector1(1, 2))
+reveal_type(p1.x)  # E: Revealed type is 'Any'
+reveal_type(p1.y)  # E: Revealed type is 'Any'
+
+Vector2 = NamedTuple('Vector2', [('x', int), ('y', int)])
+Point2 = NewType('Point2', Vector2)
+p2 = Point2(Vector2(1, 2))
+reveal_type(p2.x)  # E: Revealed type is 'builtins.int'
+reveal_type(p2.y)  # E: Revealed type is 'builtins.int'
+
+class Vector3:
+    def __init__(self, x: int, y: int) -> None:
+        self.x = x
+        self.y = y
+Point3 = NewType('Point3', Vector3)
+p3 = Point3(Vector3(1, 3))
+reveal_type(p3.x)  # E: Revealed type is 'builtins.int'
+reveal_type(p3.y)  # E: Revealed type is 'builtins.int'
+[out]
+
+[case testNewTypeWithCasts]
+from typing import NewType, cast
+UserId = NewType('UserId', int)
+foo = UserId(3)
+foo = cast(UserId, 3)
+foo = cast(UserId, "foo")
+foo = cast(UserId, UserId(4))
+[out]
+
+[case testNewTypeWithTypeAliases]
+from typing import NewType
+Foo = int
+Bar = NewType('Bar', Foo)
+Bar2 = Bar
+
+def func1(x: Foo) -> Bar:
+    return Bar(x)
+
+def func2(x: int) -> Bar:
+    return Bar(x)
+
+def func3(x: Bar2) -> Bar:
+    return x
+
+x = Bar(42)
+y = Bar2(42)
+
+y = func3(x)
+[out]
+
+[case testNewTypeWithNewType]
+from typing import NewType
+A = NewType('A', int)
+B = NewType('B', A)
+C = A
+D = C
+E = NewType('E', D)
+
+a = A(1)
+b = B(a)
+e = E(a)
+
+def funca(a: A) -> None: ...
+def funcb(b: B) -> None: ...
+
+funca(a)
+funca(b)
+funca(e)
+funcb(a)  # E: Argument 1 to "funcb" has incompatible type "A"; expected "B"
+funcb(b)
+funcb(e)  # E: Argument 1 to "funcb" has incompatible type "E"; expected "B"
+
+[out]
+
+-- Make sure NewType works as expected in a variety of different scopes/across files
+
+[case testNewTypeInLocalScope]
+from typing import NewType
+A = NewType('A', int)
+a = A(3)
+
+def func() -> None:
+    A = NewType('A', str)
+    B = NewType('B', str)
+
+    a = A(3)  # E: Argument 1 to "A" has incompatible type "int"; expected "str"
+    a = A('xyz')
+    b = B('xyz')
+
+class MyClass:
+    C = NewType('C', float)
+
+    def foo(self) -> 'MyClass.C':
+        return MyClass.C(3.2)
+
+b = A(3)
+c = MyClass.C(3.5)
+[out]
+
+[case testNewTypeInMultipleFiles]
+import a
+import b
+list1 = [a.UserId(1), a.UserId(2)]
+list1.append(b.UserId(3))  # E: Argument 1 to "append" of "list" has incompatible type "b.UserId"; expected "a.UserId"
+
+[file a.py]
+from typing import NewType
+UserId = NewType('UserId', int)
+
+[file b.py]
+from typing import NewType
+UserId = NewType('UserId', int)
+
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNewTypeWithIncremental]
+import m
+
+[file m.py]
+from typing import NewType
+
+UserId = NewType('UserId', int)
+
+def name_by_id(user_id: UserId) -> str:
+    return "foo"
+
+name_by_id(UserId(42))
+
+id = UserId(5)
+num = id + 1
+
+[file m.py.2]
+from typing import NewType
+
+UserId = NewType('UserId', int)
+
+def name_by_id(user_id: UserId) -> str:
+    return "foo"
+
+name_by_id(UserId(42))
+
+id = UserId(5)
+num = id + 1
+
+reveal_type(id)
+reveal_type(num)
+[rechecked m]
+[stale]
+[out1]
+[out2]
+tmp/m.py:13: error: Revealed type is 'm.UserId'
+tmp/m.py:14: error: Revealed type is 'builtins.int'
+
+
+-- Check misuses of NewType fail
+
+[case testNewTypeBadInitializationFails]
+from typing import NewType
+
+a = NewType('b', int)  # E: String argument 1 'b' to NewType(...) does not match variable name 'a'
+b = NewType('b', 3)  # E: Argument 2 to NewType(...) must be a valid type
+c = NewType(2, int)  # E: Argument 1 to NewType(...) must be a string literal
+foo = "d"
+d = NewType(foo, int)  # E: Argument 1 to NewType(...) must be a string literal
+e = NewType(name='e', tp=int)  # E: NewType(...) expects exactly two positional arguments
+f = NewType('f', tp=int)  # E: NewType(...) expects exactly two positional arguments
+[out]
+
+[case testNewTypeWithAnyFails]
+from typing import NewType, Any
+A = NewType('A', Any)  # E: Argument 2 to NewType(...) must be subclassable (got Any)
+[out]
+
+[case testNewTypeWithUnionsFails]
+from typing import NewType, Union
+Foo = NewType('Foo', Union[int, float])  # E: Argument 2 to NewType(...) must be subclassable (got Union[builtins.int, builtins.float])
+[out]
+
+[case testNewTypeWithTypeTypeFails]
+from typing import NewType, Type
+Foo = NewType('Foo', Type[int])  # E: Argument 2 to NewType(...) must be subclassable (got Type[builtins.int])
+a = Foo(type(3))
+[builtins fixtures/args.pyi]
+[out]
+
+[case testNewTypeWithTypeVarsFails]
+from typing import NewType, TypeVar, List
+T = TypeVar('T')
+A = NewType('A', T)
+B = NewType('B', List[T])
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Argument 2 to NewType(...) must be subclassable (got T?)
+main:3: error: Invalid type "__main__.T"
+main:4: error: Invalid type "__main__.T"
+
+[case testNewTypeRedefiningVariablesFails]
+from typing import NewType
+
+a = 3
+a = NewType('a', int)
+
+b = NewType('b', int)
+b = NewType('b', float)  # this line throws two errors
+
+c = NewType('c', str)   # type: str
+[out]
+main:4: error: Cannot redefine 'a' as a NewType
+main:7: error: Cannot assign to a type
+main:7: error: Cannot redefine 'b' as a NewType
+main:9: error: Cannot declare the type of a NewType declaration
+
+[case testNewTypeAddingExplicitTypesFails]
+from typing import NewType
+UserId = NewType('UserId', int)
+
+a = 3  # type: UserId  # E: Incompatible types in assignment (expression has type "int", variable has type "UserId")
+[out]
+
+[case testNewTypeTestSubclassingFails]
+from typing import NewType
+class A: pass
+B = NewType('B', A)
+class C(B): pass  # E: Cannot subclass NewType
+[out]
+
+[case testNewTypeAny]
+from typing import NewType
+Any = NewType('Any', int)
+Any(5)
+
+[case testNewTypeAndIsInstance]
+from typing import NewType
+T = NewType('T', int)
+d: object
+if isinstance(d, T):  # E: Cannot use isinstance() with a NewType type
+    reveal_type(d) # E: Revealed type is '__main__.T'
+[builtins fixtures/isinstancelist.pyi]
diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test
new file mode 100644
index 0000000..ee05524
--- /dev/null
+++ b/test-data/unit/check-optional.test
@@ -0,0 +1,615 @@
+-- Tests for strict Optional behavior
+
+[case testImplicitNoneType]
+x = None
+x()  # E: None not callable
+
+[case testExplicitNoneType]
+x = None  # type: None
+x()  # E: None not callable
+
+[case testNoneMemberOfOptional]
+from typing import Optional
+x = None  # type: Optional[int]
+
+[case testTypeMemberOfOptional]
+from typing import Optional
+x = 0  # type: Optional[int]
+
+[case testNoneNotMemberOfType]
+x = None  # type: int
+[out]
+main:1: error: Incompatible types in assignment (expression has type None, variable has type "int")
+
+[case testTypeNotMemberOfNone]
+x = 0  # type: None
+[out]
+main:1: error: Incompatible types in assignment (expression has type "int", variable has type None)
+
+[case testOptionalNotMemberOfType]
+from typing import Optional
+def f(a: int) -> None: pass
+x = None  # type:  Optional[int]
+f(x)  # E: Argument 1 to "f" has incompatible type "Optional[int]"; expected "int"
+
+[case testIsinstanceCases]
+from typing import Optional
+x = None  # type:  Optional[int]
+if isinstance(x, int):
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+else:
+  reveal_type(x)  # E: Revealed type is 'builtins.None'
+[builtins fixtures/isinstance.pyi]
+
+[case testIfCases]
+from typing import Optional
+x = None  # type:  Optional[int]
+if x:
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+else:
+  reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[builtins fixtures/bool.pyi]
+
+[case testIfNotCases]
+from typing import Optional
+x = None  # type:  Optional[int]
+if not x:
+  reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+else:
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/bool.pyi]
+
+[case testIsNotNoneCases]
+from typing import Optional
+x = None  # type:  Optional[int]
+if x is not None:
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+else:
+  reveal_type(x)  # E: Revealed type is 'builtins.None'
+[builtins fixtures/bool.pyi]
+
+[case testIsNoneCases]
+from typing import Optional
+x = None  # type:  Optional[int]
+if x is None:
+  reveal_type(x)  # E: Revealed type is 'builtins.None'
+else:
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[builtins fixtures/bool.pyi]
+
+[case testAnyCanBeNone]
+from typing import Optional, Any
+x = None  # type:  Any
+if x is None:
+  reveal_type(x)  # E: Revealed type is 'builtins.None'
+else:
+  reveal_type(x)  # E: Revealed type is 'Any'
+[builtins fixtures/bool.pyi]
+
+[case testOrCases]
+from typing import Optional
+x = None  # type: Optional[str]
+y1 = x or 'a'
+reveal_type(y1)  # E: Revealed type is 'builtins.str'
+y2 = x or 1
+reveal_type(y2)  # E: Revealed type is 'Union[builtins.str, builtins.int]'
+z1 = 'a' or x
+reveal_type(z1)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+z2 = int() or x
+reveal_type(z2)  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.None]'
+
+[case testAndCases]
+from typing import Optional
+x = None  # type: Optional[str]
+y1 = x and 'b'
+reveal_type(y1)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+y2 = x and 1  # x could be '', so...
+reveal_type(y2)  # E: Revealed type is 'Union[builtins.str, builtins.None, builtins.int]'
+z1 = 'b' and x
+reveal_type(z1)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+z2 = int() and x
+reveal_type(z2)  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.None]'
+
+[case testLambdaReturningNone]
+f = lambda: None
+x = f()  # E: Function does not return a value
+
+[case testNoneArgumentType]
+def f(x: None) -> None: pass
+f(None)
+
+[case testInferOptionalFromDefaultNone]
+def f(x: int = None) -> None:
+  x + 1  # E: Unsupported left operand type for + (some union)
+f(None)
+[out]
+
+[case testNoInferOptionalFromDefaultNone]
+# flags: --no-implicit-optional
+def f(x: int = None) -> None:  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+  pass
+[out]
+
+[case testInferOptionalFromDefaultNoneComment]
+def f(x=None):
+  # type: (int) -> None
+  x + 1  # E: Unsupported left operand type for + (some union)
+f(None)
+[out]
+
+[case testNoInferOptionalFromDefaultNoneComment]
+# flags: --no-implicit-optional
+def f(x=None):  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+  # type: (int) -> None
+  pass
+[out]
+
+[case testInferOptionalType]
+x = None
+if bool():
+  # scope limit assignment
+  x = 1
+  # in scope of the assignment, x is an int
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+# out of scope of the assignment, it's an Optional[int]
+reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[builtins fixtures/bool.pyi]
+
+[case testInferOptionalTypeLocallyBound]
+x = None
+x = 1
+reveal_type(x)  # E: Revealed type is 'builtins.int'
+
+[case testInferOptionalAnyType]
+from typing import Any
+x = None
+a = None  # type: Any
+if bool():
+  x = a
+  reveal_type(x)  # E: Revealed type is 'Any'
+reveal_type(x)  # E: Revealed type is 'Union[Any, builtins.None]'
+[builtins fixtures/bool.pyi]
+
+[case testInferOptionalTypeFromOptional]
+from typing import Optional
+y = None  # type: Optional[int]
+x = None
+x = y
+reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+
+[case testInferOptionalListType]
+x = [None]
+x.append(1)  # E: Argument 1 to "append" of "list" has incompatible type "int"; expected None
+[builtins fixtures/list.pyi]
+
+[case testInferNonOptionalListType]
+x = []
+x.append(1)
+x()  # E: List[int] not callable
+[builtins fixtures/list.pyi]
+
+[case testInferOptionalDictKeyValueTypes]
+x = {None: None}
+x["bar"] = 1
+[builtins fixtures/dict.pyi]
+[out]
+main:2: error: Invalid index type "str" for Dict[None, None]; expected type None
+main:2: error: Incompatible types in assignment (expression has type "int", target has type None)
+
+[case testInferNonOptionalDictType]
+x = {}
+x["bar"] = 1
+x()  # E: Dict[str, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testNoneClassVariable]
+from typing import Optional
+class C:
+    x = None  # type: int
+    def __init__(self) -> None:
+        self.x = 0
+
+[case testNoneClassVariableInInit]
+from typing import Optional
+class C:
+    x = None  # type: int
+    def __init__(self) -> None:
+        self.x = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+[out]
+
+[case testMultipleAssignmentNoneClassVariableInInit]
+from typing import Optional
+class C:
+    x, y = None, None  # type: int, str
+    def __init__(self) -> None:
+        self.x = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+        self.y = None  # E: Incompatible types in assignment (expression has type None, variable has type "str")
+[out]
+
+[case testOverloadWithNone]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(x: None) -> str: pass
+@overload
+def f(x: int) -> int: pass
+reveal_type(f(None))  # E: Revealed type is 'builtins.str'
+reveal_type(f(0))  # E: Revealed type is 'builtins.int'
+
+[case testOptionalTypeOrTypePlain]
+from typing import Optional
+def f(a: Optional[int]) -> int:
+    return a or 0
+[out]
+
+[case testOptionalTypeOrTypeTypeVar]
+from typing import Optional, TypeVar
+T = TypeVar('T')
+def f(a: Optional[T], b: T) -> T:
+    return a or b
+[out]
+
+[case testOptionalTypeOrTypeBothOptional]
+from typing import Optional
+def f(a: Optional[int], b: Optional[int]) -> None:
+    reveal_type(a or b)
+def g(a: int, b: Optional[int]) -> None:
+    reveal_type(a or b)
+[out]
+main:3: error: Revealed type is 'Union[builtins.int, builtins.None]'
+main:5: error: Revealed type is 'Union[builtins.int, builtins.None]'
+
+[case testOptionalTypeOrTypeComplexUnion]
+from typing import Union
+def f(a: Union[int, str, None]) -> None:
+    reveal_type(a or 'default')
+[out]
+main:3: error: Revealed type is 'Union[builtins.int, builtins.str]'
+
+[case testOptionalTypeOrTypeNoTriggerPlain]
+from typing import Optional
+def f(a: Optional[int], b: int) -> int:
+    return b or a
+[out]
+main:3: error: Incompatible return value type (got "Optional[int]", expected "int")
+
+[case testOptionalTypeOrTypeNoTriggerTypeVar]
+from typing import Optional, TypeVar
+T = TypeVar('T')
+def f(a: Optional[T], b: T) -> T:
+    return b or a
+[out]
+main:4: error: Incompatible return value type (got "Optional[T]", expected "T")
+
+[case testNoneOrStringIsString]
+def f() -> str:
+    a = None
+    b = ''
+    return a or b
+[out]
+
+[case testNoneOrTypeVarIsTypeVar]
+from typing import TypeVar
+T = TypeVar('T')
+def f(b: T) -> T:
+    a = None
+    return a or b
+[out]
+
+[case testYieldNothingInFunctionReturningGenerator]
+from typing import Generator
+def f() -> Generator[None, None, None]:
+    yield
+[out]
+
+[case testNoneAndStringIsNone]
+a = None
+b = "foo"
+reveal_type(a and b)  # E: Revealed type is 'builtins.None'
+
+[case testNoneMatchesObjectInOverload]
+import a
+a.f(None)
+
+[file a.pyi]
+from typing import overload
+@overload
+def f() -> None: ...
+@overload
+def f(o: object) -> None: ...
+
+[case testGenericSubclassReturningNone]
+from typing import Generic, TypeVar
+
+T = TypeVar('T')
+
+class Base(Generic[T]):
+  def f(self) -> T:
+    pass
+
+class SubNone(Base[None]):
+  def f(self) -> None:
+    pass
+
+class SubInt(Base[int]):
+  def f(self) -> int:
+    return 1
+
+[case testUseOfNoneReturningFunction]
+from typing import Optional
+def f() -> None:
+    pass
+
+def g(x: Optional[int]) -> int:
+  pass
+
+x = f()  # E: "f" does not return a value
+f() + 1  # E: "f" does not return a value
+g(f())  # E: "f" does not return a value
+
+[case testEmptyReturn]
+def f() -> None:
+    return
+
+[case testReturnNone]
+def f() -> None:
+    return None
+
+[case testNoneCallable]
+from typing import Callable
+def f() -> None: pass
+x = f  # type: Callable[[], None]
+
+[case testOptionalCallable]
+from typing import Callable, Optional
+T = Optional[Callable[..., None]]
+
+[case testAnyTypeInPartialTypeList]
+# flags: --check-untyped-defs
+def f(): ...
+
+def lookup_field(name, obj):
+    try:
+        pass
+    except:
+        attr = f()
+    else:
+        attr = None
+
+[case testTernaryWithNone]
+reveal_type(None if bool() else 0)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[builtins fixtures/bool.pyi]
+
+[case testListWithNone]
+reveal_type([0, None, 0])    # E: Revealed type is 'builtins.list[Union[builtins.int, builtins.None]]'
+[builtins fixtures/list.pyi]
+
+[case testOptionalWhitelistSuppressesOptionalErrors]
+# flags: --strict-optional-whitelist
+import a
+import b
+[file a.py]
+from typing import Optional
+x = None  # type: Optional[str]
+x + "foo"
+
+[file b.py]
+from typing import Optional
+x = None  # type: Optional[int]
+x + 1
+
+[case testOptionalWhitelistPermitsOtherErrors]
+# flags: --strict-optional-whitelist
+import a
+import b
+[file a.py]
+from typing import Optional
+x = None  # type: Optional[str]
+x + "foo"
+
+[file b.py]
+from typing import Optional
+x = None  # type: Optional[int]
+x + 1
+1 + "foo"
+[out]
+tmp/b.py:4: error: Unsupported operand types for + ("int" and "str")
+
+[case testOptionalWhitelistPermitsWhitelistedFiles]
+# flags: --strict-optional-whitelist **/a.py
+import a
+import b
+[file a.py]
+from typing import Optional
+x = None  # type: Optional[str]
+x + "foo"
+
+[file b.py]
+from typing import Optional
+x = None  # type: Optional[int]
+x + 1
+[out]
+tmp/a.py:3: error: Unsupported left operand type for + (some union)
+
+[case testNoneContextInference]
+from typing import Dict, List
+def f() -> List[None]:
+    return []
+def g() -> Dict[None, None]:
+    return {}
+[builtins fixtures/dict.pyi]
+
+[case testRaiseFromNone]
+raise BaseException from None
+[builtins fixtures/exception.pyi]
+
+[case testOptionalNonPartialTypeWithNone]
+from typing import Generator
+def f() -> Generator[str, None, None]: pass
+x = f()
+reveal_type(x)  # E: Revealed type is 'typing.Generator[builtins.str, builtins.None, builtins.None]'
+l = [f()]
+reveal_type(l)  # E: Revealed type is 'builtins.list[typing.Generator*[builtins.str, builtins.None, builtins.None]]'
+[builtins fixtures/list.pyi]
+
+[case testNoneListTernary]
+x = [None] if "" else [1]  # E: List item 0 has incompatible type "int"
+[builtins fixtures/list.pyi]
+
+[case testListIncompatibleErrorMessage]
+from typing import List, Callable
+
+def foo(l: List[Callable[[], str]]) -> None: pass
+def f() -> int:
+    return 42
+
+foo([f])  # E: List item 0 has incompatible type Callable[[], int]
+[builtins fixtures/list.pyi]
+
+[case testInferEqualsNotOptional]
+from typing import Optional
+x = ''  # type: Optional[str]
+if x == '<string>':
+    reveal_type(x)  # E: Revealed type is 'builtins.str'
+else:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+[builtins fixtures/ops.pyi]
+
+[case testInferEqualsNotOptionalWithUnion]
+from typing import Union
+x = ''  # type: Union[str, int, None]
+if x == '<string>':
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int]'
+else:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
+[builtins fixtures/ops.pyi]
+
+[case testInferEqualsNotOptionalWithOverlap]
+from typing import Union
+x = ''  # type: Union[str, int, None]
+if x == object():
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int]'
+else:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
+[builtins fixtures/ops.pyi]
+
+[case testInferEqualsStillOptionalWithNoOverlap]
+from typing import Optional
+x = ''  # type: Optional[str]
+if x == 0:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+else:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+[builtins fixtures/ops.pyi]
+
+[case testInferEqualsStillOptionalWithBothOptional]
+from typing import Union
+x = ''  # type: Union[str, int, None]
+y = ''  # type: Union[str, None]
+if x == y:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
+else:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
+[builtins fixtures/ops.pyi]
+
+[case testWarnNoReturnWorksWithStrictOptional]
+# flags: --warn-no-return
+def f() -> None:
+  1 + 1  # no error
+
+def g() -> int:
+  1 + 1  #
+[out]
+main:5: error: Missing return statement
+
+[case testGenericTypeAliasesOptional]
+from typing import TypeVar, Generic, Optional
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        self.x = x
+
+ONode = Optional[Node[T]]
+def f(x: T) -> ONode[T]:
+    if 1 > 0:
+        return Node(x)
+    else:
+        return None
+
+x = None # type: ONode[int]
+x = f(1)
+x = f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+x.x = 1 # E: Item "None" of "Optional[Node[int]]" has no attribute "x"
+if x is not None:
+    x.x = 1 # OK here
+
+[builtins fixtures/ops.pyi]
+
+[case testOptionalLambdaInference]
+from typing import Optional, Callable
+f = None # type: Optional[Callable[[int], None]]
+f = lambda x: None
+f(0)
+[builtins fixtures/function.pyi]
+
+[case testDontSimplifyNoneUnionsWithStrictOptional]
+from typing import Any, TypeVar, Union
+A = None  # type: Any
+class C(A): pass
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+a = None # type: Any
+
+# Test both orders
+reveal_type(u(C(), None))  # E: Revealed type is 'Union[builtins.None, __main__.C*]'
+reveal_type(u(None, C()))  # E: Revealed type is 'Union[__main__.C*, builtins.None]'
+
+reveal_type(u(a, None))  # E: Revealed type is 'Union[builtins.None, Any]'
+reveal_type(u(None, a))  # E: Revealed type is 'Union[Any, builtins.None]'
+
+reveal_type(u(1, None))  # E: Revealed type is 'Union[builtins.None, builtins.int*]'
+reveal_type(u(None, 1))  # E: Revealed type is 'Union[builtins.int*, builtins.None]'
+
+[case testOptionalAndAnyBaseClass]
+from typing import Any, Optional
+A = None  # type: Any
+class C(A):
+    pass
+x = None  # type: Optional[C]
+x.foo()  # E: Item "None" of "Optional[C]" has no attribute "foo"
+
+[case testIsinstanceAndOptionalAndAnyBase]
+from typing import Any, Optional
+
+B = None  # type: Any
+class A(B): pass
+
+def f(a: Optional[A]):
+    reveal_type(a) # E: Revealed type is 'Union[__main__.A, builtins.None]'
+    if a is not None:
+        reveal_type(a) # E: Revealed type is '__main__.A'
+    else:
+        reveal_type(a) # E: Revealed type is 'builtins.None'
+    reveal_type(a) # E: Revealed type is 'Union[__main__.A, builtins.None]'
+[builtins fixtures/isinstance.pyi]
+
+[case testFlattenOptionalUnion]
+from typing import Optional, Union
+
+x: Optional[Union[int, str]]
+reveal_type(x) # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.None]'
+y: Optional[Union[int, None]]
+reveal_type(y) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+
+[case testUnionTruthinessTracking]
+from typing import Optional, Any
+def test_or_shortcut(value: Optional[Any]) -> None:
+    if not value:
+        pass
+    if not value or value.get('foo') == 'hello':
+        pass
+[builtins fixtures/bool.pyi]
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
new file mode 100644
index 0000000..f3e5b99
--- /dev/null
+++ b/test-data/unit/check-overloading.test
@@ -0,0 +1,1159 @@
+-- Test cases for function overloading
+
+[case testTypeCheckOverloadWithImplementation]
+from typing import overload, Any
+@overload
+def f(x: 'A') -> 'B': ...
+@overload
+def f(x: 'B') -> 'A': ...
+
+def f(x: Any) -> Any:
+    pass
+
+reveal_type(f(A())) # E: Revealed type is '__main__.B'
+reveal_type(f(B())) # E: Revealed type is '__main__.A'
+
+class A: pass
+class B: pass
+[builtins fixtures/isinstance.pyi]
+
+[case testOverloadNeedsImplementation]
+from typing import overload, Any
+@overload  # E: An overloaded function outside a stub file must have an implementation
+def f(x: 'A') -> 'B': ...
+@overload
+def f(x: 'B') -> 'A': ...
+
+reveal_type(f(A())) # E: Revealed type is '__main__.B'
+reveal_type(f(B())) # E: Revealed type is '__main__.A'
+
+class A: pass
+class B: pass
+[builtins fixtures/isinstance.pyi]
+
+[case testSingleOverloadNoImplementation]
+from typing import overload, Any
+@overload  # E: Single overload definition, multiple required
+def f(x: 'A') -> 'B': ...
+
+class A: pass
+class B: pass
+[builtins fixtures/isinstance.pyi]
+
+[case testOverloadByAnyOtherName]
+from typing import overload as rose
+from typing import Any
+@rose
+def f(x: 'A') -> 'B': ...
+@rose
+def f(x: 'B') -> 'A': ...
+
+def f(x: Any) -> Any:
+    pass
+
+reveal_type(f(A())) # E: Revealed type is '__main__.B'
+reveal_type(f(B())) # E: Revealed type is '__main__.A'
+
+class A: pass
+class B: pass
+[builtins fixtures/isinstance.pyi]
+
+[case testTypeCheckOverloadWithDecoratedImplementation]
+from typing import overload, Any
+
+def deco(fun): ...
+
+@overload
+def f(x: 'A') -> 'B': ...
+@overload
+def f(x: 'B') -> 'A': ...
+
+@deco
+def f(x: Any) -> Any:
+    pass
+
+reveal_type(f(A())) # E: Revealed type is '__main__.B'
+reveal_type(f(B())) # E: Revealed type is '__main__.A'
+
+class A: pass
+class B: pass
+[builtins fixtures/isinstance.pyi]
+
+[case testOverloadDecoratedImplementationNotLast]
+from typing import overload, Any
+
+def deco(fun): ...
+
+@overload
+def f(x: 'A') -> 'B': ...
+
+@deco  # E: The implementation for an overloaded function must come last
+def f(x: Any) -> Any:
+    pass
+
+@overload
+def f(x: 'B') -> 'A': ...
+
+class A: pass
+class B: pass
+[builtins fixtures/isinstance.pyi]
+
+[case testOverloadImplementationNotLast]
+from typing import overload, Any
+
+@overload
+def f(x: 'A') -> 'B': ...
+
+def f(x: Any) -> Any:  # E: The implementation for an overloaded function must come last
+    pass
+
+@overload
+def f(x: 'B') -> 'A': ...
+
+class A: pass
+class B: pass
+[builtins fixtures/isinstance.pyi]
+
+[case testDecoratedRedefinitionIsNotOverload]
+from typing import overload, Any
+
+def deco(fun): ...
+
+@deco
+def f(x: 'A') -> 'B': ...
+@deco  # E: Name 'f' already defined
+def f(x: 'B') -> 'A': ...
+@deco  # E: Name 'f' already defined
+def f(x: Any) -> Any: ...
+
+class A: pass
+class B: pass
+[builtins fixtures/isinstance.pyi]
+
+[case testTypeCheckOverloadWithImplementationPy2]
+# flags: --python-version 2.7
+
+from typing import overload
+@overload
+def f(x):
+    # type: (A) -> B
+    pass
+
+@overload
+def f(x):
+    # type: (B) -> A
+    pass
+
+def f(x):
+    pass
+
+reveal_type(f(A()))  # E: Revealed type is '__main__.B'
+reveal_type(f(B()))  # E: Revealed type is '__main__.A'
+
+class A: pass
+class B: pass
+[builtins fixtures/isinstance.pyi]
+
+[case testTypeCheckOverloadWithImplementationError]
+from typing import overload, Any
+
+@overload
+def f(x: 'A') -> 'B': ...
+@overload
+def f(x: 'B') -> 'A': ...
+
+def f(x: Any) -> Any:
+    foo = 1
+    foo = "bar"  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+@overload
+def g(x: 'A') -> 'B': ...
+@overload
+def g(x: 'B') -> 'A': ...
+
+def g(x):
+    foo = 1
+    foo = "bar"
+
+reveal_type(f(A()))  # E: Revealed type is '__main__.B'
+reveal_type(f(B()))  # E: Revealed type is '__main__.A'
+
+class A: pass
+class B: pass
+[builtins fixtures/isinstance.pyi]
+
+[case testTypeCheckOverloadWithImplTooSpecificArg]
+from typing import overload, Any
+
+class A: pass
+class B: pass
+
+a = A()
+
+@overload
+def f(x: 'A') -> 'B': ...
+@overload
+def f(x: 'B') -> 'A': ...
+
+def f(x: 'A') -> Any: # E: Overloaded function implementation does not accept all possible arguments of signature 2
+    pass
+
+reveal_type(f(A())) # E: Revealed type is '__main__.B'
+reveal_type(f(B())) # E: Revealed type is '__main__.A'
+
+[builtins fixtures/isinstance.pyi]
+
+[case testTypeCheckOverloadWithImplTooSpecificRetType]
+from typing import overload, Any
+
+class A: pass
+class B: pass
+
+a = A()
+
+@overload
+def f(x: 'A') -> 'B': ...
+@overload
+def f(x: 'B') -> 'A': ...
+
+def f(x: Any) -> 'B': # E: Overloaded function implementation cannot produce return type of signature 2
+    return B()
+
+reveal_type(f(A())) # E: Revealed type is '__main__.B'
+reveal_type(f(B())) # E: Revealed type is '__main__.A'
+
+[builtins fixtures/isinstance.pyi]
+
+[case testTypeCheckOverloadWithImplTypeVar]
+from typing import overload, Any, TypeVar
+
+T = TypeVar('T')
+
+class A: pass
+class B: pass
+
+a = A()
+
+@overload
+def f(x: 'A') -> 'A': ...
+@overload
+def f(x: 'B') -> 'B': ...
+
+def f(x: T) -> T:
+    ...
+
+reveal_type(f(A())) # E: Revealed type is '__main__.A'
+reveal_type(f(B())) # E: Revealed type is '__main__.B'
+
+[builtins fixtures/isinstance.pyi]
+
+[case testTypeCheckOverloadWithImplTypeVarProblems]
+from typing import overload, Any, TypeVar
+
+T = TypeVar('T', bound='A')
+
+class A: pass
+class B: pass
+
+a = A()
+
+@overload
+def f(x: 'A') -> 'A': ...
+@overload
+def f(x: 'B') -> 'B': ...
+
+def f(x: Any) -> T:  # E: Type variable mismatch between overload signature 2 and implementation
+    ...
+
+reveal_type(f(A())) # E: Revealed type is '__main__.A'
+reveal_type(f(B())) # E: Revealed type is '__main__.B'
+
+[builtins fixtures/isinstance.pyi]
+
+[case testTypeCheckOverloadedFunctionBody]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(x: 'A'):
+    x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    x = A()
+@overload
+def f(x: 'B'):
+    x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    x = B()
+class A: pass
+class B: pass
+[out]
+
+[case testTypeCheckOverloadedMethodBody]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A:
+    @overload
+    def f(self, x: 'A'):
+        x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+        x = A()
+    @overload
+    def f(self, x: 'B'):
+        x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+        x = B()
+class B: pass
+[out]
+
+[case testCallToOverloadedFunction]
+from foo import *
+[file foo.pyi]
+from typing import overload
+f(C()) # E: No overload variant of "f" matches argument types [foo.C]
+f(A())
+f(B())
+
+@overload
+def f(x: 'A') -> None: pass
+@overload
+def f(x: 'B') -> None: pass
+
+class A: pass
+class B: pass
+class C: pass
+
+[case testOverloadedFunctionReturnValue]
+from foo import *
+[file foo.pyi]
+from typing import overload
+a, b = None, None # type: (A, B)
+b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = f(a)
+b = f(b)
+
+@overload
+def f(x: 'A') -> 'A': pass
+@overload
+def f(x: 'B') -> 'B': pass
+class A: pass
+class B: pass
+
+[case testCallToOverloadedMethod]
+from foo import *
+[file foo.pyi]
+from typing import overload
+A().f(C()) # E: No overload variant of "f" of "A" matches argument types [foo.C]
+A().f(A())
+A().f(B())
+
+class A:
+  @overload
+  def f(self, x: 'A') -> None: pass
+  @overload
+  def f(self, x: 'B') -> None: pass
+
+class B: pass
+class C: pass
+
+[case testOverloadedMethodReturnValue]
+from foo import *
+[file foo.pyi]
+from typing import overload
+a, b = None, None # type: (A, B)
+b = a.f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = a.f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = a.f(a)
+b = a.f(b)
+
+class A:
+  @overload
+  def f(self, x: 'A') -> 'A': pass
+  @overload
+  def f(self, x: 'B') -> 'B': pass
+class B: pass
+
+[case testOverloadsWithDifferentArgumentCounts]
+from foo import *
+[file foo.pyi]
+from typing import overload
+a, b = None, None # type: (A, B)
+a = f(a)
+b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+f(b)     # E: No overload variant of "f" matches argument types [foo.B]
+b = f(b, a)
+a = f(b, a) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+f(a, a)     # E: No overload variant of "f" matches argument types [foo.A, foo.A]
+f(b, b)     # E: No overload variant of "f" matches argument types [foo.B, foo.B]
+
+@overload
+def f(x: 'A') -> 'A': pass
+@overload
+def f(x: 'B', y: 'A') -> 'B': pass
+class A: pass
+class B: pass
+
+[case testGenericOverloadVariant]
+from foo import *
+[file foo.pyi]
+from typing import overload, TypeVar, Generic
+t = TypeVar('t')
+ab, ac, b, c = None, None, None, None # type: (A[B], A[C], B, C)
+b = f(ab)
+c = f(ac)
+b = f(ac) # E: Incompatible types in assignment (expression has type "C", variable has type "B")
+b = f(b)
+c = f(b)  # E: Incompatible types in assignment (expression has type "B", variable has type "C")
+@overload
+def f(x: 'A[t]') -> t: pass
+@overload
+def f(x: 'B') -> 'B': pass
+class A(Generic[t]): pass
+class B: pass
+class C: pass
+
+[case testOverloadedInit]
+from foo import *
+[file foo.pyi]
+from typing import overload
+a, b = None, None # type: (A, B)
+a = A(a)
+a = A(b)
+a = A(object()) # E: No overload variant of "A" matches argument types [builtins.object]
+
+class A:
+  @overload
+  def __init__(self, a: 'A') -> None: pass
+  @overload
+  def __init__(self, b: 'B') -> None: pass
+class B: pass
+
+[case testIntersectionTypeCompatibility]
+from foo import *
+[file foo.pyi]
+from typing import overload, Callable
+o = None # type: object
+a = None # type: A
+
+a = f # E: Incompatible types in assignment (expression has type overloaded function, variable has type "A")
+o = f
+
+@overload
+def f(a: 'A') -> None: pass
+@overload
+def f(a: Callable[[], None]) -> None: pass
+class A: pass
+
+[case testCompatibilityOfIntersectionTypeObjectWithStdType]
+from foo import *
+[file foo.pyi]
+from typing import overload
+t, a = None, None # type: (type, A)
+
+a = A # E: Incompatible types in assignment (expression has type Type[A], variable has type "A")
+t = A
+
+class A:
+    @overload
+    def __init__(self, a: 'A') -> None: pass
+    @overload
+    def __init__(self, a: 'B') -> None: pass
+class B: pass
+
+[case testOverloadedGetitem]
+from foo import *
+[file foo.pyi]
+from typing import overload
+a, b = None, None # type: int, str
+a = A()[a]
+b = A()[a] # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+b = A()[b]
+a = A()[b] # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+class A:
+    @overload
+    def __getitem__(self, a: int) -> int: pass
+    @overload
+    def __getitem__(self, b: str) -> str: pass
+
+[case testOverloadedGetitemWithGenerics]
+from foo import *
+[file foo.pyi]
+from typing import TypeVar, Generic, overload
+t = TypeVar('t')
+a, b, c = None, None, None # type: (A, B, C[A])
+a = c[a]
+b = c[a] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = c[b]
+b = c[b] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+class C(Generic[t]):
+    @overload
+    def __getitem__(self, a: 'A') -> t: pass
+    @overload
+    def __getitem__(self, b: 'B') -> t: pass
+class A: pass
+class B: pass
+
+[case testImplementingOverloadedMethod]
+from foo import *
+[file foo.pyi]
+from typing import overload
+from abc import abstractmethod, ABCMeta
+
+class I(metaclass=ABCMeta):
+    @overload
+    @abstractmethod
+    def f(self) -> None: pass
+    @overload
+    @abstractmethod
+    def f(self, a: 'A') -> None: pass
+class A(I):
+    @overload
+    def f(self) -> None: pass
+    @overload
+    def f(self, a: 'A') -> None: pass
+
+[case testOverloadWithFunctionType]
+from foo import *
+[file foo.pyi]
+from typing import overload, Callable
+class A: pass
+@overload
+def f(x: A) -> None: pass
+@overload
+def f(x: Callable[[], None]) -> None: pass
+
+f(A())
+[builtins fixtures/function.pyi]
+
+[case testVarArgsOverload]
+from foo import *
+[file foo.pyi]
+from typing import overload, Any
+@overload
+def f(x: 'A', *more: Any) -> 'A': pass
+@overload
+def f(x: 'B', *more: Any) -> 'A': pass
+f(A())
+f(A(), A, A)
+f(B())
+f(B(), B)
+f(B(), B, B)
+f(object()) # E: No overload variant of "f" matches argument types [builtins.object]
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testVarArgsOverload2]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(x: 'A', *more: 'B') -> 'A': pass
+@overload
+def f(x: 'B', *more: 'A') -> 'A': pass
+f(A(), B())
+f(A(), B(), B())
+f(A(), A(), B()) # E: No overload variant of "f" matches argument types [foo.A, foo.A, foo.B]
+f(A(), B(), A()) # E: No overload variant of "f" matches argument types [foo.A, foo.B, foo.A]
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testOverloadWithTypeObject]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(a: 'A', t: type) -> None: pass
+@overload
+def f(a: 'B', t: type) -> None: pass
+f(A(), B)
+f(B(), A)
+class A: pass
+class B: pass
+[builtins fixtures/function.pyi]
+
+[case testOverloadedInitAndTypeObjectInOverload]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(t: type) -> 'A': pass
+@overload
+def f(t: 'A') -> 'B': pass
+a, b = None, None # type: (A, B)
+a = f(A)
+b = f(a)
+b = f(A) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(a) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+class A:
+   @overload
+   def __init__(self) -> None: pass
+   @overload
+   def __init__(self, a: 'A') -> None: pass
+class B:
+    pass
+
+[case testOverlappingErasedSignatures]
+from foo import *
+[file foo.pyi]
+from typing import overload, List
+@overload
+def f(a: List[int]) -> int: pass
+@overload
+def f(a: List[str]) -> int: pass
+list_int = [] # type: List[int]
+list_str = [] # type: List[str]
+list_object = [] # type: List[object]
+n = f(list_int)
+m = f(list_str)
+n = 1
+m = 1
+n = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+m = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+f(list_object) # E: Argument 1 to "f" has incompatible type List[object]; expected List[int]
+[builtins fixtures/list.pyi]
+
+[case testOverlappingOverloadSignatures]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A: pass
+class B(A): pass
+@overload
+def f(x: B) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def f(x: A) -> str: pass
+
+[case testContravariantOverlappingOverloadSignatures]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A: pass
+class B(A): pass
+@overload
+def f(x: A) -> A: pass
+@overload
+def f(x: B) -> B: pass # This is more specific than the first item, and thus
+                       # will never be called.
+
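+-- Editorial sketch, not from the upstream suite: overload resolution picks the
+-- first variant whose signature matches the arguments, so a more specific
+-- variant placed after a more general one (as in the case above) is unreachable.
+-- The case name below is hypothetical.
+-- [case testContravariantOverloadOrderSketch]
+-- from foo import *
+-- [file foo.pyi]
+-- from typing import overload
+-- class A: pass
+-- class B(A): pass
+-- @overload
+-- def f(x: A) -> A: pass
+-- @overload
+-- def f(x: B) -> B: pass
+-- reveal_type(f(B()))  # would reveal '__main__.A': the first variant already matches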
+[case testPartiallyCovariantOverlappingOverloadSignatures]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A: pass
+class B(A): pass
+@overload
+def f(x: B) -> A: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def f(x: A) -> B: pass
+
+[case testPartiallyContravariantOverloadSignatures]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A: pass
+class B(A): pass
+@overload
+def g(x: A) -> int: pass # Fine, since A is a supertype of B.
+@overload
+def g(x: B) -> str: pass
+
+[case testCovariantOverlappingOverloadSignatures]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A: pass
+class B(A): pass
+@overload
+def g(x: B) -> B: pass
+@overload
+def g(x: A) -> A: pass
+
+[case testCovariantOverlappingOverloadSignaturesWithSomeSameArgTypes]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A: pass
+class B(A): pass
+@overload
+def g(x: int, y: B) -> B: pass
+@overload
+def g(x: int, y: A) -> A: pass
+
+[case testCovariantOverlappingOverloadSignaturesWithAnyType]
+from foo import *
+[file foo.pyi]
+from typing import Any, overload
+@overload
+def g(x: int) -> int: pass
+@overload
+def g(x: Any) -> Any: pass
+
+[case testContravariantOverlappingOverloadSignaturesWithAnyType]
+from foo import *
+[file foo.pyi]
+from typing import Any, overload
+@overload
+def g(x: Any) -> Any: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def g(x: int) -> int: pass
+
+[case testOverloadedLtAndGtMethods]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A:
+    def __lt__(self, x: A) -> int: pass
+    def __gt__(self, x: A) -> int: pass
+class B:
+    @overload
+    def __lt__(self, x: B) -> int: pass
+    @overload
+    def __lt__(self, x: A) -> int: pass
+    @overload
+    def __gt__(self, x: B) -> int: pass
+    @overload
+    def __gt__(self, x: A) -> int: pass
+A() < A()
+A() < B()
+B() < A()
+B() < B()
+A() < object() # E: Unsupported operand types for < ("A" and "object")
+B() < object() # E: No overload variant of "__lt__" of "B" matches argument types [builtins.object]
+
+[case testOverloadedForwardMethodAndCallingReverseMethod]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A:
+    @overload
+    def __add__(self, x: 'A') -> int: pass
+    @overload
+    def __add__(self, x: int) -> int: pass
+class B:
+    def __radd__(self, x: A) -> int: pass
+A() + A()
+A() + 1
+A() + B()
+A() + '' # E: No overload variant of "__add__" of "A" matches argument types [builtins.str]
+
+[case testOverrideOverloadedMethodWithMoreGeneralArgumentTypes]
+from foo import *
+[file foo.pyi]
+from typing import overload
+
+class IntSub(int): pass
+
+class StrSub(str): pass
+class A:
+    @overload
+    def f(self, x: IntSub) -> int: return 0
+    @overload
+    def f(self, x: StrSub) -> str: return ''
+class B(A):
+    @overload
+    def f(self, x: int) -> int: return 0
+    @overload
+    def f(self, x: str) -> str: return ''
+[out]
+
+[case testOverrideOverloadedMethodWithMoreSpecificArgumentTypes]
+from foo import *
+[file foo.pyi]
+from typing import overload
+
+class IntSub(int): pass
+
+class StrSub(str): pass
+class A:
+    @overload
+    def f(self, x: int) -> int: return 0
+    @overload
+    def f(self, x: str) -> str: return ''
+class B(A):
+    @overload
+    def f(self, x: IntSub) -> int: return 0
+    @overload
+    def f(self, x: str) -> str: return ''
+class C(A):
+    @overload
+    def f(self, x: int) -> int: return 0
+    @overload
+    def f(self, x: StrSub) -> str: return ''
+class D(A):
+    @overload
+    def f(self, x: int) -> int: return 0
+    @overload
+    def f(self, x: str) -> str: return ''
+[out]
+tmp/foo.pyi:12: error: Signature of "f" incompatible with supertype "A"
+tmp/foo.pyi:17: error: Signature of "f" incompatible with supertype "A"
+
+[case testOverloadingAndDucktypeCompatibility]
+from foo import *
+[file foo.pyi]
+from typing import overload, _promote
+
+class A: pass
+
+@_promote(A)
+class B: pass
+
+@overload
+def f(n: B) -> B:
+    return n
+@overload
+def f(n: A) -> A:
+    return n
+
+f(B()) + 'x'  # E: Unsupported left operand type for + ("B")
+f(A()) + 'x'  # E: Unsupported left operand type for + ("A")
+
+[case testOverloadingAndIntFloatSubtyping]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(x: float) -> None: pass
+@overload
+def f(x: str) -> None: pass
+f(1.1)
+f('')
+f(1)
+f(()) # E: No overload variant of "f" matches argument types [Tuple[]]
+[builtins fixtures/primitives.pyi]
+[out]
+
+[case testOverloadingVariableInputs]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(x: int, y: int) -> None: pass
+@overload
+def f(x: int) -> None: pass
+f(1)
+f(1, 2)
+z = (1, 2)
+f(*z)
+[builtins fixtures/primitives.pyi]
+[out]
+
+[case testTypeInferenceSpecialCaseWithOverloading]
+from foo import *
+[file foo.pyi]
+from typing import overload
+
+class A:
+    def __add__(self, x: A) -> A: pass
+class B:
+    def __radd__(self, x: A) -> B: pass
+
+@overload
+def f(x: A) -> A: pass
+@overload
+def f(x: B) -> B: pass
+
+f(A() + B())() # E: "B" not callable
+
+[case testKeywordArgOverload]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(x: int, y: str) -> int: pass
+@overload
+def f(x: str, y: int) -> str: pass
+f(x=1, y='')() # E: "int" not callable
+f(y=1, x='')() # E: "str" not callable
+
+[case testIgnoreOverloadVariantBasedOnKeywordArg]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(x: int) -> int: pass
+@overload
+def f(y: int) -> str: pass
+f(x=1)() # E: "int" not callable
+f(y=1)() # E: "str" not callable
+
+[case testOverloadWithTupleVarArg]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(x: int, y: str) -> int: pass
+@overload
+def f(*x: str) -> str: pass
+f(*(1,))() # E: No overload variant of "f" matches argument types [Tuple[builtins.int]]
+f(*('',))() # E: "str" not callable
+f(*(1, ''))() # E: "int" not callable
+f(*(1, '', 1))() # E: No overload variant of "f" matches argument types [Tuple[builtins.int, builtins.str, builtins.int]]
+
+[case testPreferExactSignatureMatchInOverload]
+from foo import *
+[file foo.pyi]
+from typing import overload, List
+@overload
+def f(x: int, y: List[int] = None) -> int: pass
+@overload
+def f(x: int, y: List[str] = None) -> int: pass
+f(y=[1], x=0)() # E: "int" not callable
+f(y=[''], x=0)() # E: "int" not callable
+a = f(y=[['']], x=0) # E: List item 0 has incompatible type List[str]
+a() # E: "int" not callable
+[builtins fixtures/list.pyi]
+
+[case testOverloadWithDerivedFromAny]
+from foo import *
+[file foo.pyi]
+from typing import Any, overload
+Base = None  # type: Any
+
+class C:
+    @overload
+    def __init__(self, a: str) -> None: pass
+    @overload
+    def __init__(self, a: int) -> None: pass
+
+class Derived(Base):
+    def to_dict(self) -> C:
+        return C(self)  # fails without the fix for #1363
+C(Derived())  # fails without the hack
+C(Base())  # Always ok
+
+[case testOverloadWithBoundedTypeVar]
+from foo import *
+[file foo.pyi]
+from typing import overload, TypeVar
+T = TypeVar('T', bound=str)
+@overload
+def f(x: T) -> T: pass
+@overload
+def f(x: int) -> bool: pass
+class mystr(str): pass
+
+f('x')() # E: "str" not callable
+f(1)() # E: "bool" not callable
+f(1.1) # E: No overload variant of "f" matches argument types [builtins.float]
+f(mystr())() # E: "mystr" not callable
+[builtins fixtures/primitives.pyi]
+
+[case testOverloadedCallWithVariableTypes]
+from foo import *
+[file foo.pyi]
+from typing import overload, TypeVar, List
+T = TypeVar('T', bound=str)
+@overload
+def f(x: T) -> T: pass
+@overload
+def f(x: List[T]) -> None: pass
+class mystr(str): pass
+
+U = TypeVar('U', bound=mystr)
+V = TypeVar('V')
+def g(x: U, y: V) -> None:
+    f(x)() # E: "mystr" not callable
+    f(y) # E: No overload variant of "f" matches argument types [V`-2]
+    a = f([x]) # E: "f" does not return a value
+    f([y]) # E: Type argument 1 of "f" has incompatible value "V"
+    f([x, y]) # E: Type argument 1 of "f" has incompatible value "object"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testOverlapWithTypeVars]
+from foo import *
+[file foo.pyi]
+from typing import overload, TypeVar, Sequence
+T = TypeVar('T', bound=str)
+@overload
+def f(x: Sequence[T]) -> None: pass
+@overload
+def f(x: Sequence[int]) -> int: pass
+# These are considered overlapping despite the bound on T due to runtime type erasure.
+[out]
+tmp/foo.pyi:4: error: Overloaded function signatures 1 and 2 overlap with incompatible return types
+
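+-- Editorial sketch, not from the upstream suite: the bound on T is erased at
+-- runtime, so both variants in the case above accept the same values (for
+-- example an empty sequence), which is why they are reported as overlapping.
+-- The case name below is hypothetical.
+-- [case testOverlapWithTypeVarsErasureSketch]
+-- from foo import *
+-- [file foo.pyi]
+-- from typing import overload, TypeVar, Sequence
+-- T = TypeVar('T', bound=str)
+-- @overload
+-- def f(x: Sequence[T]) -> None: pass
+-- @overload
+-- def f(x: Sequence[int]) -> int: pass
+-- f([])  # an empty sequence matches either variant once element types are erased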
+[case testOverlapWithTypeVarsWithValues]
+from foo import *
+[file foo.pyi]
+from typing import overload, TypeVar
+AnyStr = TypeVar('AnyStr', bytes, str)
+
+@overload
+def f(x: int) -> int: pass
+@overload
+def f(x: AnyStr) -> str: pass
+
+f(1)() # E: "int" not callable
+f('1')() # E: "str" not callable
+f(b'1')() # E: "str" not callable
+f(1.0) # E: No overload variant of "f" matches argument types [builtins.float]
+
+@overload
+def g(x: AnyStr, *a: AnyStr) -> None: pass
+@overload
+def g(x: int, *a: AnyStr) -> None: pass
+
+g('foo')
+g('foo', 'bar')
+g('foo', b'bar') # E: Type argument 1 of "g" has incompatible value "object"
+g(1)
+g(1, 'foo')
+g(1, 'foo', b'bar') # E: Type argument 1 of "g" has incompatible value "object"
+[builtins fixtures/primitives.pyi]
+
+[case testBadOverlapWithTypeVarsWithValues]
+from foo import *
+[file foo.pyi]
+from typing import overload, TypeVar
+AnyStr = TypeVar('AnyStr', bytes, str)
+
+@overload
+def f(x: AnyStr) -> None: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def f(x: str) -> bool: pass
+[builtins fixtures/primitives.pyi]
+
+[case testOverlappingOverloadCounting]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class A: pass
+class B(A): pass
+@overload
+def f(x: int) -> None: pass
+@overload
+def f(x: B) -> str: pass # E: Overloaded function signatures 2 and 3 overlap with incompatible return types
+@overload
+def f(x: A) -> int: pass
+
+[case testOverloadWithTupleMatchingTypeVar]
+from foo import *
+[file foo.pyi]
+from typing import TypeVar, Generic, Tuple, overload
+
+T = TypeVar('T')
+
+class A(Generic[T]):
+    @overload
+    def f(self, arg: T) -> None:
+        pass
+    @overload
+    def f(self, arg: T, default: int) -> None:
+        pass
+
+b = A()  # type: A[Tuple[int, int]]
+b.f((0, 0))
+b.f((0, '')) # E: Argument 1 to "f" of "A" has incompatible type "Tuple[int, str]"; expected "Tuple[int, int]"
+
+[case testSingleOverloadStub]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(a: int) -> None: pass
+def f(a: int) -> None: pass
+[out]
+tmp/foo.pyi:2: error: Single overload definition, multiple required
+tmp/foo.pyi:4: error: An implementation for an overloaded function is not allowed in a stub file
+
+[case testSingleOverload2]
+from foo import *
+[file foo.pyi]
+from typing import overload
+def f(a: int) -> None: pass
+@overload
+def f(a: str) -> None: pass
+[out]
+tmp/foo.pyi:3: error: Name 'f' already defined
+tmp/foo.pyi:3: error: Single overload definition, multiple required
+
+[case testNonconsecutiveOverloads]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(a: int) -> None: pass
+1
+@overload
+def f(a: str) -> None: pass
+[out]
+tmp/foo.pyi:2: error: Single overload definition, multiple required
+tmp/foo.pyi:5: error: Name 'f' already defined
+tmp/foo.pyi:5: error: Single overload definition, multiple required
+
+[case testNonconsecutiveOverloadsMissingFirstOverload]
+from foo import *
+[file foo.pyi]
+from typing import overload
+def f(a: int) -> None: pass
+1
+@overload
+def f(a: str) -> None: pass
+[out]
+tmp/foo.pyi:4: error: Name 'f' already defined
+tmp/foo.pyi:4: error: Single overload definition, multiple required
+
+[case testNonconsecutiveOverloadsMissingLaterOverload]
+from foo import *
+[file foo.pyi]
+from typing import overload
+@overload
+def f(a: int) -> None: pass
+1
+def f(a: str) -> None: pass
+[out]
+tmp/foo.pyi:2: error: Single overload definition, multiple required
+tmp/foo.pyi:5: error: Name 'f' already defined on line 2
+
+[case testOverloadTuple]
+from foo import *
+[file foo.pyi]
+from typing import overload, Tuple
+@overload
+def f(x: int, y: Tuple[str, ...]) -> None: pass
+@overload
+def f(x: int, y: str) -> None: pass
+f(1, ('2', '3'))
+f(1, (2, '3')) # E: Argument 2 to "f" has incompatible type "Tuple[int, str]"; expected Tuple[str, ...]
+f(1, ('2',))
+f(1, '2')
+f(1, (2, 3)) # E: Argument 2 to "f" has incompatible type "Tuple[int, int]"; expected Tuple[str, ...]
+x = ('2', '3')  # type: Tuple[str, ...]
+f(1, x)
+y = (2, 3)  # type: Tuple[int, ...]
+f(1, y) # E: Argument 2 to "f" has incompatible type Tuple[int, ...]; expected Tuple[str, ...]
+[builtins fixtures/tuple.pyi]
+
+[case testCallableSpecificOverload]
+from foo import *
+[file foo.pyi]
+from typing import overload, Callable
+@overload
+def f(a: Callable[[], int]) -> None: pass
+@overload
+def f(a: str) -> None: pass
+f(0)  # E: No overload variant of "f" matches argument types [builtins.int]
+
+[case testCustomRedefinitionDecorator]
+from typing import Any, Callable, Type
+
+class Chain(object):
+    def chain(self, function: Callable[[Any], int]) -> 'Chain':
+        return self
+
+class Test(object):
+    do_chain = Chain()
+
+    @do_chain.chain
+    def do_chain(self) -> int:
+        return 2
+
+    @do_chain.chain  # E: Name 'do_chain' already defined
+    def do_chain(self) -> int:
+        return 3
+
+t = Test()
+reveal_type(t.do_chain)  # E: Revealed type is '__main__.Chain'
diff --git a/test-data/unit/check-python2.test b/test-data/unit/check-python2.test
new file mode 100644
index 0000000..b3b899b
--- /dev/null
+++ b/test-data/unit/check-python2.test
@@ -0,0 +1,310 @@
+-- Type checker test cases for Python 2.x mode.
+
+
+[case testUnicode]
+u = u'foo'
+u = unicode()
+s = ''
+s = u'foo' # E: Incompatible types in assignment (expression has type "unicode", variable has type "str")
+s = b'foo'
+[builtins_py2 fixtures/python2.pyi]
+
+[case testTypeVariableUnicode]
+from typing import TypeVar
+T = TypeVar(u'T')
+
+[case testNamedTupleUnicode]
+from typing import NamedTuple
+from collections import namedtuple
+N = NamedTuple(u'N', [(u'x', int)])
+n = namedtuple(u'n', u'x y')
+
+[builtins fixtures/dict.pyi]
+
+[case testPrintStatement]
+print ''() # E: "str" not callable
+print 1, 1() # E: "int" not callable
+
+[case testPrintStatementWithTarget]
+class A:
+    def write(self, s):
+        # type: (str) -> None
+        pass
+
+print >>A(), ''
+print >>None, ''
+print >>1, '' # E: "int" has no attribute "write"
+print >>(None + ''), None # E: Unsupported left operand type for + (None)
+
+[case testDivision]
+class A:
+    def __div__(self, x):
+        # type: (int) -> str
+        pass
+s = A() / 1
+s = ''
+s = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testStrUnicodeCompatibility]
+import typing
+def f(x):
+    # type: (unicode) -> None
+    pass
+f('')
+f(u'')
+f(b'')
+[builtins_py2 fixtures/python2.pyi]
+
+[case testStaticMethodWithCommentSignature]
+class A:
+    @staticmethod
+    def f(x): # type: (int) -> str
+        return ''
+A.f(1)
+A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+[builtins_py2 fixtures/staticmethod.pyi]
+
+[case testRaiseTuple]
+import typing
+raise BaseException, "a"
+raise BaseException, "a", None
+[builtins_py2 fixtures/exception.pyi]
+
+[case testTryExceptWithTuple]
+try:
+    None
+except BaseException, e:
+    e() # E: "BaseException" not callable
+[builtins_py2 fixtures/exception.pyi]
+
+[case testAlternateNameSuggestions]
+class Foo(object):
+    def say_hello(self):
+        pass
+    def say_hell(self):
+        pass
+    def say_hullo(self):
+        pass
+    def say_goodbye(self):
+        pass
+    def go_away(self):
+        pass
+    def go_around(self):
+        pass
+    def append(self):
+        pass
+    def extend(self):
+        pass
+    def _add(self):
+        pass
+
+f = Foo()
+f.say_hallo() # E: "Foo" has no attribute "say_hallo"; maybe "say_hullo", "say_hello", or "say_hell"?
+f.go_array() # E: "Foo" has no attribute "go_array"; maybe "go_away"?
+f.add() # E: "Foo" has no attribute "add"; maybe "append", "extend", or "_add"?
+
+[case testTupleArgListDynamicallyTyped]
+def f(x, (y, z)):
+    x = y + z
+f(1, 1)
+f(1, (1, 2))
+
+[case testTupleArgListAnnotated]
+from typing import Tuple
+def f(x, (y, z)): # type: (object, Tuple[int, str]) -> None
+    x() # E
+    y() # E
+    z() # E
+f(object(), (1, ''))
+f(1, 1) # E
+[builtins_py2 fixtures/tuple.pyi]
+[out]
+main:3: error: "object" not callable
+main:4: error: "int" not callable
+main:5: error: "str" not callable
+main:7: error: Argument 2 to "f" has incompatible type "int"; expected "Tuple[int, str]"
+
+[case testNestedTupleArgListAnnotated]
+from typing import Tuple
+def f(x, (y, (a, b))): # type: (object, Tuple[int, Tuple[str, int]]) -> None
+    x() # E
+    y() # E
+    a() # E
+    b() # E
+f(object(), (1, ('', 2)))
+f(1, 1) # E
+[builtins fixtures/tuple.pyi]
+[out]
+main:3: error: "object" not callable
+main:4: error: "int" not callable
+main:5: error: "str" not callable
+main:6: error: "int" not callable
+main:8: error: Argument 2 to "f" has incompatible type "int"; expected "Tuple[int, Tuple[str, int]]"
+
+[case testBackquoteExpr]
+`1`.x # E: "str" has no attribute "x"
+
+[case testPython2OnlyStdLibModuleWithoutStub]
+import asyncio
+import Bastion
+[out]
+main:1: error: Cannot find module named 'asyncio'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: No library stub file for standard library module 'Bastion'
+main:2: note: (Stub files are from https://github.com/python/typeshed)
+
+[case testImportFromPython2Builtin]
+from __builtin__ import int as i
+x = 1 # type: i
+y = '' # type: i  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testImportPython2Builtin]
+import __builtin__
+x = 1 # type: __builtin__.int
+y = '' # type: __builtin__.int  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testImportAsPython2Builtin]
+import __builtin__ as bi
+x = 1 # type: bi.int
+y = '' # type: bi.int  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testImportFromPython2BuiltinOverridingDefault]
+from __builtin__ import int
+x = 1 # type: int
+y = '' # type: int  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+-- Copied from check-functions.test
+[case testEllipsisWithArbitraryArgsOnBareFunctionInPython2]
+def f(x, y, z): # type: (...) -> None
+    pass
+
+-- Copied from check-functions.test
+[case testEllipsisWithSomethingAfterItFailsInPython2]
+def f(x, y, z): # type: (..., int) -> None
+    pass
+[out]
+main:1: error: Ellipses cannot accompany other argument types in function type signature.
+
+[case testLambdaTupleArgInPython2]
+f = lambda (x, y): x + y
+f((0, 0))
+[out]
+
+[case testLambdaSingletonTupleArgInPython2]
+f = lambda (x,): x + 1
+f((0,))
+[out]
+
+[case testLambdaNoTupleArgInPython2]
+f = lambda (x): x + 1
+f(0)
+[out]
+
+[case testDefTupleEdgeCasesPython2]
+def f((x,)): return x
+def g((x)): return x
+f(0) + g(0)
+[out]
+
+[case testLambdaAsSortKeyForTuplePython2]
+from typing import Any, Tuple, Callable
+def bar(key):
+    # type: (Callable[[Tuple[int, int]], int]) -> int
+    pass
+def foo():
+    # type: () -> int
+    return bar(key=lambda (a, b): a)
+[out]
+
+[case testImportBuiltins]
+
+import __builtin__
+__builtin__.str
+
+[case testUnicodeAlias]
+from typing import List
+Alias = List[u'Foo']
+class Foo: pass
+[builtins_py2 fixtures/python2.pyi]
+
+[case testExec]
+exec('print 1 + 1')
+
+[case testUnicodeDocStrings]
+# flags: --python-version=2.7
+__doc__ = u"unicode"
+
+class A:
+    u"unicode"
+
+def f():
+    # type: () -> None
+    u"unicode"
+
+[case testMetaclassBasics]
+class M(type):
+    x = 0  # type: int
+    def test(cls):
+        # type: () -> str
+        return "test"
+
+class A(object):
+    __metaclass__ = M
+
+reveal_type(A.x) # E: Revealed type is 'builtins.int'
+reveal_type(A.test()) # E: Revealed type is 'builtins.str'
+
+[case testImportedMetaclass]
+import m
+
+class A(object):
+    __metaclass__ = m.M
+
+reveal_type(A.x) # E: Revealed type is 'builtins.int'
+reveal_type(A.test()) # E: Revealed type is 'builtins.str'
+[file m.py]
+class M(type):
+    x = 0
+    def test(cls):
+        # type: () -> str
+        return "test"
+
+[case testDynamicMetaclass]
+class C(object):
+    __metaclass__ = int()  # E: Dynamic metaclass not supported for 'C'
+
+[case testMetaclassDefinedAsClass]
+class C(object):
+    class __metaclass__: pass # E: Metaclasses defined as inner classes are not supported
+
+[case testErrorInMetaclass]
+x = 0
+class A(object):
+    __metaclass__ = m.M  # E: Name 'm' is not defined
+class B(object):
+    __metaclass__ = M  # E: Name 'M' is not defined
+
+[case testMetaclassAndSkippedImportInPython2]
+# flags: --ignore-missing-imports
+from missing import M
+class A(object):
+    __metaclass__ = M
+    y = 0
+reveal_type(A.y) # E: Revealed type is 'builtins.int'
+A.x # E: Type[A] has no attribute "x"
+
+[case testAnyAsBaseOfMetaclass]
+from typing import Any, Type
+M = None  # type: Any
+class MM(M): pass
+class A(object):
+    __metaclass__ = MM
+
+[case testSelfTypeNotSelfType2]
+class A:
+    def g(self):
+        # type: (None) -> None
+        pass
+[out]
+main:2: error: Invalid type for self, or extra argument type in function annotation
+main:2: note: (Hint: typically annotations omit the type for self)
diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test
new file mode 100644
index 0000000..d0c4a56
--- /dev/null
+++ b/test-data/unit/check-selftype.test
@@ -0,0 +1,378 @@
+[case testSelfTypeInstance]
+from typing import TypeVar
+
+T = TypeVar('T', bound='A', covariant=True)
+
+class A:
+    def copy(self: T) -> T: pass
+
+class B(A):
+    pass
+
+reveal_type(A().copy)  # E: Revealed type is 'def () -> __main__.A*'
+reveal_type(B().copy)  # E: Revealed type is 'def () -> __main__.B*'
+reveal_type(A().copy())  # E: Revealed type is '__main__.A*'
+reveal_type(B().copy())  # E: Revealed type is '__main__.B*'
+
+[builtins fixtures/bool.pyi]
+
+[case testSelfTypeStaticAccess]
+from typing import TypeVar
+
+T = TypeVar('T', bound='A', covariant=True)
+class A:
+    def copy(self: T) -> T: pass
+
+class B(A):
+    pass
+
+# Erased instances appear on reveal_type; unrelated to self type
+def f(a: A) -> None: pass
+f(A.copy(A()))
+f(A.copy(B()))
+f(B.copy(B()))
+
+# TODO: make it an error
+# f(B.copy(A()))
+
+def g(a: B) -> None: pass
+g(A.copy(A()))  # E: Argument 1 to "g" has incompatible type "A"; expected "B"
+g(A.copy(B()))
+g(B.copy(B()))
+
+[builtins fixtures/bool.pyi]
+
+[case testSelfTypeReturn]
+from typing import TypeVar, Type
+
+R = TypeVar('R')
+def _type(self: R) -> Type[R]: pass
+
+T = TypeVar('T', bound='A', covariant=True)
+class A:
+    def copy(self: T) -> T:
+        if B():
+            return A()  # E: Incompatible return value type (got "A", expected "T")
+        elif A():
+            return B()  # E: Incompatible return value type (got "B", expected "T")
+        reveal_type(_type(self))  # E: Revealed type is 'Type[T`-1]'
+        return reveal_type(_type(self)())  # E: Revealed type is 'T`-1'
+
+class B(A):
+    pass
+
+Q = TypeVar('Q', bound='C', covariant=True)
+class C:
+    def __init__(self, a: int) -> None: pass
+
+    def copy(self: Q) -> Q:
+        if self:
+            return reveal_type(_type(self)(1))  # E: Revealed type is 'Q`-1'
+        else:
+            return _type(self)()  # E: Too few arguments for "C"
+
+
+[builtins fixtures/bool.pyi]
+
+[case testSelfTypeClass]
+from typing import TypeVar, Type
+
+T = TypeVar('T', bound='A')
+
+class A:
+    @classmethod
+    def new(cls: Type[T]) -> T:
+        return reveal_type(cls())  # E: Revealed type is 'T`-1'
+
+class B(A):
+    pass
+
+Q = TypeVar('Q', bound='C', covariant=True)
+class C:
+    def __init__(self, a: int) -> None: pass
+
+    @classmethod
+    def new(cls: Type[Q]) -> Q:
+        if cls:
+            return cls(1)
+        else:
+            return cls()  # E: Too few arguments for "C"
+
+
+reveal_type(A.new)  # E: Revealed type is 'def () -> __main__.A*'
+reveal_type(B.new)  # E: Revealed type is 'def () -> __main__.B*'
+reveal_type(A.new())  # E: Revealed type is '__main__.A*'
+reveal_type(B.new())  # E: Revealed type is '__main__.B*'
+
+[builtins fixtures/classmethod.pyi]
+
+[case testSelfTypeOverride]
+from typing import TypeVar, cast
+
+T = TypeVar('T', bound='A', covariant=True)
+
+class A:
+    def copy(self: T) -> T: pass
+
+class B(A):
+    pass
+
+Q = TypeVar('Q', bound='C', covariant=True)
+class C(A):
+    def copy(self: Q) -> Q: pass
+
+reveal_type(C().copy)  # E: Revealed type is 'def () -> __main__.C*'
+reveal_type(C().copy())  # E: Revealed type is '__main__.C*'
+reveal_type(cast(A, C()).copy)  # E: Revealed type is 'def () -> __main__.A*'
+reveal_type(cast(A, C()).copy())  # E: Revealed type is '__main__.A*'
+
+[builtins fixtures/bool.pyi]
+
+[case testSelfTypeSuper]
+from typing import TypeVar, cast
+
+T = TypeVar('T', bound='A', covariant=True)
+
+class A:
+    def copy(self: T) -> T: pass
+
+Q = TypeVar('Q', bound='B', covariant=True)
+class B(A):
+    def copy(self: Q) -> Q:
+        reveal_type(self)  # E: Revealed type is 'Q`-1'
+        reveal_type(super().copy)  # E: Revealed type is 'def () -> Q`-1'
+        return super().copy()
+
+[builtins fixtures/bool.pyi]
+
+[case testSelfTypeRecursiveBinding]
+from typing import TypeVar, Callable, Type
+
+T = TypeVar('T', bound='A', covariant=True)
+class A:
+    # TODO: This is potentially unsafe, as we use T in an argument type
+    def copy(self: T, factory: Callable[[T], T]) -> T:
+        return factory(self)
+
+    @classmethod
+    def new(cls: Type[T], factory: Callable[[T], T]) -> T:
+        reveal_type(cls)   # E: Revealed type is 'Type[T`-1]'
+        reveal_type(cls())   # E: Revealed type is 'T`-1'
+        cls(2)  # E: Too many arguments for "A"
+        return cls()
+
+class B(A):
+    pass
+
+reveal_type(A().copy)  # E: Revealed type is 'def (factory: def (__main__.A*) -> __main__.A*) -> __main__.A*'
+reveal_type(B().copy)  # E: Revealed type is 'def (factory: def (__main__.B*) -> __main__.B*) -> __main__.B*'
+reveal_type(A.new)  # E: Revealed type is 'def (factory: def (__main__.A*) -> __main__.A*) -> __main__.A*'
+reveal_type(B.new)  # E: Revealed type is 'def (factory: def (__main__.B*) -> __main__.B*) -> __main__.B*'
+
+[builtins fixtures/classmethod.pyi]
+
+[case testSelfTypeBound]
+from typing import TypeVar, Callable, cast
+
+TA = TypeVar('TA', bound='A', covariant=True)
+
+class A:
+    def copy(self: TA) -> TA:
+        pass
+
+class C(A):
+    def copy(self: C) -> C:
+        pass
+
+class D(A):
+   def copy(self: A) -> A:  # E: Return type of "copy" incompatible with supertype "A"
+       pass
+
+TB = TypeVar('TB', bound='B', covariant=True)
+class B(A):
+    x = 1
+    def copy(self: TB) -> TB:
+        reveal_type(self.x)  # E: Revealed type is 'builtins.int'
+        return cast(TB, None)
+
+[builtins fixtures/bool.pyi]
+
+-- # TODO: fail for this
+-- [case testSelfTypeBare]
+-- from typing import TypeVar, Type
+--
+-- T = TypeVar('T', bound='E')
+--
+-- class E:
+--     def copy(self: T, other: T) -> T: pass
+
+[case testSelfTypeClone]
+from typing import TypeVar, Type
+T = TypeVar('T', bound='C')
+
+class C:
+    def copy(self: T) -> T:
+        return self
+
+    @classmethod
+    def new(cls: Type[T]) -> T:
+        return cls()
+
+class D(C): pass
+
+reveal_type(D.new)  # E: Revealed type is 'def () -> __main__.D*'
+reveal_type(D().new)  # E: Revealed type is 'def () -> __main__.D*'
+reveal_type(D.new())  # E: Revealed type is '__main__.D*'
+reveal_type(D().new())  # E: Revealed type is '__main__.D*'
+
+Q = TypeVar('Q', bound=C)
+
+def clone(arg: Q) -> Q:
+    reveal_type(arg.copy)  # E: Revealed type is 'def () -> Q`-1'
+    reveal_type(arg.copy())  # E: Revealed type is 'Q`-1'
+    reveal_type(arg.new)  # E: Revealed type is 'def () -> Q`-1'
+    reveal_type(arg.new())  # E: Revealed type is 'Q`-1'
+    return arg.copy()
+
+def make(cls: Type[Q]) -> Q:
+    reveal_type(cls.new)  # E: Revealed type is 'def () -> Q`-1'
+    reveal_type(cls().new)  # E: Revealed type is 'def () -> Q`-1'
+    reveal_type(cls().new())  # E: Revealed type is 'Q`-1'
+    return cls.new()
+
+[builtins fixtures/classmethod.pyi]
+
+[case testSelfTypeGeneric]
+from typing import TypeVar
+
+T = TypeVar('T', int, str)
+
+class A:
+    pass
+
+class B(A):
+    def __init__(self, arg: T) -> None:
+        super(B, self).__init__()
+
+[case testSelfTypeNonsensical]
+from typing import TypeVar, Type
+
+T = TypeVar('T', bound=str)
+class A:
+    def foo(self: T) -> T:   # E: The erased type of self 'builtins.str' is not a supertype of its class '__main__.A'
+        return self
+
+    @classmethod
+    def cfoo(cls: Type[T]) -> T:  # E: The erased type of self 'Type[builtins.str]' is not a supertype of its class 'Type[__main__.A]'
+        return cls()
+
+Q = TypeVar('Q', bound='B')
+class B:
+    def foo(self: Q) -> Q:
+        return self
+
+    @classmethod
+    def cfoo(cls: Type[Q]) -> Q:
+        return cls()
+
+class C:
+    def foo(self: C) -> C: return self
+
+    @classmethod
+    def cfoo(cls: Type[C]) -> C:
+        return cls()
+
+class D:
+    def foo(self: Q) -> Q:  # E: The erased type of self '__main__.B' is not a supertype of its class '__main__.D'
+        return self
+
+    @staticmethod
+    def bar(self: str) -> str:
+        return self
+
+    @classmethod
+    def cfoo(cls: Type[Q]) -> Q:  # E: The erased type of self 'Type[__main__.B]' is not a supertype of its class 'Type[__main__.D]'
+        return cls()
+
+[builtins fixtures/classmethod.pyi]
+
+[case testSelfTypeLambdaDefault]
+from typing import Callable
+class C:
+    @classmethod
+    def foo(cls,
+            arg: Callable[[int], str] = lambda a: ''
+            ) -> None:
+        pass
+
+    def bar(self,
+            arg: Callable[[int], str] = lambda a: ''
+            ) -> None:
+        pass
+[builtins fixtures/classmethod.pyi]
+
+[case testSelfTypeNew]
+from typing import TypeVar, Type
+
+T = TypeVar('T', bound=A)
+class A:
+    def __new__(cls: Type[T]) -> T:
+        return cls()
+
+    def __init_subclass__(cls: Type[T]) -> None:
+        pass
+
+class B:
+    def __new__(cls: Type[T]) -> T:  # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class 'Type[__main__.B]'
+        return cls()
+
+    def __init_subclass__(cls: Type[T]) -> None:  # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class 'Type[__main__.B]'
+        pass
+
+class C:
+    def __new__(cls: Type[C]) -> C:
+        return cls()
+
+    def __init_subclass__(cls: Type[C]) -> None:
+        pass
+
+class D:
+    def __new__(cls: D) -> D:  # E: The erased type of self '__main__.D' is not a supertype of its class 'Type[__main__.D]'
+        return cls
+
+    def __init_subclass__(cls: D) -> None:  # E: The erased type of self '__main__.D' is not a supertype of its class 'Type[__main__.D]'
+        pass
+
+class E:
+    def __new__(cls) -> E:
+        reveal_type(cls)  # E: Revealed type is 'def () -> __main__.E'
+        return cls()
+
+    def __init_subclass__(cls) -> None:
+        reveal_type(cls)  # E: Revealed type is 'def () -> __main__.E'
+
+[case testSelfTypeProperty]
+from typing import TypeVar
+
+T = TypeVar('T', bound='A')
+
+class A:
+    @property
+    def member(self: T) -> T:
+        pass
+
+class B(A):
+    pass
+
+reveal_type(A().member)  # E: Revealed type is '__main__.A*'
+reveal_type(B().member)  # E: Revealed type is '__main__.B*'
+
+[builtins fixtures/property.pyi]
+
+[case testSelfTypeNotSelfType]
+# Friendlier error messages for common mistakes. See #2950
+class A:
+    def f(x: int) -> None: ...
+    # def g(self: None) -> None: ... see in check-python2.test
+[out]
+main:3: error: Self argument missing for a non-static method (or an invalid type for self)
diff --git a/test-data/unit/check-semanal-error.test b/test-data/unit/check-semanal-error.test
new file mode 100644
index 0000000..1869bbe
--- /dev/null
+++ b/test-data/unit/check-semanal-error.test
@@ -0,0 +1,97 @@
+-- Type checking after an error during semantic analysis
+-- -----------------------------------------------------
+--
+-- This tests both the semantic analyzer (that it does not generate
+-- corrupt state on error) and the type checker (that it can deal with
+-- whatever state the semantic analyzer sets up).
+
+-- TODO:
+--  - invalid type in annotation
+--  - invalid function comment type annotation
+--  - invalid multiple assignment type annotation
+--  - using a type variable as a value
+--  - using special names defined in typing as values
+
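+-- Editorial sketch, not from the upstream suite: the pattern the cases below
+-- exercise is that a name broken by a semantic-analysis error behaves like Any
+-- afterwards, while unrelated type errors are still reported.  The case name
+-- and module name below are hypothetical.
+-- [case testSemanalErrorRecoverySketch]
+-- import nonexistent_module      # import error reported once, here
+-- nonexistent_module.anything()  # no further error: the name acts like Any
+-- 1()                            # ordinary type errors are still detected
+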
+[case testMissingModuleImport1]
+import m # E
+m.foo()
+m.x = m.y
+1() # E
+[out]
+main:1: error: Cannot find module named 'm'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:4: error: "int" not callable
+
+[case testMissingModuleImport2]
+from m import x # E
+x.foo()
+x.a = x.b
+1() # E
+[out]
+main:1: error: Cannot find module named 'm'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:4: error: "int" not callable
+
+[case testMissingModuleImport3]
+from m import * # E
+x # E
+1() # E
+[out]
+main:1: error: Cannot find module named 'm'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Name 'x' is not defined
+main:3: error: "int" not callable
+
+[case testInvalidBaseClass1]
+class A(X): # E: Name 'X' is not defined
+    x = 1
+A().foo(1)
+A().x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testInvalidBaseClass2]
+X = 1
+class A(X): # E
+    x = 1
+A().foo(1)
+A().x = '' # E
+[out]
+main:2: error: Invalid type "__main__.X"
+main:2: error: Invalid base class
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+
+[case testInvalidNumberOfTypeArgs]
+from typing import TypeVar
+T = TypeVar('T')
+class C:  # Forgot to add type params here
+    def __init__(self, t: T) -> None: pass
+c = C(t=3)  # type: C[int]  # E: "C" expects no type arguments, but 1 given
+
+[case testBreakOutsideLoop]
+break # E: 'break' outside loop
+
+[case testContinueOutsideLoop]
+continue # E: 'continue' outside loop
+
+[case testYieldOutsideFunction]
+yield # E: 'yield' outside function
+
+[case testYieldFromOutsideFunction]
+x = 1
+yield from x # E: 'yield from' outside function
+
+[case testImportFuncDup]
+import m
+def m() -> None: ...  # ok
+
+[file m.py]
+[out]
+
+[case testIgnoredImportDup]
+import m # type: ignore
+from m import f # type: ignore
+def m() -> None: ...  # ok
+def f() -> None: ...  # ok
+
+[out]
+
diff --git a/test-data/unit/check-serialize.test b/test-data/unit/check-serialize.test
new file mode 100644
index 0000000..2996b8b
--- /dev/null
+++ b/test-data/unit/check-serialize.test
@@ -0,0 +1,1249 @@
+-- Serialization test cases (incremental type checking)
+--
+-- These test that modules deserialized from cache files behave
+-- identically to modules that have undergone full type checking.
+--
+-- These tests are written using the same syntax as test cases in
+-- check-incremental.test.  Look at the comment at the top of
+-- that file for the details of how these tests work.
+--
+-- There is probably some overlap with check-incremental.test, but it
+-- is perhaps not worth trying to simplify these, since a few redundant
+-- test cases are cheap but accidentally losing test coverage is bad.
+--
+-- These are intended to be straightforward, and do not test import
+-- cycles and other tricky business.  Add test cases for complex things
+-- to check-incremental.test.
+
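+-- Editorial note on the syntax used below (inferred from the cases in this
+-- file, not authoritative): "[file m.py]" gives a module's contents for the
+-- first run, "[file m.py.2]" its contents for the second, incremental run,
+-- and "[out2]" the errors expected from that second run; "[rechecked]" and
+-- "[stale]" list the modules expected to be re-checked or found stale.
+-- A minimal hypothetical skeleton:
+-- [case testSerializeSkeletonSketch]
+-- import m
+-- y = m.x  # type: int
+-- [file m.py]
+-- x = 1
+-- [file m.py.2]
+-- x = ''
+-- [out2]
+-- (an incompatible-assignment error would be expected on the second run)
+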
+--
+-- Basic things
+--
+
+[case testSerializeModuleAttribute]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+y = b.x  # type: int
+[file b.py]
+x = ''
+-- We only do the following two sections once here to avoid repetition.
+-- Most other test cases are similar.
+[rechecked a]
+[stale]
+[out2]
+tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+--
+-- Functions
+--
+
+[case testSerializeAnnotatedFunction]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+b.f(1)
+x = b.f('')  # type: str
+[file b.py]
+def f(x: str) -> int: pass
+[out2]
+tmp/a.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"
+tmp/a.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testSerializeUnannotatedFunction]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+b.f(x=1)
+b.f()
+[file b.py]
+def f(x): pass
+[out2]
+tmp/a.py:3: error: Too few arguments for "f"
+
+[case testSerializeGenericFunction]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import f
+reveal_type(f(1))
+reveal_type(f(x=''))
+[file b.py]
+from typing import TypeVar
+
+T = TypeVar('T')
+
+def f(x: T) -> T: return x
+[out2]
+tmp/a.py:2: error: Revealed type is 'builtins.int*'
+tmp/a.py:3: error: Revealed type is 'builtins.str*'
+
+[case testSerializeFunctionReturningGenericFunction]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+reveal_type(b.f)
+reveal_type(b.f()(''))
+[file b.py]
+from typing import TypeVar, Callable
+
+T = TypeVar('T')
+
+def f() -> Callable[[T], T]: pass
+[out2]
+tmp/a.py:2: error: Revealed type is 'def () -> def [T] (T`-1) -> T`-1'
+tmp/a.py:3: error: Revealed type is 'builtins.str*'
+
+[case testSerializeArgumentKinds]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import f
+f(1, z=1)
+f(1, '', z=1)
+f(1, y='', z=1)
+f(1, '', 2, 3, z=1)
+f(1, '', zz=1, z=1)
+f(1, '', foo='', z=1)
+[file b.py]
+def f(x: int,
+      y: str = '',
+      *args: int,
+      z: int,
+      zz: int = 1,
+      **kw: str) -> None: pass
+[builtins fixtures/dict.pyi]
+[out2]
+
+[case testSerializeCallableWithBoundTypeArguments]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+x = b.f
+[file b.py]
+from typing import TypeVar, Generic
+
+T = TypeVar('T')
+
+class C(Generic[T]):
+    def f(self, x: T) -> None: pass
+
+c: C[int]
+f = c.f
+[out]
+[out2]
+
+[case testSerializePositionalOnlyArgument]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+b.f(1)
+b.f('')
+b.f(__x=1)
+[file b.py]
+def f(__x: int) -> None: pass
+[out2]
+tmp/a.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+tmp/a.py:4: error: Unexpected keyword argument "__x" for "f"
+
+[case testSerializeArgumentKindsErrors]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import f
+f('', z=1)           # Line 2
+f(1, 2, z=1)         # 3
+f(1, y=1, z=1)       # 4
+f(1, '', 2, '', z=1) # 5
+f(1, '', z='')       # 6
+f(1, '', zz='', z=1) # 7
+f(1, '', z=1, foo=1) # 8
+[file b.py]
+def f(x: int,
+      y: str = '',
+      *args: int,
+      z: int,
+      zz: int = 1,
+      **kw: str) -> None: pass
+[builtins fixtures/dict.pyi]
+[out2]
+tmp/a.py:2: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+tmp/a.py:3: error: Argument 2 to "f" has incompatible type "int"; expected "str"
+tmp/a.py:4: error: Argument 2 to "f" has incompatible type "int"; expected "str"
+tmp/a.py:5: error: Argument 4 to "f" has incompatible type "str"; expected "int"
+tmp/a.py:6: error: Argument 3 to "f" has incompatible type "str"; expected "int"
+tmp/a.py:7: error: Argument 3 to "f" has incompatible type "str"; expected "int"
+tmp/a.py:8: error: Argument 4 to "f" has incompatible type "int"; expected "str"
+
+[case testSerializeOverloadedFunction]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+reveal_type(b.f(1))
+reveal_type(b.f(''))
+[file b.pyi]
+from typing import overload
+@overload
+def f(x: int) -> int: pass
+@overload
+def f(x: str) -> str: pass
+[out2]
+tmp/a.py:2: error: Revealed type is 'builtins.int'
+tmp/a.py:3: error: Revealed type is 'builtins.str'
+
+[case testSerializeDecoratedFunction]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+reveal_type(b.f(''))
+b.f(x=1)
+[file b.py]
+from typing import Callable
+def dec(f: Callable[[int], int]) -> Callable[[str], str]: pass
+@dec
+def f(x: int) -> int: pass
+[out2]
+tmp/a.py:2: error: Revealed type is 'builtins.str'
+tmp/a.py:3: error: Unexpected keyword argument "x" for "f"
+
+--
+-- Classes
+--
+
+[case testSerializeClassAttribute]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+b.A().x = ''
+[file b.py]
+class A:
+    x = 1
+[out2]
+tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testSerializeMethod]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+b.A().f('')
+[file b.py]
+class A:
+    def f(self, x: int) -> None: pass
+[out2]
+tmp/a.py:2: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testSerialize__init__]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+A('')
+class B(A):
+    def f(self) -> None:
+        super().__init__('')
+[file b.py]
+class A:
+    def __init__(self, x: int) -> None: pass
+[out2]
+tmp/a.py:2: error: Argument 1 to "A" has incompatible type "str"; expected "int"
+tmp/a.py:5: error: Argument 1 to "__init__" of "A" has incompatible type "str"; expected "int"
+
+[case testSerializeOverloaded__init__]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+A(object()) # E
+A(x='')
+A(0)
+class B(A):
+    def f(self) -> None:
+        super().__init__(object()) # E
+        super().__init__('')
+        super().__init__(0)
+[file b.pyi]
+from typing import overload
+class A:
+    @overload
+    def __init__(self, x: int) -> None: pass
+    @overload
+    def __init__(self, x: str) -> None: pass
+[out2]
+tmp/a.py:2: error: No overload variant of "A" matches argument types [builtins.object]
+tmp/a.py:7: error: No overload variant of "__init__" of "A" matches argument types [builtins.object]
+
+[case testSerialize__new__]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+A('')
+[file b.py]
+class A:
+    def __new__(cls, x: int) -> 'A': pass
+[out2]
+tmp/a.py:2: error: Argument 1 to "A" has incompatible type "str"; expected "int"
+
+[case testSerializeClassVar]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+A.x = ''
+A().x = 1
+[file b.py]
+from typing import ClassVar
+class A:
+    x: ClassVar[int]
+[out2]
+tmp/a.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/a.py:3: error: Cannot assign to class variable "x" via instance
+
+[case testSerializeGenericClass]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+a1: A[int, str] = A(1)
+a2: A[int, str] = A('')
+reveal_type(a1.y)
+reveal_type(a1.f())
+[file b.py]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class A(Generic[T, S]):
+    x: T
+    y: S
+    def __init__(self, x: T) -> None:
+        self.x = x
+    def f(self) -> T:
+        return self.x
+[out2]
+tmp/a.py:3: error: Argument 1 to "A" has incompatible type "str"; expected "int"
+tmp/a.py:4: error: Revealed type is 'builtins.str*'
+tmp/a.py:5: error: Revealed type is 'builtins.int*'
+
+[case testSerializeAbstractClass]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+A()
+class B(A):
+    def f(self) -> None: pass
+    x: int
+B()
+a: A
+a.f()
+a.x = 1
+[file b.py]
+from abc import ABCMeta, abstractmethod, abstractproperty
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self) -> None: pass
+    @abstractproperty
+    def x(self) -> int: return 0
+[out2]
+tmp/a.py:2: error: Cannot instantiate abstract class 'A' with abstract attributes 'f' and 'x'
+tmp/a.py:9: error: Property "x" defined in "A" is read-only
+
+[case testSerializeStaticMethod]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+A.f(1)
+A.f()
+A().f()
+[file b.py]
+class A:
+    @staticmethod
+    def f() -> None: pass
+[builtins fixtures/staticmethod.pyi]
+[out2]
+tmp/a.py:2: error: Too many arguments for "f" of "A"
+
+[case testSerializeClassMethod]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+A.f(1)
+A.f()
+A().f()
+[file b.py]
+class A:
+    @classmethod
+    def f(cls) -> None: pass
+[builtins fixtures/classmethod.pyi]
+[out2]
+tmp/a.py:2: error: Too many arguments for "f" of "A"
+
+[case testSerializeReadOnlyProperty]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+reveal_type(A().x)
+A().x = 0
+[file b.py]
+class A:
+    @property
+    def x(self) -> int: return 0
+[builtins fixtures/property.pyi]
+[out2]
+tmp/a.py:2: error: Revealed type is 'builtins.int'
+tmp/a.py:3: error: Property "x" defined in "A" is read-only
+
+[case testSerializeReadWriteProperty]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+reveal_type(A().x)
+A().x = ''
+A().x = 0
+[file b.py]
+class A:
+    @property
+    def x(self) -> int: return 0
+    @x.setter
+    def x(self, v: int) -> None: pass
+[builtins fixtures/property.pyi]
+[out2]
+tmp/a.py:2: error: Revealed type is 'builtins.int'
+tmp/a.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testSerializeSelfType]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+reveal_type(A().f())
+class B(A): pass
+reveal_type(B().f())
+[file b.py]
+from typing import TypeVar
+T = TypeVar('T', bound='A')
+class A:
+    def f(self: T) -> T: return self
+[out2]
+tmp/a.py:2: error: Revealed type is 'b.A*'
+tmp/a.py:4: error: Revealed type is 'a.B*'
+
+[case testSerializeInheritance]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A, B, C
+C().f(1) # E
+C().g(1) # E
+reveal_type(C().h())
+a: A = C()
+b: B = C()
+i: int = C() # E
+[file b.py]
+class A:
+    def f(self) -> int: pass
+class B:
+    def g(self) -> str: pass
+    def h(self) -> object: pass
+class C(A, B):
+    def h(self) -> int: pass
+[out2]
+tmp/a.py:2: error: Too many arguments for "f" of "A"
+tmp/a.py:3: error: Too many arguments for "g" of "B"
+tmp/a.py:4: error: Revealed type is 'builtins.int'
+tmp/a.py:7: error: Incompatible types in assignment (expression has type "C", variable has type "int")
+
+[case testSerializeGenericInheritance]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import B
+b: B[int]
+reveal_type(b.f())
+[file b.py]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def f(self) -> T: pass
+class B(A[A[T]]):
+    pass
+[out2]
+tmp/a.py:3: error: Revealed type is 'b.A*[builtins.int*]'
+
+[case testSerializeFixedLengthTupleBaseClass]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+a: A
+a.f(1)
+reveal_type((a[0], a[1]))
+[file b.py]
+from typing import Tuple
+class A(Tuple[int, str]):
+    def f(self) -> None: pass
+[builtins fixtures/tuple.pyi]
+[out2]
+tmp/a.py:3: error: Too many arguments for "f" of "A"
+tmp/a.py:4: error: Revealed type is 'Tuple[builtins.int, builtins.str]'
+
+[case testSerializeVariableLengthTupleBaseClass]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+a: A
+a.f(1)
+reveal_type((a[0], a[1]))
+[file b.py]
+from typing import Tuple
+class A(Tuple[int, ...]):
+    def f(self) -> None: pass
+[builtins fixtures/tuple.pyi]
+[out2]
+tmp/a.py:3: error: Too many arguments for "f" of "A"
+tmp/a.py:4: error: Revealed type is 'Tuple[builtins.int*, builtins.int*]'
+
+[case testSerializePlainTupleBaseClass]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+a: A
+a.f(1)
+reveal_type((a[0], a[1]))
+[file b.py]
+from typing import Tuple
+class A(tuple):
+    def f(self) -> None: pass
+[builtins fixtures/tuple.pyi]
+[out2]
+tmp/a.py:3: error: Too many arguments for "f" of "A"
+tmp/a.py:4: error: Revealed type is 'Tuple[Any, Any]'
+
+[case testSerializeNamedTupleBaseClass]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+a: A
+a.f(1)
+reveal_type((a[0], a[1]))
+reveal_type((a.x, a.y))
+[file b.py]
+from typing import NamedTuple
+class A(NamedTuple('N', [('x', int), ('y', str)])):
+    def f(self) -> None: pass
+[builtins fixtures/tuple.pyi]
+[out2]
+tmp/a.py:3: error: Too many arguments for "f" of "A"
+tmp/a.py:4: error: Revealed type is 'Tuple[builtins.int, builtins.str]'
+tmp/a.py:5: error: Revealed type is 'Tuple[builtins.int, builtins.str]'
+
+[case testSerializeAnyBaseClass]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import B
+B().f(1)
+reveal_type(B().xyz)
+[file b.py]
+from typing import Any
+A: Any
+class B(A):
+    def f(self) -> None: pass
+[builtins fixtures/tuple.pyi]
+[out2]
+tmp/a.py:2: error: Too many arguments for "f" of "B"
+tmp/a.py:3: error: Revealed type is 'Any'
+
+[case testSerializeIndirectAnyBaseClass]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import C
+C().f(1)
+C().g(1)
+reveal_type(C().xyz)
+[file b.py]
+from typing import Any
+A: Any
+class B(A):
+    def f(self) -> None: pass
+class C(B):
+    def g(self) -> None: pass
+[builtins fixtures/tuple.pyi]
+[out2]
+tmp/a.py:2: error: Too many arguments for "f" of "B"
+tmp/a.py:3: error: Too many arguments for "g" of "C"
+tmp/a.py:4: error: Revealed type is 'Any'
+
+[case testSerializeNestedClass]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+b.A.B().f(1)
+b.A.B.C().g(1)
+b.b.f(1)
+b.c.g(1)
+[file b.py]
+class A:
+    class B:
+        def f(self) -> None: pass
+        class C:
+            def g(self) -> None: pass
+b: A.B
+c: A.B.C
+[builtins fixtures/tuple.pyi]
+[out2]
+tmp/a.py:2: error: Too many arguments for "f" of "B"
+tmp/a.py:3: error: Too many arguments for "g" of "C"
+tmp/a.py:4: error: Too many arguments for "f" of "B"
+tmp/a.py:5: error: Too many arguments for "g" of "C"
+
+[case testSerializeCallableVsTypeObjectDistinction]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+t: type
+t = b.A
+t = b.f # E
+[file b.py]
+class A: pass
+def f() -> None: pass
+[builtins fixtures/tuple.pyi]
+[out2]
+tmp/a.py:4: error: Incompatible types in assignment (expression has type Callable[[], None], variable has type "type")
+
+[case testSerializeOverloadedVsTypeObjectDistinction]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+t: type
+t = b.A
+t = b.f # E
+[file b.pyi]
+from typing import overload
+class A:
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, x: int) -> None: pass
+@overload
+def f() -> None: pass
+@overload
+def f(x: int) -> None: pass
+[builtins fixtures/tuple.pyi]
+[out2]
+tmp/a.py:4: error: Incompatible types in assignment (expression has type overloaded function, variable has type "type")
+
+[case testSerializeNamedTupleInMethod4]
+from ntcrash import C
+reveal_type(C().a)
+reveal_type(C().b)
+reveal_type(C().c)
+[file ntcrash.py]
+from typing import NamedTuple
+class C:
+    def __init__(self) -> None:
+        A = NamedTuple('A', [('x', int)])
+        self.a = A(0)
+        self.b = A(0)  # type: A
+        self.c = A
+[out1]
+main:2: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]'
+main:3: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]'
+main:4: error: Revealed type is 'def (x: builtins.int) -> Tuple[builtins.int, fallback=ntcrash.C.A@4]'
+[out2]
+main:2: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]'
+main:3: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]'
+main:4: error: Revealed type is 'def (x: builtins.int) -> Tuple[builtins.int, fallback=ntcrash.C.A@4]'
+
+--
+-- Strict optional
+--
+
+[case testSerializeOptionalType]
+# flags: --strict-optional
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+reveal_type(b.x)
+b.f(b.x)
+[file b.py]
+from typing import Optional
+x: Optional[int]
+def f(x: int) -> None: pass
+[out2]
+tmp/a.py:2: error: Revealed type is 'Union[builtins.int, builtins.None]'
+tmp/a.py:3: error: Argument 1 to "f" has incompatible type "Optional[int]"; expected "int"
+
+--
+-- # type: ignore
+--
+
+[case testSerializeIgnoredUndefinedType]
+import b
+reveal_type(b.x)
+[file b.py]
+x: NonExistent  # type: ignore
+[out1]
+main:2: error: Revealed type is 'Any'
+[out2]
+main:2: error: Revealed type is 'Any'
+
+[case testSerializeIgnoredInvalidType]
+import b
+reveal_type(b.x)
+[file b.py]
+A = 0
+x: A  # type: ignore
+[out1]
+main:2: error: Revealed type is 'A?'
+[out2]
+main:2: error: Revealed type is 'A?'
+
+[case testSerializeIgnoredMissingBaseClass]
+import b
+reveal_type(b.B())
+reveal_type(b.B().x)
+[file b.py]
+class B(A): pass  # type: ignore
+[out1]
+main:2: error: Revealed type is 'b.B'
+main:3: error: Revealed type is 'Any'
+[out2]
+main:2: error: Revealed type is 'b.B'
+main:3: error: Revealed type is 'Any'
+
+[case testSerializeIgnoredInvalidBaseClass]
+import b
+reveal_type(b.B())
+reveal_type(b.B().x)
+[file b.py]
+A = 0
+class B(A): pass  # type: ignore
+[out1]
+main:2: error: Revealed type is 'b.B'
+main:3: error: Revealed type is 'Any'
+[out2]
+main:2: error: Revealed type is 'b.B'
+main:3: error: Revealed type is 'Any'
+
+[case testSerializeIgnoredImport]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+reveal_type(b.m)
+reveal_type(b.x)
+[file b.py]
+import m  # type: ignore
+from m import x  # type: ignore
+[out2]
+tmp/a.py:2: error: Revealed type is 'Any'
+tmp/a.py:3: error: Revealed type is 'Any'
+
+--
+-- TypeVar
+--
+
+[case testSerializeSimpleTypeVar]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+def f(x: b.T) -> b.T: return x
+reveal_type(f)
+[file b.py]
+from typing import TypeVar
+T = TypeVar('T')
+[out2]
+tmp/a.py:3: error: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1'
+
+[case testSerializeBoundedTypeVar]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+def f(x: b.T) -> b.T: return x
+reveal_type(f)
+reveal_type(b.g)
+[file b.py]
+from typing import TypeVar
+T = TypeVar('T', bound=int)
+def g(x: T) -> T: return x
+[out2]
+tmp/a.py:3: error: Revealed type is 'def [b.T <: builtins.int] (x: b.T`-1) -> b.T`-1'
+tmp/a.py:4: error: Revealed type is 'def [T <: builtins.int] (x: T`-1) -> T`-1'
+
+[case testSerializeTypeVarWithValues]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+def f(x: b.T) -> b.T: return x
+reveal_type(f)
+reveal_type(b.g)
+[file b.py]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def g(x: T) -> T: return x
+[out2]
+tmp/a.py:3: error: Revealed type is 'def [b.T in (builtins.int, builtins.str)] (x: b.T`-1) -> b.T`-1'
+tmp/a.py:4: error: Revealed type is 'def [T in (builtins.int, builtins.str)] (x: T`-1) -> T`-1'
+
+[case testSerializeTypeVarInClassBody]
+import a
+[file a.py]
+import b
+[file a.py.2]
+from b import A
+def f(x: A.T) -> A.T: return x
+reveal_type(f)
+[file b.py]
+from typing import TypeVar
+class A:
+    T = TypeVar('T', int, str)
+[out2]
+tmp/a.py:3: error: Revealed type is 'def [A.T in (builtins.int, builtins.str)] (x: A.T`-1) -> A.T`-1'
+
+--
+-- NewType
+--
+
+[case testSerializeNewType]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+y: b.N
+y = 1
+i = y
+b.x = 1
+b.x = y
+y = b.N(1)
+y = b.N('')
+[file b.py]
+from typing import NewType
+N = NewType('N', int)
+x: N
+[out2]
+tmp/a.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "N")
+tmp/a.py:5: error: Incompatible types in assignment (expression has type "int", variable has type "N")
+tmp/a.py:8: error: Argument 1 to "N" has incompatible type "str"; expected "int"
+
+--
+-- Named tuples
+--
+
+[case testSerializeNamedTuple]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+from typing import Tuple
+y: b.N
+t: Tuple[int]
+y = t
+b.x = t
+t = y
+b.x = t
+reveal_type(b.N(x=1))
+reveal_type(y[0])
+b.N(x='')
+[file b.py]
+from typing import NamedTuple
+N = NamedTuple('N', [('x', int)])
+x: N
+[out2]
+tmp/a.py:5: error: Incompatible types in assignment (expression has type "Tuple[int]", variable has type "N")
+tmp/a.py:6: error: Incompatible types in assignment (expression has type "Tuple[int]", variable has type "N")
+tmp/a.py:9: error: Revealed type is 'Tuple[builtins.int, fallback=b.N]'
+tmp/a.py:10: error: Revealed type is 'builtins.int'
+tmp/a.py:11: error: Argument 1 to "N" has incompatible type "str"; expected "int"
+
+--
+-- Types and type aliases
+--
+
+[case testSerializeTypeAliases]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+d: b.D
+a: b.A
+u: b.U
+l: b.L
+t: b.T
+c: b.C
+ty: b.Ty
+reveal_type(d)
+reveal_type(a)
+reveal_type(u)
+reveal_type(l)
+reveal_type(t)
+reveal_type(c)
+reveal_type(ty)
+c2: b.C2
+reveal_type(c2)
+ty2: b.Ty2
+reveal_type(ty2)
+[file b.py]
+from typing import Any, Union, List, Tuple, Callable, Type
+class DD: pass
+D = DD
+A = Any
+U = Union[int, str]
+L = List[int]
+T = Tuple[int, str]
+C = Callable[[int], str]
+C2 = Callable[..., str]
+Ty = Type[int]
+Ty2 = type
+[builtins fixtures/list.pyi]
+[out2]
+tmp/a.py:9: error: Revealed type is 'b.DD'
+tmp/a.py:10: error: Revealed type is 'Any'
+tmp/a.py:11: error: Revealed type is 'Union[builtins.int, builtins.str]'
+tmp/a.py:12: error: Revealed type is 'builtins.list[builtins.int]'
+tmp/a.py:13: error: Revealed type is 'Tuple[builtins.int, builtins.str]'
+tmp/a.py:14: error: Revealed type is 'def (builtins.int) -> builtins.str'
+tmp/a.py:15: error: Revealed type is 'Type[builtins.int]'
+tmp/a.py:17: error: Revealed type is 'def (*Any, **Any) -> builtins.str'
+tmp/a.py:19: error: Revealed type is 'builtins.type'
+
+[case testSerializeGenericTypeAlias]
+import b
+from b import X  # Work around https://github.com/python/mypy/issues/2887
+t: b.Y[int]
+reveal_type(t)
+[file b.py]
+from typing import TypeVar, Tuple
+X = TypeVar('X')
+Y = Tuple[X, str]
+[builtins fixtures/tuple.pyi]
+[out1]
+main:4: error: Revealed type is 'Tuple[builtins.int, builtins.str]'
+[out2]
+main:4: error: Revealed type is 'Tuple[builtins.int, builtins.str]'
+
+[case testSerializeTuple]
+# Don't repeat types tested by testSerializeTypeAliases here.
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+reveal_type(b.x)
+reveal_type(b.y)
+[file b.py]
+from typing import Tuple
+x: Tuple[int, ...]
+y: tuple
+[builtins fixtures/tuple.pyi]
+[out2]
+tmp/a.py:2: error: Revealed type is 'builtins.tuple[builtins.int]'
+tmp/a.py:3: error: Revealed type is 'builtins.tuple[Any]'
+
+[case testSerializeNone]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+reveal_type(b.x)
+[file b.py]
+x: None
+[out2]
+tmp/a.py:2: error: Revealed type is 'builtins.None'
+
+--
+-- TypedDict
+--
+
+[case testSerializeTypedDictInMethod]
+from ntcrash import C
+reveal_type(C().a)
+reveal_type(C().b)
+reveal_type(C().c)
+[file ntcrash.py]
+from mypy_extensions import TypedDict
+class C:
+    def __init__(self) -> None:
+        A = TypedDict('A', {'x': int})
+        self.a = A(x=0)
+        self.b = A(x=0)  # type: A
+        self.c = A
+[builtins fixtures/dict.pyi]
+[out1]
+main:2: error: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})'
+main:3: error: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})'
+main:4: error: Revealed type is 'def () -> ntcrash.C.A@4'
+[out2]
+main:2: error: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})'
+main:3: error: Revealed type is 'TypedDict('ntcrash.C.A@4', {'x': builtins.int})'
+main:4: error: Revealed type is 'def () -> ntcrash.C.A@4'
+
+[case testSerializeNonTotalTypedDict]
+from m import d
+reveal_type(d)
+[file m.py]
+from mypy_extensions import TypedDict
+D = TypedDict('D', {'x': int, 'y': str}, total=False)
+d: D
+[builtins fixtures/dict.pyi]
+[out1]
+main:2: error: Revealed type is 'TypedDict('m.D', {'x'?: builtins.int, 'y'?: builtins.str})'
+[out2]
+main:2: error: Revealed type is 'TypedDict('m.D', {'x'?: builtins.int, 'y'?: builtins.str})'
+
+--
+-- Modules
+--
+
+[case testSerializeImport]
+import b
+b.c.f()
+b.c.g()
+[file b.py]
+import c
+[file c.py]
+def f() -> None: pass
+def g(x: int) -> None: pass
+[out1]
+main:3: error: Too few arguments for "g"
+[out2]
+main:3: error: Too few arguments for "g"
+
+[case testSerializeImportAs]
+import b
+b.d.f()
+b.d.g()
+[file b.py]
+import c as d
+[file c.py]
+def f() -> None: pass
+def g(x: int) -> None: pass
+[out1]
+main:3: error: Too few arguments for "g"
+[out2]
+main:3: error: Too few arguments for "g"
+
+[case testSerializeFromImportedClass]
+import b
+b.A(1)
+reveal_type(b.A())
+[file b.py]
+from c import A
+[file c.py]
+class A: pass
+[out1]
+main:2: error: Too many arguments for "A"
+main:3: error: Revealed type is 'c.A'
+[out2]
+main:2: error: Too many arguments for "A"
+main:3: error: Revealed type is 'c.A'
+
+[case testSerializeFromImportedClassAs]
+import b
+b.B(1)
+reveal_type(b.B())
+[file b.py]
+from c import A as B
+[file c.py]
+class A: pass
+[out1]
+main:2: error: Too many arguments for "A"
+main:3: error: Revealed type is 'c.A'
+[out2]
+main:2: error: Too many arguments for "A"
+main:3: error: Revealed type is 'c.A'
+
+[case testSerializeFromImportedModule]
+import b
+b.d.f()
+b.d.g()
+[file b.py]
+from c import d
+[file c/__init__.py]
+[file c/d.py]
+def f() -> None: pass
+def g(x: int) -> None: pass
+[out1]
+main:3: error: Too few arguments for "g"
+[out2]
+main:3: error: Too few arguments for "g"
+
+[case testSerializeQualifiedImport]
+import b
+b.c.d.f()
+b.c.d.g()
+[file b.py]
+import c.d
+[file c/__init__.py]
+[file c/d.py]
+def f() -> None: pass
+def g(x: int) -> None: pass
+[out1]
+main:3: error: Too few arguments for "g"
+[out2]
+main:3: error: Too few arguments for "g"
+
+[case testSerializeQualifiedImportAs]
+import b
+b.e.f()
+b.e.g()
+[file b.py]
+import c.d as e
+[file c/__init__.py]
+[file c/d.py]
+def f() -> None: pass
+def g(x: int) -> None: pass
+[out1]
+main:3: error: Too few arguments for "g"
+[out2]
+main:3: error: Too few arguments for "g"
+
+[case testSerialize__init__ModuleImport]
+import b
+b.c.f()
+b.c.g()
+a: b.c.d.A
+reveal_type(a)
+[file b.py]
+import c
+[file c/__init__.py]
+import d
+def f() -> None: pass
+def g(x: int) -> None: pass
+[file d.py]
+class A: pass
+[out1]
+main:3: error: Too few arguments for "g"
+main:5: error: Revealed type is 'd.A'
+[out2]
+main:3: error: Too few arguments for "g"
+main:5: error: Revealed type is 'd.A'
+
+[case testSerializeImportInClassBody]
+import b
+b.A.c.f()
+b.A.c.g()
+[file b.py]
+class A:
+    import c
+[file c.py]
+def f() -> None: pass
+def g(x: int) -> None: pass
+[out1]
+main:3: error: Too few arguments for "g"
+[out2]
+main:3: error: Too few arguments for "g"
+
+[case testSerializeImportedTypeAlias]
+import b
+x: b.B
+reveal_type(x)
+[file b.py]
+from c import B
+[file c.py]
+from typing import Any
+class A: pass
+B = A
+[out1]
+main:3: error: Revealed type is 'c.A'
+[out2]
+main:3: error: Revealed type is 'c.A'
+
+[case testSerializeStarImport]
+import a
+[file a.py]
+import b
+[file a.py.2]
+import b
+b.f(1)
+x: b.A
+reveal_type(x)
+[file b.py]
+from c import *
+[file c.py]
+def f() -> None: pass
+class A: pass
+[out2]
+tmp/a.py:2: error: Too many arguments for "f"
+tmp/a.py:4: error: Revealed type is 'c.A'
+
+[case testSerializeRelativeImport]
+import b.c
+b.c.f(1)
+[file b/__init__.py]
+[file b/c.py]
+from .d import f
+[file b/d.py]
+def f() -> None: pass
+[out1]
+main:2: error: Too many arguments for "f"
+[out2]
+main:2: error: Too many arguments for "f"
diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test
new file mode 100644
index 0000000..8c1f85b
--- /dev/null
+++ b/test-data/unit/check-statements.test
@@ -0,0 +1,1559 @@
+-- Return statement
+-- ----------------
+
+
+[case testReturnValue]
+import typing
+def f() -> 'A':
+    return A()
+def g() -> 'B':
+    return A()
+class A:
+    pass
+class B:
+    pass
+[out]
+main:5: error: Incompatible return value type (got "A", expected "B")
+
+[case testReturnSubtype]
+import typing
+def f() -> 'B':
+    return A()
+def g() -> 'A':
+    return B()
+class A:
+    pass
+class B(A):
+    pass
+[out]
+main:3: error: Incompatible return value type (got "A", expected "B")
+
+[case testReturnWithoutAValue]
+import typing
+def f() -> 'A':
+    return
+def g() -> None:
+    return
+class A:
+    pass
+[out]
+main:3: error: Return value expected
+
+[case testReturnNoneInFunctionReturningNone]
+import typing
+def f() -> None:
+    return None
+def g() -> None:
+    return f()
+[out]
+
+[case testReturnInGenerator]
+from typing import Generator
+def f() -> Generator[int, None, str]:
+    yield 1
+    return "foo"
+[out]
+
+[case testEmptyReturnInGenerator]
+from typing import Generator
+def f() -> Generator[int, None, str]:
+    yield 1
+    return  # E: Return value expected
+[out]
+
+[case testNoReturnInGenerator]
+from typing import Generator
+def f() -> Generator[int, None, str]:  # E: Missing return statement
+    yield 1
+[out]
+
+[case testEmptyReturnInNoneTypedGenerator]
+from typing import Generator
+def f() -> Generator[int, None, None]:
+    yield 1
+    return
+[out]
+
+[case testNonEmptyReturnInNoneTypedGenerator]
+from typing import Generator
+def f() -> Generator[int, None, None]:
+    yield 1
+    return 42  # E: No return value expected
+[out]
+
+[case testReturnInIterator]
+from typing import Iterator
+def f() -> Iterator[int]:
+    yield 1
+    return "foo"
+[out]
+
+
+-- If statement
+-- ------------
+
+
+[case testIfStatement]
+
+a = None # type: A
+a2 = None # type: A
+a3 = None # type: A
+b = None # type: bool
+if a:
+    a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
+elif a2:
+    a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
+elif a3:
+    a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
+else:
+    a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
+if b:
+    pass
+elif b:
+    pass
+if b:
+    pass
+
+class A: pass
+[builtins fixtures/bool.pyi]
+
+
+-- Loops
+-- -----
+
+
+[case testWhileStatement]
+
+a = None # type: A
+b = None # type: bool
+while a:
+    a = b    # Fail
+else:
+    a = b    # Fail
+while b:
+    b = b
+
+class A: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:7: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testForStatement]
+
+a = None # type: A
+b = None # type: object
+for a in [A()]:
+    a = b    # Fail
+else:
+    a = b    # Fail
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+main:7: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+[case testBreakStatement]
+import typing
+while None:
+    break
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testContinueStatement]
+import typing
+while None:
+    continue
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testForStatementTypeComments]
+
+from typing import List, Union
+x = []  # type: List[int]
+
+for y in x:  # type: str  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    pass
+
+for z in x:  # type: int
+    pass
+
+for w in x:  # type: Union[int, str]
+    reveal_type(w)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+
+for v in x:  # type: int, int  # E: Invalid tuple literal type
+    pass
+[builtins fixtures/list.pyi]
+
+[case testForStatementMultipleTypeComments]
+
+from typing import List, Tuple
+x = []  # type: List[Tuple[int, int]]
+
+for y in x:  # type: int, int  # E: Invalid tuple literal type
+    pass
+
+for z in x:  # type: Tuple[int, int]
+    pass
+
+for w,v in x:  # type: int, str  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    pass
+
+for a, b in x:  # type: int, int, int  # E: Incompatible number of tuple items
+    pass
+[builtins fixtures/list.pyi]
+
+
+-- Operator assignment
+-- -------------------
+
+
+[case testPlusAssign]
+
+a, b, c = None, None, None # type: (A, B, C)
+a += b   # Fail
+b += a   # Fail
+c += a   # Fail
+a += c
+
+class A:
+    def __add__(self, x: 'C') -> 'A': pass
+
+class B:
+    def __add__(self, x: A) -> 'C': pass
+
+class C: pass
+[out]
+main:3: error: Unsupported operand types for + ("A" and "B")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+main:5: error: Unsupported left operand type for + ("C")
+
+[case testMinusAssign]
+
+a, b, c = None, None, None # type: (A, B, C)
+a -= b   # Fail
+b -= a   # Fail
+c -= a   # Fail
+a -= c
+
+class A:
+    def __sub__(self, x: 'C') -> 'A': pass
+
+class B:
+    def __sub__(self, x: A) -> 'C': pass
+
+class C: pass
+[out]
+main:3: error: Unsupported operand types for - ("A" and "B")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+main:5: error: Unsupported left operand type for - ("C")
+
+[case testMulAssign]
+
+a, c = None, None # type: (A, C)
+a *= a   # Fail
+c *= a   # Fail
+a *= c
+
+class A:
+    def __mul__(self, x: 'C') -> 'A': pass
+
+class C: pass
+[out]
+main:3: error: Unsupported operand types for * ("A" and "A")
+main:4: error: Unsupported left operand type for * ("C")
+
+[case testMatMulAssign]
+a, c = None, None # type: (A, C)
+a @= a   # E: Unsupported operand types for @ ("A" and "A")
+c @= a   # E: Unsupported left operand type for @ ("C")
+a @= c
+
+class A:
+    def __matmul__(self, x: 'C') -> 'A': pass
+
+class C: pass
+
+[case testDivAssign]
+
+a, c = None, None # type: (A, C)
+a /= a   # Fail
+c /= a   # Fail
+a /= c
+
+class A:
+    def __truediv__(self, x: 'C') -> 'A': pass
+
+class C: pass
+[out]
+main:3: error: Unsupported operand types for / ("A" and "A")
+main:4: error: Unsupported left operand type for / ("C")
+
+[case testPowAssign]
+
+a, c = None, None # type: (A, C)
+a **= a   # Fail
+c **= a   # Fail
+a **= c
+
+class A:
+    def __pow__(self, x: 'C') -> 'A': pass
+
+class C: pass
+[out]
+main:3: error: Unsupported operand types for ** ("A" and "A")
+main:4: error: Unsupported left operand type for ** ("C")
+
+[case testSubtypesInOperatorAssignment]
+
+a, b = None, None # type: (A, B)
+b += b
+b += a
+a += b
+
+class A:
+    def __add__(self, x: 'A') -> 'B': pass
+
+class B(A): pass
+[out]
+
+[case testAdditionalOperatorsInOpAssign]
+
+a, c = None, None # type: (A, C)
+a &= a  # Fail
+a >>= a # Fail
+a //= a # Fail
+a &= c
+a >>= c
+a //= c
+class A:
+    def __and__(self, x: 'C') -> 'A': pass
+    def __rshift__(self, x: 'C') -> 'A': pass
+    def __floordiv__(self, x: 'C') -> 'A': pass
+class C: pass
+[out]
+main:3: error: Unsupported operand types for & ("A" and "A")
+main:4: error: Unsupported operand types for >> ("A" and "A")
+main:5: error: Unsupported operand types for // ("A" and "A")
+
+[case testInplaceOperatorMethods]
+import typing
+class A:
+    def __iadd__(self, x: int) -> 'A': pass
+    def __imul__(self, x: str) -> 'A': pass
+    def __imatmul__(self, x: str) -> 'A': pass
+a = A()
+a += 1
+a *= ''
+a @= ''
+a += '' # E: Argument 1 to "__iadd__" of "A" has incompatible type "str"; expected "int"
+a *= 1  # E: Argument 1 to "__imul__" of "A" has incompatible type "int"; expected "str"
+a @= 1  # E: Argument 1 to "__imatmul__" of "A" has incompatible type "int"; expected "str"
+
+[case testInplaceSetitem]
+class A(object):
+    def __init__(self):
+        self.a = 0
+
+    def __iadd__(self, a):
+        # type: (int) -> A
+        self.a += 1
+        return self
+
+a = A()
+b = [a]
+b[0] += 1
+[builtins fixtures/list.pyi]
+[out]
+
+
+-- Assert statement
+-- ----------------
+
+
+[case testAssert]
+import typing
+assert None + None # Fail
+assert None
+[out]
+main:2: error: Unsupported left operand type for + (None)
+
+
+-- Exception handling
+-- ------------------
+
+
+[case testRaiseStatement]
+
+e = None # type: BaseException
+f = None # type: MyError
+a = None # type: A
+raise a # Fail
+raise e
+raise f
+class A: pass
+class MyError(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+main:5: error: Exception must be derived from BaseException
+
+[case testRaiseClassobject]
+import typing
+class A: pass
+class MyError(BaseException): pass
+def f(): pass
+raise BaseException
+raise MyError
+raise A # E: Exception must be derived from BaseException
+raise object # E: Exception must be derived from BaseException
+raise f # E: Exception must be derived from BaseException
+[builtins fixtures/exception.pyi]
+
+[case testRaiseFromStatement]
+
+e = None # type: BaseException
+f = None # type: MyError
+a = None # type: A
+raise e from a # E: Exception must be derived from BaseException
+raise e from e
+raise e from f
+class A: pass
+class MyError(BaseException): pass
+[builtins fixtures/exception.pyi]
+
+[case testRaiseFromClassobject]
+import typing
+class A: pass
+class MyError(BaseException): pass
+def f(): pass
+raise BaseException from BaseException
+raise BaseException from MyError
+raise BaseException from A # E: Exception must be derived from BaseException
+raise BaseException from object # E: Exception must be derived from BaseException
+raise BaseException from f # E: Exception must be derived from BaseException
+[builtins fixtures/exception.pyi]
+
+[case testTryFinallyStatement]
+import typing
+try:
+    b = object() # type: A # Fail
+finally:
+    c = object() # type: A # Fail
+class A: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+[case testSimpleTryExcept]
+
+try:
+  pass
+except BaseException as e:
+  a, o = None, None # type: (BaseException, object)
+  e = a
+  e = o # Fail
+class A: pass
+class B: pass
+[builtins fixtures/exception.pyi]
+[out]
+main:7: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
+
+[case testTypeErrorInBlock]
+
+while object:
+  x = None # type: A
+  x = object()
+  x = B()
+class A: pass
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testTypeErrorInvolvingBaseException]
+
+x, a = None, None # type: (BaseException, A)
+a = BaseException()  # Fail
+a = object()         # Fail
+x = object()         # Fail
+x = A()              # Fail
+x = BaseException()
+class A: pass
+[builtins fixtures/exception.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "BaseException", variable has type "A")
+main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
+main:6: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException")
+
+[case testSimpleTryExcept2]
+import typing
+try:
+  pass
+except BaseException as e:
+  e = object() # Fail
+  e = BaseException()
+[builtins fixtures/exception.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
+
+[case testBaseClassAsExceptionTypeInExcept]
+import typing
+try:
+  pass
+except Err as e:
+  e = BaseException() # Fail
+  e = Err()
+class Err(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
+
+[case testMultipleExceptHandlers]
+import typing
+try:
+    pass
+except BaseException as e:
+    pass
+except Err as f:
+    f = BaseException() # Fail
+    f = Err()
+class Err(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+main:7: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
+
+[case testTryExceptStatement]
+import typing
+try:
+    a = B() # type: A       # Fail
+except BaseException as e:
+    e = A()             # Fail
+    e = Err()
+except Err as f:
+    f = BaseException() # Fail
+    f = Err()
+class A: pass
+class B: pass
+class Err(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException")
+main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
+
+[case testTryExceptWithinFunction]
+import typing
+def f() -> None:
+  try: pass
+  except BaseException as e:
+    e = object() # Fail
+    e = BaseException()
+  except Err as f:
+    f = BaseException() # Fail
+    f = Err()
+class Err(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
+main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
+
+[case testTryExceptFlow]
+def f() -> None:
+  x = 1
+  try:
+    pass
+  except:
+    raise
+  x + 'a' # E: Unsupported left operand type for + ("int")
+[builtins fixtures/exception.pyi]
+[out]
+
+[case testTryWithElse]
+import typing
+try: pass
+except BaseException: pass
+else:
+  object(None) # E: Too many arguments for "object"
+[builtins fixtures/exception.pyi]
+
+[case testRedefinedFunctionInTryWithElse]
+def f() -> None: pass
+try:
+    pass
+except BaseException:
+    f2 = f
+else:
+    def f2() -> str: pass
+try:
+    pass
+except BaseException:
+    f3 = f
+else:
+    def f3() -> None: pass
+[builtins fixtures/exception.pyi]
+[out]
+main:7: error: Incompatible redefinition (redefinition with type Callable[[], str], original type Callable[[], None])
+
+[case testExceptWithoutType]
+import typing
+try:
+    -None # E: Unsupported operand type for unary - (None)
+except:
+    ~None # E: Unsupported operand type for ~ (None)
+[builtins fixtures/exception.pyi]
+
+[case testRaiseWithoutArgument]
+import typing
+try:
+    None
+except:
+    raise
+[builtins fixtures/exception.pyi]
+
+[case testExceptWithMultipleTypes]
+import typing
+class E1(BaseException): pass
+class E2(E1): pass
+try:
+    pass
+except (E1, E2): pass
+except (E1, object): pass # E: Exception type must be derived from BaseException
+except (object, E2): pass # E: Exception type must be derived from BaseException
+except (E1, (E2,)): pass  # E: Exception type must be derived from BaseException
+
+except (E1, E2): pass
+except ((E1, E2)): pass
+except (((E1, E2))): pass
+[builtins fixtures/exception.pyi]
+
+[case testExceptWithMultipleTypes2]
+import typing
+class E1(BaseException): pass
+class E2(E1): pass
+try:
+    pass
+except (E1, E2) as e1:
+    x = e1 # type: E1
+    y = e1 # type: E2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E2")
+except (E2, E1) as e2:
+    a = e2 # type: E1
+    b = e2 # type: E2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E2")
+except (E1, E2, int) as e3: # E: Exception type must be derived from BaseException
+    pass
+[builtins fixtures/exception.pyi]
+
+[case testExceptWithMultipleTypes3]
+import typing
+class E1(BaseException): pass
+class E1_1(E1): pass
+class E1_2(E1): pass
+try: pass
+except (E1, E1_1, E1_2) as e1:
+    x = e1 # type: E1
+    y = e1 # type: E1_1 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_1")
+    z = e1 # type: E1_2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_2")
+except (E1_1, E1_2) as e2:
+    a = e2 # type: E1
+    b = e2 # type: E1_1 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_1")
+    c = e2 # type: E1_2 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_2")
+[builtins fixtures/exception.pyi]
+
+[case testExceptWithMultipleTypes4]
+from typing import Tuple, Type, Union
+
+class E1(BaseException): pass
+class E2(BaseException): pass
+class E3(BaseException): pass
+
+def variadic(exc: Tuple[Type[E1], ...]) -> None:
+    try:
+        pass
+    except exc as e:
+        reveal_type(e)  # E: Revealed type is '__main__.E1'
+
+def union(exc: Union[Type[E1], Type[E2]]) -> None:
+    try:
+        pass
+    except exc as e:
+        reveal_type(e)  # E: Revealed type is 'Union[__main__.E1, __main__.E2]'
+
+def tuple_in_union(exc: Union[Type[E1], Tuple[Type[E2], Type[E3]]]) -> None:
+    try:
+        pass
+    except exc as e:
+        reveal_type(e)  # E: Revealed type is 'Union[__main__.E1, __main__.E2, __main__.E3]'
+
+def variadic_in_union(exc: Union[Type[E1], Tuple[Type[E2], ...]]) -> None:
+    try:
+        pass
+    except exc as e:
+        reveal_type(e)  # E: Revealed type is 'Union[__main__.E1, __main__.E2]'
+
+def nested_union(exc: Union[Type[E1], Union[Type[E2], Type[E3]]]) -> None:
+    try:
+        pass
+    except exc as e:
+        reveal_type(e)  # E: Revealed type is 'Union[__main__.E1, __main__.E2, __main__.E3]'
+
+def error_in_union(exc: Union[Type[E1], int]) -> None:
+    try:
+        pass
+    except exc as e:  # E: Exception type must be derived from BaseException
+        pass
+
+def error_in_variadic(exc: Tuple[int, ...]) -> None:
+    try:
+        pass
+    except exc as e:  # E: Exception type must be derived from BaseException
+        pass
+
+[builtins fixtures/tuple.pyi]
+
+[case testExceptWithAnyTypes]
+from typing import Any
+
+E1 = None  # type: Any
+class E2(BaseException): pass
+class NotBaseDerived: pass
+
+try:
+    pass
+except BaseException as e1:
+    reveal_type(e1)  # E: Revealed type is 'builtins.BaseException'
+except (E1, BaseException) as e2:
+    reveal_type(e2)  # E: Revealed type is 'Union[Any, builtins.BaseException]'
+except (E1, E2) as e3:
+    reveal_type(e3)  # E: Revealed type is 'Union[Any, __main__.E2]'
+except (E1, E2, BaseException) as e4:
+    reveal_type(e4)  # E: Revealed type is 'Union[Any, builtins.BaseException]'
+
+try: pass
+except E1 as e1:
+    reveal_type(e1)  # E: Revealed type is 'Any'
+except E2 as e2:
+    reveal_type(e2)  # E: Revealed type is '__main__.E2'
+except NotBaseDerived as e3:  # E: Exception type must be derived from BaseException
+    pass
+except (NotBaseDerived, E1) as e4:  # E: Exception type must be derived from BaseException
+    pass
+except (NotBaseDerived, E2) as e5:  # E: Exception type must be derived from BaseException
+    pass
+except (NotBaseDerived, E1, E2) as e6:  # E: Exception type must be derived from BaseException
+    pass
+except (E1, E2, NotBaseDerived) as e6:  # E: Exception type must be derived from BaseException
+    pass
+[builtins fixtures/exception.pyi]
+
+[case testReuseTryExceptionVariable]
+import typing
+class E1(BaseException): pass
+class E2(BaseException): pass
+try: pass
+except E1 as e: pass
+try: pass
+except E1 as e: pass
+try: pass
+except E2 as e: pass
+e + 1 # E: Trying to read deleted variable 'e'
+e = E1() # E: Assignment to variable 'e' outside except: block
+[builtins fixtures/exception.pyi]
+
+[case testReuseDefinedTryExceptionVariable]
+import typing
+class E1(BaseException): pass
+class E2(BaseException): pass
+e = 1
+e = 1
+try: pass
+except E1 as e: pass
+e = 1 # E: Assignment to variable 'e' outside except: block
+e = E1() # E: Assignment to variable 'e' outside except: block
+[builtins fixtures/exception.pyi]
+
+[case testExceptionVariableReuseInDeferredNode1]
+def f(*a: BaseException) -> int:
+    x
+    try: pass
+    except BaseException as err: pass
+    try: pass
+    except BaseException as err: f(err)
+    return 0
+x = f()
+[builtins fixtures/exception.pyi]
+
+[case testExceptionVariableReuseInDeferredNode2]
+def f(*a: BaseException) -> int:
+    try: pass
+    except BaseException as err: pass
+    x
+    try: pass
+    except BaseException as err: f(err)
+    return 0
+x = f()
+[builtins fixtures/exception.pyi]
+
+[case testExceptionVariableReuseInDeferredNode3]
+def f(*a: BaseException) -> int:
+    try: pass
+    except BaseException as err: pass
+    try: pass
+    except BaseException as err: f(err)
+    x
+    return 0
+x = f()
+[builtins fixtures/exception.pyi]
+
+[case testExceptionVariableReuseInDeferredNode4]
+class EA(BaseException):
+    a = None  # type: int
+class EB(BaseException):
+    b = None  # type: str
+def f(*arg: BaseException) -> int:
+    x
+    try: pass
+    except EA as err:
+        f(err)
+        a = err.a
+        reveal_type(a)
+    try: pass
+    except EB as err:
+        f(err)
+        b = err.b
+        reveal_type(b)
+    return 0
+x = f()
+[builtins fixtures/exception.pyi]
+[out]
+main:11: error: Revealed type is 'builtins.int'
+main:16: error: Revealed type is 'builtins.str'
+
+[case testExceptionVariableReuseInDeferredNode5]
+class EA(BaseException):
+    a = None  # type: int
+class EB(BaseException):
+    b = None  # type: str
+def f(*arg: BaseException) -> int:
+    try: pass
+    except EA as err:
+        f(err)
+        a = err.a
+        reveal_type(a)
+    x
+    try: pass
+    except EB as err:
+        f(err)
+        b = err.b
+        reveal_type(b)
+    return 0
+x = f()
+[builtins fixtures/exception.pyi]
+[out]
+main:10: error: Revealed type is 'builtins.int'
+main:16: error: Revealed type is 'builtins.str'
+
+[case testExceptionVariableReuseInDeferredNode6]
+class EA(BaseException):
+    a = None  # type: int
+class EB(BaseException):
+    b = None  # type: str
+def f(*arg: BaseException) -> int:
+    try: pass
+    except EA as err:
+        f(err)
+        a = err.a
+        reveal_type(a)
+    try: pass
+    except EB as err:
+        f(err)
+        b = err.b
+        reveal_type(b)
+    x
+    return 0
+x = f()
+[builtins fixtures/exception.pyi]
+[out]
+main:10: error: Revealed type is 'builtins.int'
+main:15: error: Revealed type is 'builtins.str'
+
+[case testArbitraryExpressionAsExceptionType]
+import typing
+a = BaseException
+try: pass
+except a as b:
+    b = BaseException()
+    b = object() # E: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
+[builtins fixtures/exception.pyi]
+
+[case testInvalidExceptionCallable]
+import typing
+def exc() -> BaseException: pass
+try: pass
+except exc as e: pass             # E: Exception type must be derived from BaseException
+except BaseException() as b: pass # E: Exception type must be derived from BaseException
+[builtins fixtures/exception.pyi]
+
+[case testTupleValueAsExceptionType]
+import typing
+def exc() -> BaseException: pass
+class E1(BaseException): pass
+class E1_1(E1): pass
+class E1_2(E1): pass
+
+exs1 = (E1, E1_1, E1_2)
+try: pass
+except exs1 as e1:
+    x = e1 # type: E1
+    y = e1 # type: E1_1 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_1")
+    z = e1 # type: E1_2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_2")
+
+exs2 = (E1_1, E1_2)
+try: pass
+except exs2 as e2:
+    a = e2 # type: E1
+    b = e2 # type: E1_1 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_1")
+    c = e2 # type: E1_2 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_2")
+
+exs3 = (E1, (E1_1, (E1_2,)))
+try: pass
+except exs3 as e3: pass  # E: Exception type must be derived from BaseException
+[builtins fixtures/exception.pyi]
+
+[case testInvalidTupleValueAsExceptionType]
+import typing
+def exc() -> BaseException: pass
+class E1(BaseException): pass
+class E2(E1): pass
+
+exs1 = (E1, E2, int)
+try: pass
+except exs1 as e: pass # E: Exception type must be derived from BaseException
+[builtins fixtures/exception.pyi]
+
+[case testOverloadedExceptionType]
+from foo import *
+[file foo.pyi]
+from typing import overload
+class E(BaseException):
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, x) -> None: pass
+try:
+    pass
+except E as e:
+    e = E()
+    e = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "E")
+[builtins fixtures/exception.pyi]
+
+[case testExceptionWithAnyBaseClass]
+from typing import Any
+E = None  # type: Any
+class EE(E): pass
+raise EE()
+raise EE
+[builtins fixtures/exception.pyi]
+
+[case testExceptionIsType]
+from typing import Type
+class B(BaseException): pass
+def f(e: Type[B]):
+    try: pass
+    except e: pass
+def g(e: Type[BaseException]):
+    try: pass
+    except e as err:
+        reveal_type(err)
+def h(e: Type[int]):
+    try: pass
+    except e: pass
+[builtins fixtures/exception.pyi]
+[out]
+main:9: error: Revealed type is 'builtins.BaseException'
+main:12: error: Exception type must be derived from BaseException
+
+
+-- Del statement
+-- -------------
+
+
+[case testDelStmtWithIndex]
+a, b = None, None # type: (A, B)
+del b[a]
+del b[b] # E: Argument 1 to "__delitem__" of "B" has incompatible type "B"; expected "A"
+del a[a] # E: "A" has no attribute "__delitem__"
+del a[b] # E: "A" has no attribute "__delitem__"
+class B:
+  def __delitem__(self, index: 'A'): pass
+class A: pass
+
+[case testDelStmtWithAttribute]
+class A:
+    def f(self): pass
+    x = 0
+a = A()
+del a.f
+del a.x
+del a.z # E: "A" has no attribute "z"
+
+[case testDelStatementWithTuple]
+class A:
+    x = 0
+a = A()
+del a.x, a.y # E: "A" has no attribute "y"
+
+
+[case testDelStatementWithAssignmentSimple]
+a = 1
+a + 1
+del a
+a + 1 # E: Trying to read deleted variable 'a'
+[builtins fixtures/ops.pyi]
+
+[case testDelStatementWithAssignmentTuple]
+a = 1
+b = 1
+del (a, b)
+b + 1 # E: Trying to read deleted variable 'b'
+[builtins fixtures/ops.pyi]
+
+[case testDelStatementWithAssignmentClass]
+class C:
+    a = 1
+
+c = C()
+c.a = 1
+c.a + 1
+del c.a
+c.a + 1
+[builtins fixtures/ops.pyi]
+
+[case testDelStatementWithConditions]
+x = 5
+del x
+if x: ...  # E: Trying to read deleted variable 'x'
+
+def f(x):
+    return x
+
+if 0: ...
+elif f(x): ...  # E: Trying to read deleted variable 'x'
+
+while x == 5: ...  # E: Trying to read deleted variable 'x'
+
+-- Yield statement
+-- ---------------
+
+
+[case testSimpleYield]
+from typing import Iterator
+def f() -> Iterator[int]:
+    yield 1
+    yield '' # E: Incompatible types in yield (actual type "str", expected type "int")
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldInFunctionReturningGenerator]
+from typing import Generator
+def f() -> Generator[int, None, None]:
+    yield 1
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldInFunctionReturningIterable]
+from typing import Iterable
+def f() -> Iterable[int]:
+    yield 1
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldInFunctionReturningObject]
+def f() -> object:
+    yield 1
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldInFunctionReturningAny]
+from typing import Any
+def f() -> Any:
+    yield object()
+[out]
+
+[case testYieldInFunctionReturningFunction]
+from typing import Callable
+def f() -> Callable[[], None]: # E: The return type of a generator function should be "Generator" or one of its supertypes
+    yield object()
+[out]
+
+[case testYieldInDynamicallyTypedFunction]
+import typing
+def f():
+    yield f
+
+[case testWithInvalidInstanceReturnType]
+import typing
+def f() -> int: # E: The return type of a generator function should be "Generator" or one of its supertypes
+    yield 1
+[builtins fixtures/for.pyi]
+[out]
+
+[case testTypeInferenceContextAndYield]
+from typing import List, Iterator
+def f() -> 'Iterator[List[int]]':
+    yield []
+    yield [object()] # E: List item 0 has incompatible type "object"
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldAndReturnWithoutValue]
+from typing import Iterator
+def f() -> Iterator[int]:
+    yield 1
+    return
+[builtins fixtures/for.pyi]
+
+[case testYieldWithNoValue]
+from typing import Iterator
+def f() -> Iterator[None]:
+    yield
+[builtins fixtures/for.pyi]
+
+[case testYieldWithNoValueWhenValueRequired]
+from typing import Iterator
+def f() -> Iterator[int]:
+    yield  # E: Yield value expected
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldWithExplicitNone]
+from typing import Iterator
+def f() -> Iterator[None]:
+    yield None
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Yield from statement
+-- --------------------
+
+-- Iterables
+-- ----------
+
+[case testSimpleYieldFromWithIterator]
+from typing import Iterator
+def g() -> Iterator[str]:
+    yield '42'
+def h() -> Iterator[int]:
+    yield 42
+def f() -> Iterator[str]:
+    yield from g()
+    yield from h()  # E: Incompatible types in "yield from" (actual type "int", expected type "str")
+[out]
+
+[case testYieldFromAppliedToAny]
+from typing import Any
+def g() -> Any:
+    yield object()
+def f() -> Any:
+    yield from g()
+[out]
+
+[case testYieldFromInFunctionReturningFunction]
+from typing import Iterator, Callable
+def g() -> Iterator[int]:
+    yield 42
+def f() -> Callable[[], None]:  # E: The return type of a generator function should be "Generator" or one of its supertypes
+    yield from g()
+[out]
+
+[case testYieldFromNotIterableReturnType]
+from typing import Iterator
+def g() -> Iterator[int]:
+    yield 42
+def f() -> int:  # E: The return type of a generator function should be "Generator" or one of its supertypes
+    yield from g()
+[out]
+
+[case testYieldFromNotAppliedIterator]
+from typing import Iterator
+def g() -> int:
+    return 42
+def f() -> Iterator[int]:
+    yield from g()  # E: "yield from" can't be applied to "int"
+[out]
+
+[case testYieldFromCheckIncompatibleTypesTwoIterables]
+from typing import List, Iterator
+def g() -> Iterator[List[int]]:
+    yield [2, 3, 4]
+def f() -> Iterator[List[int]]:
+    yield from g()
+    yield from [1, 2, 3]  # E: Incompatible types in "yield from" (actual type "int", expected type List[int])
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldFromNotAppliedToNothing]
+def h():
+    yield from  # E: invalid syntax
+[out]
+
+[case testYieldFromAndYieldTogether]
+from typing import Iterator
+def f() -> Iterator[str]:
+    yield "g1 ham"
+    yield from g()
+    yield "g1 eggs"
+def g() -> Iterator[str]:
+    yield "g2 spam"
+    yield "g2 more spam"
+[out]
+
+[case testYieldFromAny]
+from typing import Iterator
+def f(a):
+    b = yield from a
+    return b
+[out]
+
+-- With statement
+-- --------------
+
+
+[case testSimpleWith]
+import typing
+class A:
+    def __enter__(self) -> None: pass
+    def __exit__(self, x, y, z) -> None: pass
+with A():
+    object(A) # E: Too many arguments for "object"
+
+[case testWithStmtAndInvalidExit]
+import typing
+class A:
+    def __enter__(self) -> None: pass
+    def __exit__(self, x, y) -> None: pass
+with A(): # E: Too many arguments for "__exit__" of "A"
+    pass
+
+[case testWithStmtAndMissingExit]
+import typing
+class A:
+    def __enter__(self) -> None: pass
+with A(): # E: "A" has no attribute "__exit__"
+    pass
+
+[case testWithStmtAndInvalidEnter]
+import typing
+class A:
+    def __enter__(self, x) -> None: pass
+    def __exit__(self, x, y, z) -> None: pass
+with A(): # E: Too few arguments for "__enter__" of "A"
+    pass
+
+[case testWithStmtAndMissingEnter]
+import typing
+class A:
+    def __exit__(self, x, y, z) -> None: pass
+with A(): # E: "A" has no attribute "__enter__"
+    pass
+
+[case testWithStmtAndMultipleExprs]
+import typing
+class A:
+    def __enter__(self) -> None: pass
+    def __exit__(self, x, y, z) -> None: pass
+class B:
+    def __enter__(self) -> None: pass
+with A(), B(): # E: "B" has no attribute "__exit__"
+    pass
+with B(), A(): # E: "B" has no attribute "__exit__"
+    pass
+
+[case testWithStmtAndResult]
+import typing
+class B: pass
+class A:
+    def __enter__(self) -> B: pass
+    def __exit__(self, x, y, z): pass
+with A() as b:
+    b = B()
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testWithStmtAndMultipleResults]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class B: pass
+class C: pass
+class A(Generic[t]):
+    def __enter__(self) -> t: pass
+    def __exit__(self, x, y, z): pass
+a_b = A() # type: A[B]
+a_c = A() # type: A[C]
+with a_b as b, a_c as c:
+    b = B()
+    c = C()
+    b = c # E: Incompatible types in assignment (expression has type "C", variable has type "B")
+    c = b # E: Incompatible types in assignment (expression has type "B", variable has type "C")
+
+[case testWithStmtAndComplexTarget]
+from typing import Tuple
+class A:
+    def __enter__(self) -> Tuple[int, str]: pass
+    def __exit__(self, x, y, z): pass
+with A() as (a, b):
+    a = 1
+    b = ''
+    a = b # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[builtins fixtures/tuple.pyi]
+
+[case testWithStmtTypeComment]
+
+from typing import Union
+class A:
+    def __enter__(self) -> int: pass
+    def __exit__(self, x, y, z): pass
+
+with A():  # type: int  # E: Invalid type comment
+    pass
+
+with A() as a:  # type: int
+    pass
+
+with A() as b:  # type: str  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    pass
+
+with A() as c:  # type: int, int  # E: Invalid tuple literal type
+    pass
+
+with A() as d:  # type: Union[int, str]
+    reveal_type(d)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+
+[case testWithStmtTupleTypeComment]
+
+from typing import Tuple
+class A:
+    def __enter__(self) -> Tuple[int, int]: pass
+    def __exit__(self, x, y, z): pass
+
+with A():
+    pass
+
+with A() as a:  # type: Tuple[int, int]
+    pass
+
+with A() as b:  # type: Tuple[int, str]  # E: Incompatible types in assignment (expression has type "Tuple[int, int]", variable has type "Tuple[int, str]")
+    pass
+
+with A() as (c, d):  # type: int, int
+    pass
+
+with A() as (e, f):  # type: Tuple[int, int]
+    pass
+
+with A() as (g, h):  # type: int  # E: Tuple type expected for multiple variables
+    pass
+
+with A() as (i, j):  # type: int, int, str  # E: Incompatible number of tuple items
+    pass
+
+with A() as (k, l):  # type: int, str  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    pass
+[builtins fixtures/tuple.pyi]
+
+[case testWithStmtComplexTypeComment]
+
+from typing import Tuple
+class A:
+    def __enter__(self) -> Tuple[int, int]: pass
+    def __exit__(self, x, y, z): pass
+
+class B:
+    def __enter__(self) -> str: pass
+    def __exit__(self, x, y, z): pass
+
+with A() as a, A() as (b, c), B() as d:  # type: Tuple[int, int], (int, int), str
+    pass
+
+with A() as e, A() as (f, g), B() as h:  # type: Tuple[int, int], Tuple[int, int], str
+    pass
+
+with A() as i, A() as (j, k), B() as l:  # type: (int, int), (int, int), str  # E: Invalid tuple literal type
+    pass
+
+with A(), A(), B() as m, A() as n, B(), B() as o:  # type: int, Tuple[int, int]  # E: Incompatible number of types for `with` targets
+    pass
+
+with A(), B(), B() as p, A(), A():  # type: str
+    pass
+[builtins fixtures/tuple.pyi]
+
+-- Chained assignment
+-- ------------------
+
+
+[case testChainedAssignment]
+import typing
+class A: pass
+class B: pass
+x = y = A()
+x = A()
+y = A()
+x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+y = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testChainedAssignment2]
+import typing
+def f() -> None:
+    x = 1
+    y = 'x'
+    x = y = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    x = y = 1   # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[builtins fixtures/primitives.pyi]
+[out]
+
+[case testChainedAssignmentWithType]
+
+x = y = None # type: int
+x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+x = 1
+y = 1
+
+
+-- Star assignment
+-- ---------------
+
+
+[case testAssignListToStarExpr]
+from typing import List
+bs, cs = None, None # type: List[A], List[B]
+*bs, b = bs
+*bs, c = cs  # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+*ns, c = cs
+nc = cs
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+
+-- Type aliases
+-- ------------
+
+
+[case testSimpleTypeAlias]
+import typing
+foo = int
+def f(x: foo) -> None: pass
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testTypeAliasDefinedInAModule]
+import typing
+import m
+def f(x: m.foo) -> None: pass
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[file m.py]
+import typing
+foo = int
+
+[case testTypeAliasDefinedInAModule2]
+import typing
+from m import foo
+def f(x: foo) -> None: pass
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[file m.py]
+import typing
+foo = int
+
+
+-- nonlocal and global
+-- -------------------
+
+
+[case testTypeOfGlobalUsed]
+import typing
+g = A()
+def f() -> None:
+    global g
+    g = B()
+
+class A(): pass
+class B(): pass
+[out]
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testTypeOfNonlocalUsed]
+import typing
+def f() -> None:
+    a = A()
+    def g() -> None:
+        nonlocal a
+        a = B()
+
+class A(): pass
+class B(): pass
+[out]
+main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testTypeOfOuterMostNonlocalUsed]
+import typing
+def f() -> None:
+    a = A()
+    def g() -> None:
+        a = B()
+        def h() -> None:
+            nonlocal a
+            a = A()
+            a = B()
+
+class A(): pass
+class B(): pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testAugmentedAssignmentIntFloat]
+weight0 = 65.5
+reveal_type(weight0)  # E: Revealed type is 'builtins.float'
+weight0 = 65
+reveal_type(weight0)  # E: Revealed type is 'builtins.int'
+weight0 *= 'a'  # E: Incompatible types in assignment (expression has type "str", variable has type "float")
+weight0 *= 0.5
+reveal_type(weight0)  # E: Revealed type is 'builtins.float'
+weight0 *= object()  # E: Unsupported operand types for * ("float" and "object")
+reveal_type(weight0) # E: Revealed type is 'builtins.float'
+
+[builtins fixtures/float.pyi]
+
+[case testAugmentedAssignmentIntFloatMember]
+class A:
+    def __init__(self) -> None:
+        self.weight0 = 65.5
+        reveal_type(self.weight0)  # E: Revealed type is 'builtins.float'
+        self.weight0 = 65
+        reveal_type(self.weight0)  # E: Revealed type is 'builtins.int'
+        self.weight0 *= 'a'  # E: Incompatible types in assignment (expression has type "str", variable has type "float")
+        self.weight0 *= 0.5
+        reveal_type(self.weight0)  # E: Revealed type is 'builtins.float'
+        self.weight0 *= object()  # E: Unsupported operand types for * ("float" and "object")
+        reveal_type(self.weight0) # E: Revealed type is 'builtins.float'
+
+[builtins fixtures/float.pyi]
+
+[case testAugmentedAssignmentIntFloatDict]
+from typing import Dict
+d = {'weight0': 65.5}
+reveal_type(d['weight0'])  # E: Revealed type is 'builtins.float*'
+d['weight0'] = 65
+reveal_type(d['weight0'])  # E: Revealed type is 'builtins.float*'
+d['weight0'] *= 'a'  # E: Unsupported operand types for * ("float" and "str") # E: Incompatible types in assignment (expression has type "str", target has type "float")
+d['weight0'] *= 0.5
+reveal_type(d['weight0'])  # E: Revealed type is 'builtins.float*'
+d['weight0'] *= object()  # E: Unsupported operand types for * ("float" and "object")
+reveal_type(d['weight0']) # E: Revealed type is 'builtins.float*'
+
+[builtins fixtures/floatdict.pyi]
+
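The augmented-assignment cases above exercise mypy's int-to-float promotion for plain variables, attributes, and dict items. A minimal standalone sketch of the accepted pattern (ordinary Python, no test fixtures; behaviour as checked by mypy with default settings):

    weight: float = 65.5   # declared as float
    weight = 65            # accepted: int is promoted to float
    weight *= 0.5          # accepted: float * float stays float
    print(weight)          # prints 32.5 at runtime
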
diff --git a/test-data/unit/check-super.test b/test-data/unit/check-super.test
new file mode 100644
index 0000000..8853e3d
--- /dev/null
+++ b/test-data/unit/check-super.test
@@ -0,0 +1,117 @@
+-- Test cases for the type checker related to super().
+
+
+-- Supertype member reference
+-- --------------------------
+
+
+[case testAccessingSupertypeMethod]
+
+class B:
+  def f(self) -> 'B': pass
+class A(B):
+  def f(self) -> 'A':
+    a, b = None, None # type: (A, B)
+    a = super().f() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = super().g() # E: "g" undefined in superclass
+    b = super().f()
+    return a
+[out]
+
+[case testAccessingSuperTypeMethodWithArgs]
+from typing import Any
+class B:
+  def f(self, y: 'A') -> None: pass
+class A(B):
+  def f(self, y: Any) -> None:
+    a, b = None, None # type: (A, B)
+    super().f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A"
+    super().f(a)
+    self.f(b)
+    self.f(a)
+[out]
+
+[case testAccessingSuperInit]
+import typing
+class B:
+    def __init__(self, x: A) -> None: pass
+class A(B):
+  def __init__(self) -> None:
+    super().__init__(B(None)) # E: Argument 1 to "__init__" of "B" has incompatible type "B"; expected "A"
+    super().__init__()       # E: Too few arguments for "__init__" of "B"
+    super().__init__(A())
+[out]
+
+[case testAccessingSuperMemberWithDeepHierarchy]
+import typing
+class C:
+  def f(self) -> None: pass
+class B(C): pass
+class A(B):
+  def f(self) -> None:
+    super().g() # E: "g" undefined in superclass
+    super().f()
+[out]
+
+[case testAssignToBaseClassMethod]
+import typing
+class A:
+    def f(self) -> None: pass
+class B(A):
+    def g(self) -> None:
+        super().f = None
+[out]
+main:6: error: Invalid assignment target
+
+[case testSuperWithMultipleInheritance]
+import typing
+class A:
+  def f(self) -> None: pass
+class B:
+  def g(self, x: int) -> None: pass
+class C(A, B):
+    def f(self) -> None:
+        super().f()
+        super().g(1)
+        super().f(1) # E: Too many arguments for "f" of "A"
+        super().g() # E: Too few arguments for "g" of "B"
+        super().not_there() # E: "not_there" undefined in superclass
+[out]
+
+[case testSuperWithNew]
+class A:
+    def __new__(cls, x: int) -> 'A':
+        return object.__new__(cls)
+
+class B(A):
+    def __new__(cls, x: int, y: str = '') -> 'A':
+        super().__new__(cls, 1)
+        return super().__new__(cls, 1, '')  # E: Too many arguments for "__new__" of "A"
+B('')  # E: Argument 1 to "B" has incompatible type "str"; expected "int"
+B(1)
+B(1, 'x')
+[builtins fixtures/__new__.pyi]
+
+[case testSuperOutsideMethodNoCrash]
+class C:
+    a = super().whatever  # E: super() outside of a method is not supported
+
+reveal_type(C.a)  # E: Revealed type is 'Any'
+[out]
+
+[case testSuperWithUnknownBase]
+from typing import Any
+B = None  # type: Any
+class C(B):
+    def __init__(self, arg=0):
+        super(C, self).__init__(arg, arg=arg)
+[out]
+
+[case testSuperSilentInDynamicFunction]
+class A:
+    pass
+
+class B(A):
+    def foo(self):
+        super(B, self).foo() # Not an error
+[out]
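As a plain-Python illustration of the super() checks above (argument counts, unknown attributes), assuming only the standard library:

    class Base:
        def greet(self, name: str) -> str:
            return "hello " + name

    class Child(Base):
        def greet(self, name: str) -> str:
            # super().greet is checked against Base's signature; extra or
            # missing arguments and undefined attributes are reported.
            return super().greet(name) + "!"

    print(Child().greet("world"))   # hello world!
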
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
new file mode 100644
index 0000000..2776172
--- /dev/null
+++ b/test-data/unit/check-tuples.test
@@ -0,0 +1,943 @@
+-- Normal assignment and subtyping
+-- -------------------------------
+
+
+[case testTupleAssignmentWithTupleTypes]
+from typing import Tuple
+t1 = None # type: Tuple[A]
+t2 = None # type: Tuple[B]
+t3 = None # type: Tuple[A, A]
+t4 = None # type: Tuple[A, B]
+t5 = None # type: Tuple[B, A]
+
+t1 = t2 # E: Incompatible types in assignment (expression has type "Tuple[B]", variable has type "Tuple[A]")
+t1 = t3 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]")
+t3 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A]", variable has type "Tuple[A, A]")
+t3 = t4 # E: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "Tuple[A, A]")
+t3 = t5 # E: Incompatible types in assignment (expression has type "Tuple[B, A]", variable has type "Tuple[A, A]")
+
+# Ok
+t1 = t1
+t2 = t2
+t3 = t3
+t4 = t4
+t5 = t5
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testTupleSubtyping]
+from typing import Tuple
+t1 = None # type: Tuple[A, A]
+t2 = None # type: Tuple[A, B]
+t3 = None # type: Tuple[B, A]
+
+t2 = t1  # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]")
+t2 = t3  # E: Incompatible types in assignment (expression has type "Tuple[B, A]", variable has type "Tuple[A, B]")
+t3 = t1  # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[B, A]")
+t3 = t2  # E: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "Tuple[B, A]")
+
+t1 = t2
+t1 = t3
+
+class A: pass
+class B(A): pass
+[builtins fixtures/tuple.pyi]
+
+[case testTupleCompatibilityWithOtherTypes]
+from typing import Tuple
+a, o = None, None # type: (A, object)
+t = None # type: Tuple[A, A]
+
+a = t # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "A")
+t = o # E: Incompatible types in assignment (expression has type "object", variable has type "Tuple[A, A]")
+t = a # E: Incompatible types in assignment (expression has type "A", variable has type "Tuple[A, A]")
+# TODO: callable types + tuples
+
+# Ok
+o = t
+t = None
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+[case testNestedTupleTypes]
+from typing import Tuple
+t1 = None # type: Tuple[A, Tuple[A, A]]
+t2 = None # type: Tuple[B, Tuple[B, B]]
+
+t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, Tuple[A, A]]", variable has type "Tuple[B, Tuple[B, B]]")
+t1 = t2
+
+class A: pass
+class B(A): pass
+[builtins fixtures/tuple.pyi]
+
+[case testNestedTupleTypes2]
+from typing import Tuple
+t1 = None # type: Tuple[A, Tuple[A, A]]
+t2 = None # type: Tuple[B, Tuple[B, B]]
+
+t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, Tuple[A, A]]", variable has type "Tuple[B, Tuple[B, B]]")
+t1 = t2
+
+class A: pass
+class B(A): pass
+[builtins fixtures/tuple.pyi]
+
+[case testSubtypingWithNamedTupleType]
+from typing import Tuple
+t1 = None # type: Tuple[A, A]
+t2 = None # type: tuple
+
+t1 = t2 # E: Incompatible types in assignment (expression has type Tuple[Any, ...], variable has type "Tuple[A, A]")
+t2 = t1
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+[case testTupleInitializationWithNone]
+from typing import Tuple
+t = None # type: Tuple[A, A]
+t = None
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+
+-- Tuple expressions
+-- -----------------
+
+
+[case testTupleExpressions]
+from typing import Tuple
+t1 = None # type: tuple
+t2 = None # type: Tuple[A]
+t3 = None # type: Tuple[A, B]
+
+a, b, c = None, None, None # type: (A, B, C)
+
+t2 = ()        # E: Incompatible types in assignment (expression has type "Tuple[]", variable has type "Tuple[A]")
+t2 = (a, a)    # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]")
+t3 = (a, a)    # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]")
+t3 = (b, b)    # E: Incompatible types in assignment (expression has type "Tuple[B, B]", variable has type "Tuple[A, B]")
+t3 = (a, b, a) # E: Incompatible types in assignment (expression has type "Tuple[A, B, A]", variable has type "Tuple[A, B]")
+
+t1 = ()
+t1 = (a,)
+t2 = (a,)
+t3 = (a, b)
+t3 = (a, c)
+t3 = (None, None)
+
+class A: pass
+class B: pass
+class C(B): pass
+[builtins fixtures/tuple.pyi]
+
+[case testVoidValueInTuple]
+import typing
+(None, f()) # E: "f" does not return a value
+(f(), None) # E: "f" does not return a value
+
+def f() -> None: pass
+[builtins fixtures/tuple.pyi]
+
+
+-- Indexing
+-- --------
+
+
+[case testIndexingTuples]
+from typing import Tuple
+t1 = None # type: Tuple[A, B]
+t2 = None # type: Tuple[A]
+t3 = None # type: Tuple[A, B, C, D, E]
+a, b = None, None # type: (A, B)
+x = None # type: Tuple[A, B, C]
+y = None # type: Tuple[A, C, E]
+n = 0
+
+a = t1[1] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = t1[0] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+t1[2]     # E: Tuple index out of range
+t1[3]     # E: Tuple index out of range
+t2[1]     # E: Tuple index out of range
+reveal_type(t1[n])     # E: Revealed type is 'Union[__main__.A, __main__.B]'
+reveal_type(t3[n:])    # E: Revealed type is 'Union[__main__.A, __main__.B, __main__.C, __main__.D, __main__.E]'
+b = t1[(0)] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a = t1[0]
+b = t1[1]
+b = t1[-1]
+a = t1[(0)]
+x = t3[0:3] # type (A, B, C)
+y = t3[0:5:2] # type (A, C, E)
+x = t3[:-2] # type (A, B, C)
+
+class A: pass
+class B: pass
+class C: pass
+class D: pass
+class E: pass
+[builtins fixtures/tuple.pyi]
+
+[case testIndexingTuplesWithNegativeIntegers]
+from typing import Tuple
+t1 = None  # type: Tuple[A, B]
+t2 = None  # type: Tuple[A]
+a, b = None, None  # type: A, B
+
+a = t1[-1] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = t1[-2] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+t1[-3]     # E: Tuple index out of range
+t1[-4]     # E: Tuple index out of range
+b = t2[(-1)] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a = t1[-2]
+b = t1[-1]
+a = t2[(-1)]
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testAssigningToTupleItems]
+from typing import Tuple
+t = None # type: Tuple[A, B]
+n = 0
+
+t[0] = A() # E: Unsupported target for indexed assignment
+t[2] = A() # E: Unsupported target for indexed assignment
+t[n] = A() # E: Unsupported target for indexed assignment
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+
+-- Multiple assignment
+-- -------------------
+
+
+[case testMultipleAssignmentWithTuples]
+from typing import Tuple
+t1 = None # type: Tuple[A, B]
+t2 = None # type: Tuple[A, B, A]
+a, b = None, None # type: (A, B)
+
+a, a = t1 # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b, b = t1 # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a, b, b = t2 # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a, b = t1
+a, b, a = t2
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleAssignmentWithInvalidNumberOfValues]
+from typing import Tuple
+t1 = None # type: Tuple[A, A, A]
+a = None # type: A
+
+a, a = t1       # E: Too many values to unpack (2 expected, 3 provided)
+a, a, a, a = t1 # E: Need more than 3 values to unpack (4 expected)
+
+a, a, a = t1
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleAssignmentWithTupleExpressionRvalue]
+
+a, b = None, None # type: (A, B)
+
+a, b = a, a # Fail
+a, b = b, a # Fail
+
+a, b = a, b
+a, a = a, a
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+[out]
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testSubtypingInMultipleAssignment]
+
+a, b = None, None # type: (A, B)
+
+b, b = a, b # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b, b = b, a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a, b = b, b
+b, a = b, b
+
+class A: pass
+class B(A): pass
+[builtins fixtures/tuple.pyi]
+
+[case testInitializationWithMultipleValues]
+
+a, b = None, None # type: (A, B)
+
+a1, b1 = a, a # type: (A, B)  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a2, b2 = b, b # type: (A, B)  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a3, b3 = a # type: (A, B)     # E: '__main__.A' object is not iterable
+a4, b4 = None # type: (A, B)  # E: 'builtins.None' object is not iterable
+a5, b5 = a, b, a # type: (A, B)  # E: Too many values to unpack (2 expected, 3 provided)
+
+ax, bx = a, b # type: (A, B)
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleAssignmentWithNonTupleRvalue]
+
+a, b = None, None # type: (A, B)
+def f(): pass
+
+a, b = None # E: 'builtins.None' object is not iterable
+a, b = a   # E: '__main__.A' object is not iterable
+a, b = f   # E: 'def () -> Any' object is not iterable
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleAssignmentWithIndexedLvalues]
+
+a, b = None, None # type: (A, B)
+aa, bb = None, None # type: (AA, BB)
+
+a[a], b[b] = a, bb   # E: Incompatible types in assignment (expression has type "A", target has type "AA")
+a[a], b[b] = aa, b   # E: Incompatible types in assignment (expression has type "B", target has type "BB")
+a[aa], b[b] = aa, bb # E: Invalid index type "AA" for "A"; expected type "A"
+a[a], b[bb] = aa, bb # E: Invalid index type "BB" for "B"; expected type "B"
+a[a], b[b] = aa, bb
+
+class A:
+    def __setitem__(self, x: 'A', y: 'AA') -> None: pass
+class B:
+    def __setitem__(self, x: 'B', y: 'BB') -> None: pass
+
+class AA: pass
+class BB: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleDeclarationWithParentheses]
+
+(a, b) = (None, None) # type: int, str
+a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+b = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+a = 1
+b = ''
+
+[case testMultipleAssignmentWithExtraParentheses]
+
+a, b = None, None # type: (A, B)
+
+(a, b) = (a, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+(a, b) = (b, b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+((a), (b)) = ((a), (a))  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+((a), (b)) = ((b), (b))  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[a, b] = a, a  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+[a, b] = b, b  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+(a, b) = (a, b)
+((a), (b)) = ((a), (b))
+[a, b] = a, b
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleAssignmentUsingSingleTupleType]
+from typing import Tuple
+a, b = None, None  # type: Tuple[int, str]
+a = 1
+b = ''
+a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+b = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testMultipleAssignmentWithMixedVariables]
+a = b, c = 1, 1
+x, y = p, q = 1, 1
+u, v, w = r, s = 1, 1 # E: Need more than 2 values to unpack (3 expected)
+d, e = f, g, h = 1, 1 # E: Need more than 2 values to unpack (3 expected)
+
+
+-- Assignment to starred expressions
+-- ---------------------------------
+
+
+[case testAssignmentToStarMissingAnnotation]
+from typing import List
+t = 1, 2
+a, b, *c = 1, 2  # E: Need type annotation for variable
+aa, bb, *cc = t  # E: Need type annotation for variable
+[builtins fixtures/list.pyi]
+
+[case testAssignmentToStarAnnotation]
+from typing import List
+li, lo = None, None # type: List[int], List[object]
+a, b, *c = 1, 2  # type: int, int, List[int]
+c = lo  # E: Incompatible types in assignment (expression has type List[object], variable has type List[int])
+c = li
+[builtins fixtures/list.pyi]
+
+[case testAssignmentToStarCount1]
+from typing import List
+ca = None # type: List[int]
+c = [1]
+a, b, *c = 1,  # E: Need more than 1 value to unpack (2 expected)
+a, b, *c = 1, 2
+a, b, *c = 1, 2, 3
+a, b, *c = 1, 2, 3, 4
+[builtins fixtures/list.pyi]
+
+[case testAssignmentToStarCount2]
+from typing import List
+ca = None # type: List[int]
+t1 = 1,
+t2 = 1, 2
+t3 = 1, 2, 3
+t4 = 1, 2, 3, 4
+c = [1]
+a, b, *c = t1  # E: Need more than 1 value to unpack (2 expected)
+a, b, *c = t2
+a, b, *c = t3
+a, b, *c = t4
+[builtins fixtures/list.pyi]
+
+[case testAssignmentToStarFromAny]
+from typing import Any, cast
+a, c = cast(Any, 1), C()
+p, *q = a
+c = a
+c = q
+
+class C: pass
+
+[case testAssignmentToComplexStar]
+from typing import List
+li = None # type: List[int]
+a, *(li) = 1,
+a, *(b, c) = 1, 2  # E: Need more than 1 value to unpack (2 expected)
+a, *(b, c) = 1, 2, 3
+a, *(b, c) = 1, 2, 3, 4  # E: Too many values to unpack (2 expected, 3 provided)
+[builtins fixtures/list.pyi]
+
+[case testAssignmentToStarFromTupleType]
+from typing import List, Tuple
+li = None # type: List[int]
+la = None # type: List[A]
+ta = None # type: Tuple[A, A, A]
+a, *la = ta
+a, *li = ta  # E
+a, *na = ta
+na = la
+na = a  # E
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+main:6: error: List item 0 has incompatible type "A"
+main:6: error: List item 1 has incompatible type "A"
+main:9: error: Incompatible types in assignment (expression has type "A", variable has type List[A])
+
+[case testAssignmentToStarFromTupleInference]
+from typing import List
+li = None # type: List[int]
+la = None # type: List[A]
+a, *l = A(), A()
+l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = la
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testAssignmentToStarFromListInference]
+from typing import List
+li = None # type: List[int]
+la = None # type: List[A]
+a, *l = [A(), A()]
+l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = la
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testAssignmentToStarFromTupleTypeInference]
+from typing import List, Tuple
+li = None # type: List[int]
+la = None # type: List[A]
+ta = None # type: Tuple[A, A, A]
+a, *l = ta
+l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = la
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testAssignmentToStarFromListTypeInference]
+from typing import List
+li = None # type: List[int]
+la = None # type: List[A]
+a, *l = la
+l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = la
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+
+
+-- Nested tuple assignment
+-- -----------------------
+
+
+[case testNestedTupleAssignment1]
+
+a1, b1, c1 = None, None, None # type: (A, B, C)
+a2, b2, c2 = None, None, None # type: (A, B, C)
+
+a1, (b1, c1) = a2, (b2, c2)
+a1, (a1, (b1, c1)) = a2, (a2, (b2, c2))
+a1, (a1, (a1, b1)) = a1, (a1, (a1, c1))  # Fail
+
+class A: pass
+class B: pass
+class C: pass
+[out]
+main:7: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+[case testNestedTupleAssignment2]
+
+a1, b1, c1 = None, None, None # type: (A, B, C)
+a2, b2, c2 = None, None, None # type: (A, B, C)
+t = a1, b1
+
+a2, b2 = t
+(a2, b2), c2 = t, c1
+(a2, c2), c2 = t, c1  # Fail
+t, c2 = (a2, b2), c2
+t, c2 = (a2, a2), c2  # Fail
+t = a1, a1, a1  # Fail
+t = a1  # Fail
+a2, a2, a2 = t  # Fail
+a2, = t  # Fail
+a2 = t  # Fail
+
+class A: pass
+class B: pass
+class C: pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "B", variable has type "C")
+main:10: error: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]")
+main:11: error: Incompatible types in assignment (expression has type "Tuple[A, A, A]", variable has type "Tuple[A, B]")
+main:12: error: Incompatible types in assignment (expression has type "A", variable has type "Tuple[A, B]")
+main:13: error: Need more than 2 values to unpack (3 expected)
+main:14: error: Too many values to unpack (1 expected, 2 provided)
+main:15: error: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "A")
+
+
+-- Error messages
+-- --------------
+
+
+[case testTupleErrorMessages]
+
+a = None # type: A
+
+(a, a) + a  # E: Unsupported left operand type for + ("Tuple[A, A]")
+a + (a, a)  # E: Unsupported operand types for + ("A" and "Tuple[A, A]")
+f((a, a))   # E: Argument 1 to "f" has incompatible type "Tuple[A, A]"; expected "A"
+(a, a).foo  # E: "Tuple[A, A]" has no attribute "foo"
+
+def f(x: 'A') -> None: pass
+
+class A:
+    def __add__(self, x: 'A') -> 'A': pass
+[builtins fixtures/tuple.pyi]
+
+[case testLargeTuplesInErrorMessages]
+
+a = None # type: LongTypeName
+a + (a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a) # Fail
+
+class LongTypeName:
+    def __add__(self, x: 'LongTypeName') -> 'LongTypeName': pass
+[builtins fixtures/tuple.pyi]
+[out]
+main:3: error: Unsupported operand types for + ("LongTypeName" and tuple(length 50))
+
+
+-- Tuple methods
+-- -------------
+
+
+[case testTupleMethods]
+from typing import Tuple
+t = None # type: Tuple[int, str]
+i = 0
+s = ''
+b = bool()
+
+s = t.__len__()  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+i = t.__str__()  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+i = s in t       # E: Incompatible types in assignment (expression has type "bool", variable has type "int")
+t.foo            # E: "Tuple[int, str]" has no attribute "foo"
+
+i = t.__len__()
+s = t.__str__()
+b = s in t
+
+[file builtins.py]
+from typing import TypeVar, Generic
+_T = TypeVar('_T')
+class object:
+    def __init__(self) -> None: pass
+class tuple(Generic[_T]):
+    def __len__(self) -> int: pass
+    def __str__(self) -> str: pass
+    def __contains__(self, o: object) -> bool: pass
+class int: pass
+class str: pass
+class bool: pass
+class type: pass
+class function: pass
+
+
+-- For loop over tuple
+-- -------------------
+
+
+[case testForLoopOverTuple]
+import typing
+t = 1, 2
+for x in t:
+    x = 1
+    x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[builtins fixtures/for.pyi]
+
+[case testForLoopOverEmptyTuple]
+import typing
+t = ()
+for x in t: pass
+[builtins fixtures/for.pyi]
+
+[case testForLoopOverNoneValuedTuple]
+import typing
+t = ()
+for x in None, None: pass
+[builtins fixtures/for.pyi]
+
+[case testForLoopOverTupleAndSubtyping]
+import typing
+class A: pass
+class B(A): pass
+for x in B(), A():
+    x = A()
+    x = B()
+    x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+[builtins fixtures/for.pyi]
+
+[case testTupleIterable]
+y = 'a'
+x = sum((1,2))
+y = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[builtins fixtures/tuple.pyi]
+
+
+-- Tuple as a base type
+-- --------------------
+
+
+[case testTupleBaseClass]
+import m
+[file m.pyi]
+from typing import Tuple
+class A(Tuple[int, str]):
+    def f(self, x: int) -> None:
+        a, b = 1, ''
+        a, b = self
+        b, a = self  # Error
+        self.f('')   # Error
+[builtins fixtures/tuple.pyi]
+[out]
+tmp/m.pyi:6: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+tmp/m.pyi:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/m.pyi:7: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testValidTupleBaseClass2]
+from typing import Tuple
+class A(Tuple[int, str]): pass
+
+x, y = A()
+reveal_type(x) # E: Revealed type is 'builtins.int'
+reveal_type(y) # E: Revealed type is 'builtins.str'
+
+x1 = A()[0] # type: int
+x2 = A()[1] # type: int # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+A()[2] # E: Tuple index out of range
+
+class B(Tuple[int, ...]): pass
+
+z1 = B()[0] # type: int
+z2 = B()[1] # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+B()[100]
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testValidTupleBaseClass]
+from typing import Tuple
+class A(tuple): pass
+[out]
+
+[case testTupleBaseClass2-skip]
+import m
+[file m.pyi]
+# This doesn't work correctly -- no errors are reported (#867)
+from typing import Tuple
+a = None # type: A
+class A(Tuple[int, str]): pass
+x, y = a
+x() # Expected: "int" not callable
+y() # Expected: "str" not callable
+[out]
+(should fail)
+
+[case testGenericClassWithTupleBaseClass]
+from typing import TypeVar, Generic, Tuple
+T = TypeVar('T')
+class Test(Generic[T], Tuple[T]): pass
+x = Test() # type: Test[int]
+[builtins fixtures/tuple.pyi]
+[out]
+main:4: error: Generic tuple types not supported
+
+
+-- Variable-length tuples (Tuple[t, ...] with literal '...')
+-- ---------------------------------------------------------
+
+
+[case testIndexingVariableLengthTuple]
+from typing import Tuple
+x = () # type: Tuple[str, ...]
+n = 5
+x[n]() # E: "str" not callable
+x[3]() # E: "str" not callable
+[builtins fixtures/tuple.pyi]
+
+[case testSubtypingVariableLengthTuple]
+from typing import Tuple
+class A: pass
+class B(A): pass
+def fa(t: Tuple[A, ...]) -> None: pass
+def fb(t: Tuple[B, ...]) -> None: pass
+ta = () # type: Tuple[A, ...]
+tb = () # type: Tuple[B, ...]
+fa(ta)
+fa(tb)
+fb(tb)
+fb(ta) # E: Argument 1 to "fb" has incompatible type Tuple[A, ...]; expected Tuple[B, ...]
+[builtins fixtures/tuple.pyi]
+
+[case testSubtypingFixedAndVariableLengthTuples]
+from typing import Tuple
+class A: pass
+class B(A): pass
+def fa(t: Tuple[A, ...]) -> None: pass
+def fb(t: Tuple[B, ...]) -> None: pass
+aa = (A(), A())
+ab = (A(), B())
+bb = (B(), B())
+fa(aa)
+fa(ab)
+fa(bb)
+fb(bb)
+fb(ab) # E: Argument 1 to "fb" has incompatible type "Tuple[A, B]"; expected Tuple[B, ...]
+fb(aa) # E: Argument 1 to "fb" has incompatible type "Tuple[A, A]"; expected Tuple[B, ...]
+[builtins fixtures/tuple.pyi]
+
+[case testSubtypingTupleIsContainer]
+from typing import Container
+a = None  # type: Container[str]
+a = ()
+
+[case testSubtypingTupleIsSized]
+from typing import Sized
+a = None  # type: Sized
+a = ()
+
+[case testTupleWithStarExpr1]
+
+a = (1, 2)
+b = (*a, '')
+reveal_type(b)  # E: Revealed type is 'Tuple[builtins.int, builtins.int, builtins.str]'
+
+[case testTupleWithStarExpr2]
+a = [1]
+b = (0, *a)
+reveal_type(b)  # E: Revealed type is 'builtins.tuple[builtins.int*]'
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithStarExpr3]
+a = ['']
+b = (0, *a)
+reveal_type(b)  # E: Revealed type is 'builtins.tuple[builtins.object*]'
+c = (*a, '')
+reveal_type(c)  # E: Revealed type is 'builtins.tuple[builtins.str*]'
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithStarExpr4]
+a = (1, 1, 'x', 'x')
+b = (1, 'x')
+a = (0, *b, '')
+[builtins fixtures/tuple.pyi]
+
+[case testTupleMeetTupleAny]
+from typing import Union, Tuple
+class A: pass
+class B: pass
+
+def f(x: Union[B, Tuple[A, A]]) -> None:
+    if isinstance(x, tuple):
+        reveal_type(x) # E: Revealed type is 'Tuple[__main__.A, __main__.A]'
+    else:
+        reveal_type(x) # E: Revealed type is '__main__.B'
+
+def g(x: Union[str, Tuple[str, str]]) -> None:
+    if isinstance(x, tuple):
+        reveal_type(x) # E: Revealed type is 'Tuple[builtins.str, builtins.str]'
+    else:
+        reveal_type(x) # E: Revealed type is 'builtins.str'
+
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTupleMeetTupleAnyComplex]
+from typing import Tuple, Union
+
+Pair = Tuple[int, int]
+Variant = Union[int, Pair]
+def tuplify(v: Variant) -> None:
+    reveal_type(v) # E: Revealed type is 'Union[builtins.int, Tuple[builtins.int, builtins.int]]'
+    if not isinstance(v, tuple):
+        reveal_type(v) # E: Revealed type is 'builtins.int'
+        v = (v, v)
+        reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.int]'
+    reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.int]'
+    reveal_type(v[0]) # E: Revealed type is 'builtins.int'
+
+Pair2 = Tuple[int, str]
+Variant2 = Union[int, Pair2]
+def tuplify2(v: Variant2) -> None:
+    if isinstance(v, tuple):
+        reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.str]'
+    else:
+        reveal_type(v) # E: Revealed type is 'builtins.int'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTupleMeetTupleAnyAfter]
+from typing import Tuple, Union
+
+def good(blah: Union[Tuple[int, int], int]) -> None:
+    reveal_type(blah) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], builtins.int]'
+    if isinstance(blah, tuple):
+        reveal_type(blah) # E: Revealed type is 'Tuple[builtins.int, builtins.int]'
+    reveal_type(blah) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], builtins.int]'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTupleMeetTupleVariable]
+from typing import Tuple, TypeVar, Generic, Union
+T = TypeVar('T')
+
+class A: pass
+class B1(A): pass
+class B2(A): pass
+class C: pass
+
+x = None # type: Tuple[A, ...]
+y = None # type: Tuple[Union[B1, C], Union[B2, C]]
+
+def g(x: T) -> Tuple[T, T]:
+    return (x, x)
+
+z = 1
+x, y = g(z) # E: Argument 1 to "g" has incompatible type "int"; expected "Tuple[B1, B2]"
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTupleWithUndersizedContext]
+a = ([1], 'x')
+a = ([], 'x', 1)  # E: Incompatible types in assignment (expression has type "Tuple[List[int], str, int]", variable has type "Tuple[List[int], str]")
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithOversizedContext]
+a = (1, [1], 'x')
+a = (1, [])  # E: Incompatible types in assignment (expression has type "Tuple[int, List[int]]", variable has type "Tuple[int, List[int], str]")
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithoutContext]
+a = (1, [])  # E: Need type annotation for variable
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithUnionContext]
+from typing import List, Union, Tuple
+def f() -> Union[int, Tuple[List[str]]]:
+    return ([],)
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithVariableSizedTupleContext]
+from typing import List, Tuple
+def f() -> Tuple[List[str], ...]:
+    return ([],)
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithoutArgs]
+from typing import Tuple
+def f(a: Tuple) -> None: pass
+f(())
+f((1,))
+f(('', ''))
+f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected Tuple[Any, ...]
+[builtins fixtures/tuple.pyi]
+
+[case testTupleSingleton]
+
+from typing import Tuple
+def f(a: Tuple[()]) -> None: pass
+f(())
+f((1,))  # E: Argument 1 to "f" has incompatible type "Tuple[int]"; expected "Tuple[]"
+f(('', ''))  # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[]"
+f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[]"
+[builtins fixtures/tuple.pyi]
+
+[case testNonliteralTupleIndex]
+t = (0, "")
+x = 0
+y = ""
+reveal_type(t[x])  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+t[y]  # E: Invalid tuple index type (actual type "str", expected type "Union[int, slice]")
+[builtins fixtures/tuple.pyi]
+
+[case testNonliteralTupleSlice]
+t = (0, "")
+x = 0
+y = ""
+reveal_type(t[x:])  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+t[y:]  # E: Slice index must be an integer or None
+[builtins fixtures/tuple.pyi]
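A small standalone example of the indexing behaviour the tuple cases above verify: a literal index gives the precise item type, while a non-literal int index gives the union of the item types (plain Python, illustrative names):

    from typing import Tuple

    t: Tuple[int, str] = (0, "")
    i = 0
    first = t[0]    # mypy infers int
    either = t[i]   # mypy infers Union[int, str] for a non-literal index
    print(first, either)
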
diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test
new file mode 100644
index 0000000..ab4da00
--- /dev/null
+++ b/test-data/unit/check-type-aliases.test
@@ -0,0 +1,114 @@
+[case testSimpleTypeAlias]
+import typing
+i = int
+def f(x: i) -> None: pass
+f(1)
+f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testUnionTypeAlias]
+from typing import Union
+U = Union[int, str]
+def f(x: U) -> None: pass
+f(1)
+f('')
+f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]"
+
+[case testTupleTypeAlias]
+from typing import Tuple
+T = Tuple[int, str]
+def f(x: T) -> None: pass
+f((1, 'x'))
+f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[int, str]"
+
+[case testCallableTypeAlias]
+from typing import Callable
+A = Callable[[int], None]
+f = None  # type: A
+f(1)
+f('') # E: Argument 1 has incompatible type "str"; expected "int"
+
+[case testListTypeAlias]
+from typing import List
+A = List[int]
+def f(x: A) -> None: pass
+f([1])
+f(['x']) # E: List item 0 has incompatible type "str"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testAnyTypeAlias]
+from typing import Any
+A = Any
+def f(x: A) -> None:
+    x.foo()
+f(1)
+f('x')
+
+[case testImportUnionAlias]
+import typing
+from _m import U
+def f(x: U) -> None: pass
+f(1)
+f('x')
+f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]"
+[file _m.py]
+from typing import Union
+U = Union[int, str]
+[builtins fixtures/tuple.pyi]
+
+[case testProhibitReassigningAliases]
+A = float
+A = int  # E: Cannot assign to a type \
+         # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation
+[out]
+
+[case testProhibitReassigningSubscriptedAliases]
+from typing import Callable
+A = Callable[[], float]
+A = Callable[[], int]  # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation \
+                       # E: Value of type "int" is not indexable
+                       # the second error is because of `Callable = 0` in lib-stub/typing.pyi
+[builtins fixtures/list.pyi]
+[out]
+
+[case testProhibitReassigningGenericAliases]
+from typing import TypeVar, Union, Tuple
+T = TypeVar('T')
+
+A = Tuple[T, T]
+A = Union[T, int]  # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation \
+                   # E: Value of type "int" is not indexable
+                   # the second error is because of `Union = 0` in lib-stub/typing.pyi
+[out]
+
+[case testProhibitUsingVariablesAsTypesAndAllowAliasesAsTypes]
+from typing import TypeVar, Sequence, Type
+T = TypeVar('T')
+
+A: Type[float] = int
+A = float  # OK
+x: A  # E: Invalid type "__main__.A"
+def bad(tp: A) -> None:  # E: Invalid type "__main__.A"
+    pass
+
+Alias = int
+GenAlias = Sequence[T]
+def fun(x: Alias) -> GenAlias[int]: pass
+[out]
+
+[case testTypeAliasInBuiltins]
+def f(x: bytes): pass
+bytes
+f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str"
+[builtins fixtures/alias.pyi]
+
+[case testEmptyTupleTypeAlias]
+from typing import Tuple, Callable
+EmptyTuple = Tuple[()]
+x = None # type: EmptyTuple
+reveal_type(x)  # E: Revealed type is 'Tuple[]'
+
+EmptyTupleCallable = Callable[[Tuple[()]], None]
+f = None # type: EmptyTupleCallable
+reveal_type(f)  # E: Revealed type is 'def (Tuple[])'
+[builtins fixtures/list.pyi]
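For reference, the alias cases above correspond to ordinary alias assignments like the following sketch (plain Python; the names are illustrative only). Reassigning an alias to a different type is what the "Cannot assign multiple types" cases reject:

    from typing import Callable, Tuple, Union

    Pair = Tuple[int, str]            # simple alias
    Handler = Callable[[int], None]   # callable alias
    Number = Union[int, float]        # union alias

    def describe(p: Pair) -> str:
        return "%d: %s" % p

    print(describe((1, "one")))       # 1: one
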
diff --git a/test-data/unit/check-type-checks.test b/test-data/unit/check-type-checks.test
new file mode 100644
index 0000000..c4905a7
--- /dev/null
+++ b/test-data/unit/check-type-checks.test
@@ -0,0 +1,113 @@
+-- Conditional type checks.
+
+
+[case testSimpleIsinstance]
+
+x = None  # type: object
+n = None  # type: int
+s = None  # type: str
+n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+if isinstance(x, int):
+    n = x
+    s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+
+[case testSimpleIsinstance2]
+import typing
+def f(x: object, n: int, s: str) -> None:
+    n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+    if isinstance(x, int):
+        n = x
+        s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testSimpleIsinstance3]
+
+class A:
+    x = None  # type: object
+    n = None  # type: int
+    s = None  # type: str
+    n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+    if isinstance(x, int):
+        n = x
+        s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    else:
+        n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testMultipleIsinstanceTests]
+import typing
+class A: pass
+class B(A): pass
+def f(x: object, a: A, b: B, c: int) -> None:
+    if isinstance(x, A):
+        if isinstance(x, B):
+            b = x
+            x = a
+        a = x
+        c = x # E: Incompatible types in assignment (expression has type "A", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testMultipleIsinstanceTests2]
+import typing
+class A: pass
+class B(A): pass
+def f(x: object, y: object, n: int, s: str) -> None:
+    if isinstance(x, int):
+        if isinstance(y, str):
+            n = x
+            s = y
+            s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+            n = y # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+        s = y # E: Incompatible types in assignment (expression has type "object", variable has type "str")
+        n = y # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+        n = x
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndElif]
+import typing
+def f(x: object, n: int, s: str) -> None:
+    n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+    if isinstance(x, int):
+        n = x
+        s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    elif isinstance(x, str):
+        s = x
+        n = x # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    else:
+        n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+        s = x # E: Incompatible types in assignment (expression has type "object", variable has type "str")
+    n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndAnyType]
+from typing import Any
+def f(x: Any, n: int, s: str) -> None:
+    s = x
+    if isinstance(x, int):
+        n = x
+        s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    s = x
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class C(Generic[T]):
+    def f(self, x: T) -> None: pass
+def f(x: object) -> None:
+    if isinstance(x, C):
+        x.f(1)
+        x.f('')
+        x.g() # E: C[Any] has no attribute "g"
+    x.g() # E: "object" has no attribute "g"
+[builtins fixtures/isinstance.pyi]
+[out]
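A compact runnable sketch of the isinstance() narrowing that the conditional-check cases above rely on (plain Python, standard library only):

    def describe(x: object) -> str:
        if isinstance(x, int):
            return str(x + 1)    # x is narrowed to int in this branch
        elif isinstance(x, str):
            return x.upper()     # x is narrowed to str in this branch
        return repr(x)           # x is still object here

    print(describe(41), describe("hi"), describe([1, 2]))
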
diff --git a/test-data/unit/check-type-promotion.test b/test-data/unit/check-type-promotion.test
new file mode 100644
index 0000000..0a39996
--- /dev/null
+++ b/test-data/unit/check-type-promotion.test
@@ -0,0 +1,39 @@
+-- Test cases for type promotion (e.g. int -> float).
+
+
+[case testPromoteIntToFloat]
+def f(x: float) -> None: pass
+f(1)
+[builtins fixtures/primitives.pyi]
+
+[case testCantPromoteFloatToInt]
+def f(x: int) -> None: pass
+f(1.1) # E: Argument 1 to "f" has incompatible type "float"; expected "int"
+[builtins fixtures/primitives.pyi]
+
+[case testPromoteFloatToComplex]
+def f(x: complex) -> None: pass
+f(1)
+[builtins fixtures/primitives.pyi]
+
+[case testPromoteIntToComplex]
+def f(x: complex) -> None: pass
+f(1)
+[builtins fixtures/primitives.pyi]
+
+[case testPromoteBytearrayToByte]
+def f(x: bytes) -> None: pass
+f(bytearray())
+[builtins fixtures/primitives.pyi]
+
+[case testNarrowingDownFromPromoteTargetType]
+y = 0.0
+y = 1
+y() # E: "int" not callable
+[builtins fixtures/primitives.pyi]
+
+[case testNarrowingDownFromPromoteTargetType2]
+y = 0.0
+y = 1
+y.x # E: "int" has no attribute "x"
+[builtins fixtures/primitives.pyi]
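The promotion cases above correspond to ordinary calls like the sketch below: int is accepted where float (or complex) is expected and bytearray where bytes is expected, but not the reverse (plain Python, illustrative names):

    def scale(x: float) -> float:
        return x * 2

    def raw_length(b: bytes) -> int:
        return len(b)

    print(scale(3))                       # accepted: int promotes to float
    print(scale(1.5))                     # accepted: already float
    print(raw_length(bytearray(b"ab")))   # accepted: bytearray promotes to bytes
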
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
new file mode 100644
index 0000000..0e908e8
--- /dev/null
+++ b/test-data/unit/check-typeddict.test
@@ -0,0 +1,1258 @@
+-- Create Instance
+
+[case testCanCreateTypedDictInstanceWithKeywordArguments]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(x=42, y=1337)
+reveal_type(p)  # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})'
+# Use values() to check fallback value type.
+reveal_type(p.values()) # E: Revealed type is 'typing.Iterable[builtins.int*]'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testCanCreateTypedDictInstanceWithDictCall]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(dict(x=42, y=1337))
+reveal_type(p)  # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})'
+# Use values() to check fallback value type.
+reveal_type(p.values()) # E: Revealed type is 'typing.Iterable[builtins.int*]'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testCanCreateTypedDictInstanceWithDictLiteral]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point({'x': 42, 'y': 1337})
+reveal_type(p)  # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})'
+# Use values() to check fallback value type.
+reveal_type(p.values()) # E: Revealed type is 'typing.Iterable[builtins.int*]'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testCanCreateTypedDictInstanceWithNoArguments]
+from typing import TypeVar, Union
+from mypy_extensions import TypedDict
+EmptyDict = TypedDict('EmptyDict', {})
+p = EmptyDict()
+reveal_type(p)  # E: Revealed type is 'TypedDict('__main__.EmptyDict', {})'
+reveal_type(p.values()) # E: Revealed type is 'typing.Iterable[<nothing>]'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+
+-- Create Instance (Errors)
+
+[case testCannotCreateTypedDictInstanceWithUnknownArgumentPattern]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(42, 1337)  # E: Expected keyword arguments, {...}, or dict(...) in TypedDict constructor
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictInstanceNonLiteralItemName]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+x = 'x'
+p = Point({x: 42, 'y': 1337})  # E: Expected TypedDict key to be string literal
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictInstanceWithExtraItems]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(x=42, y=1337, z=666)  # E: Extra key 'z' for TypedDict "Point"
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictInstanceWithMissingItems]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(x=42)  # E: Key 'y' missing for TypedDict "Point"
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictInstanceWithIncompatibleItemType]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(x='meaning_of_life', y=1337)  # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int")
+[builtins fixtures/dict.pyi]
+
+
+-- Define TypedDict (Class syntax)
+
+[case testCanCreateTypedDictWithClass]
+# flags: --python-version 3.6
+from mypy_extensions import TypedDict
+
+class Point(TypedDict):
+    x: int
+    y: int
+
+p = Point(x=42, y=1337)
+reveal_type(p)  # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})'
+[builtins fixtures/dict.pyi]
+
+[case testCanCreateTypedDictWithSubclass]
+# flags: --python-version 3.6
+from mypy_extensions import TypedDict
+
+class Point1D(TypedDict):
+    x: int
+class Point2D(Point1D):
+    y: int
+r: Point1D
+p: Point2D
+reveal_type(r)  # E: Revealed type is 'TypedDict('__main__.Point1D', {'x': builtins.int})'
+reveal_type(p)  # E: Revealed type is 'TypedDict('__main__.Point2D', {'x': builtins.int, 'y': builtins.int})'
+[builtins fixtures/dict.pyi]
+
+[case testCanCreateTypedDictWithSubclass2]
+# flags: --python-version 3.6
+from mypy_extensions import TypedDict
+
+class Point1D(TypedDict):
+    x: int
+class Point2D(TypedDict, Point1D): # TypedDict itself is also allowed among the bases; it is simply ignored at runtime
+    y: int
+
+p: Point2D
+reveal_type(p)  # E: Revealed type is 'TypedDict('__main__.Point2D', {'x': builtins.int, 'y': builtins.int})'
+[builtins fixtures/dict.pyi]
+
+[case testCanCreateTypedDictClassEmpty]
+# flags: --python-version 3.6
+from mypy_extensions import TypedDict
+
+class EmptyDict(TypedDict):
+    pass
+
+p = EmptyDict()
+reveal_type(p)  # E: Revealed type is 'TypedDict('__main__.EmptyDict', {})'
+[builtins fixtures/dict.pyi]
+
+
+-- Define TypedDict (Class syntax errors)
+
+[case testCanCreateTypedDictWithClassOldVersion]
+# flags: --python-version 3.5
+from mypy_extensions import TypedDict
+
+class Point(TypedDict): # E: TypedDict class syntax is only supported in Python 3.6
+    pass
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictWithClassOtherBases]
+# flags: --python-version 3.6
+from mypy_extensions import TypedDict
+
+class A: pass
+
+class Point1D(TypedDict, A): # E: All bases of a new TypedDict must be TypedDict types
+    x: int
+class Point2D(Point1D, A): # E: All bases of a new TypedDict must be TypedDict types
+    y: int
+
+p: Point2D
+reveal_type(p)  # E: Revealed type is 'TypedDict('__main__.Point2D', {'x': builtins.int, 'y': builtins.int})'
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictWithClassWithOtherStuff]
+# flags: --python-version 3.6
+from mypy_extensions import TypedDict
+
+class Point(TypedDict):
+    x: int
+    y: int = 1 # E: Right hand side values are not supported in TypedDict
+    def f(): pass # E: Invalid statement in TypedDict definition; expected "field_name: field_type"
+    z = int # E: Invalid statement in TypedDict definition; expected "field_name: field_type"
+
+p = Point(x=42, y=1337, z='whatever')
+reveal_type(p)  # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int, 'z': Any})'
+[builtins fixtures/dict.pyi]
+
+[case testCanCreateTypedDictTypeWithUnderscoreItemName]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int, '_fallback': object})
+[builtins fixtures/dict.pyi]
+
+[case testCanCreateTypedDictWithClassUnderscores]
+# flags: --python-version 3.6
+from mypy_extensions import TypedDict
+
+class Point(TypedDict):
+    x: int
+    _y: int
+
+p: Point
+reveal_type(p) # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, '_y': builtins.int})'
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictWithClassOverwriting]
+# flags: --python-version 3.6
+from mypy_extensions import TypedDict
+
+class Bad(TypedDict):
+    x: int
+    x: str # E: Duplicate TypedDict field "x"
+
+b: Bad
+reveal_type(b) # E: Revealed type is 'TypedDict('__main__.Bad', {'x': builtins.int})'
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictWithClassOverwriting2]
+# flags: --python-version 3.6
+from mypy_extensions import TypedDict
+
+class Point1(TypedDict):
+    x: int
+class Point2(TypedDict):
+    x: float
+class Bad(Point1, Point2): # E: Cannot overwrite TypedDict field "x" while merging
+    pass
+
+b: Bad
+reveal_type(b) # E: Revealed type is 'TypedDict('__main__.Bad', {'x': builtins.int})'
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictWithClassOverwriting3]
+# flags: --python-version 3.6
+from mypy_extensions import TypedDict
+
+class Point1(TypedDict):
+    x: int
+class Point2(Point1):
+    x: float # E: Cannot overwrite TypedDict field "x" while extending
+
+p2: Point2
+reveal_type(p2) # E: Revealed type is 'TypedDict('__main__.Point2', {'x': builtins.int})'
+[builtins fixtures/dict.pyi]
+
+
+-- Subtyping
+
+[case testCanConvertTypedDictToItself]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+def identity(p: Point) -> Point:
+    return p
+[builtins fixtures/dict.pyi]
+
+[case testCanConvertTypedDictToEquivalentTypedDict]
+from mypy_extensions import TypedDict
+PointA = TypedDict('PointA', {'x': int, 'y': int})
+PointB = TypedDict('PointB', {'x': int, 'y': int})
+def identity(p: PointA) -> PointB:
+    return p
+[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToSimilarTypedDictWithNarrowerItemTypes]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+ObjectPoint = TypedDict('ObjectPoint', {'x': object, 'y': object})
+def convert(op: ObjectPoint) -> Point:
+    return op  # E: Incompatible return value type (got "ObjectPoint", expected "Point")
+[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToSimilarTypedDictWithWiderItemTypes]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+ObjectPoint = TypedDict('ObjectPoint', {'x': object, 'y': object})
+def convert(p: Point) -> ObjectPoint:
+    return p  # E: Incompatible return value type (got "Point", expected "ObjectPoint")
+[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToSimilarTypedDictWithIncompatibleItemTypes]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+Chameleon = TypedDict('Chameleon', {'x': str, 'y': str})
+def convert(p: Point) -> Chameleon:
+    return p  # E: Incompatible return value type (got "Point", expected "Chameleon")
+[builtins fixtures/dict.pyi]
+
+[case testCanConvertTypedDictToNarrowerTypedDict]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+Point1D = TypedDict('Point1D', {'x': int})
+def narrow(p: Point) -> Point1D:
+    return p
+[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToWiderTypedDict]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+Point3D = TypedDict('Point3D', {'x': int, 'y': int, 'z': int})
+def widen(p: Point) -> Point3D:
+    return p  # E: Incompatible return value type (got "Point", expected "Point3D")
+[builtins fixtures/dict.pyi]
+
+[case testCanConvertTypedDictToCompatibleMapping]
+from mypy_extensions import TypedDict
+from typing import Mapping
+Point = TypedDict('Point', {'x': int, 'y': int})
+def as_mapping(p: Point) -> Mapping[str, int]:
+    return p
+[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToCompatibleMapping]
+from mypy_extensions import TypedDict
+from typing import Mapping
+Point = TypedDict('Point', {'x': int, 'y': int})
+def as_mapping(p: Point) -> Mapping[str, str]:
+    return p  # E: Incompatible return value type (got "Point", expected Mapping[str, str])
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictAcceptsIntForFloatDuckTypes]
+from mypy_extensions import TypedDict
+from typing import Any, Mapping
+Point = TypedDict('Point', {'x': float, 'y': float})
+def create_point() -> Point:
+    return Point(x=1, y=2)
+reveal_type(Point(x=1, y=2))  # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.float, 'y': builtins.float})'
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictDoesNotAcceptsFloatForInt]
+from mypy_extensions import TypedDict
+from typing import Any, Mapping
+Point = TypedDict('Point', {'x': int, 'y': int})
+def create_point() -> Point:
+    return Point(x=1.2, y=2.5)
+[out]
+main:5: error: Incompatible types (expression has type "float", TypedDict item "x" has type "int")
+main:5: error: Incompatible types (expression has type "float", TypedDict item "y" has type "int")
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictAcceptsAnyType]
+from mypy_extensions import TypedDict
+from typing import Any, Mapping
+Point = TypedDict('Point', {'x': float, 'y': float})
+def create_point(something: Any) -> Point:
+    return Point({
+      'x': something.x,
+      'y': something.y
+    })
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictValueTypeContext]
+from mypy_extensions import TypedDict
+from typing import List
+D = TypedDict('D', {'x': List[int]})
+reveal_type(D(x=[]))  # E: Revealed type is 'TypedDict('__main__.D', {'x': builtins.list[builtins.int]})'
+[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToDictOrMutableMapping]
+from mypy_extensions import TypedDict
+from typing import Dict, MutableMapping
+Point = TypedDict('Point', {'x': int, 'y': int})
+def as_dict(p: Point) -> Dict[str, int]:
+    return p  # E: Incompatible return value type (got "Point", expected Dict[str, int])
+def as_mutable_mapping(p: Point) -> MutableMapping[str, int]:
+    return p  # E: Incompatible return value type (got "Point", expected MutableMapping[str, int])
+[builtins fixtures/dict.pyi]
+
+[case testCanConvertTypedDictToAny]
+from mypy_extensions import TypedDict
+from typing import Any
+Point = TypedDict('Point', {'x': int, 'y': int})
+def unprotect(p: Point) -> Any:
+    return p
+[builtins fixtures/dict.pyi]
+
+[case testAnonymousTypedDictInErrorMessages]
+from mypy_extensions import TypedDict
+
+A = TypedDict('A', {'x': int, 'y': str})
+B = TypedDict('B', {'x': int, 'z': str, 'a': int})
+C = TypedDict('C', {'x': int, 'z': str, 'a': str})
+a: A
+b: B
+c: C
+
+def f(a: A) -> None: pass
+
+l = [a, b]  # Join generates an anonymous TypedDict
+f(l) # E: Argument 1 to "f" has incompatible type List[TypedDict({'x': int})]; expected "A"
+ll = [b, c]
+f(ll) # E: Argument 1 to "f" has incompatible type List[TypedDict({'x': int, 'z': str})]; expected "A"
+[builtins fixtures/dict.pyi]
+
+
+-- Join
+
+[case testJoinOfTypedDictHasOnlyCommonKeysAndNewFallback]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+Point3D = TypedDict('Point3D', {'x': int, 'y': int, 'z': int})
+p1 = TaggedPoint(type='2d', x=0, y=0)
+p2 = Point3D(x=1, y=1, z=1)
+joined_points = [p1, p2][0]
+reveal_type(p1.values())   # E: Revealed type is 'typing.Iterable[builtins.object*]'
+reveal_type(p2.values())   # E: Revealed type is 'typing.Iterable[builtins.int*]'
+reveal_type(joined_points)  # E: Revealed type is 'TypedDict({'x': builtins.int, 'y': builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testJoinOfTypedDictRemovesNonequivalentKeys]
+from mypy_extensions import TypedDict
+CellWithInt = TypedDict('CellWithInt', {'value': object, 'meta': int})
+CellWithObject = TypedDict('CellWithObject', {'value': object, 'meta': object})
+c1 = CellWithInt(value=1, meta=42)
+c2 = CellWithObject(value=2, meta='turtle doves')
+joined_cells = [c1, c2]
+reveal_type(c1)             # E: Revealed type is 'TypedDict('__main__.CellWithInt', {'value': builtins.object, 'meta': builtins.int})'
+reveal_type(c2)             # E: Revealed type is 'TypedDict('__main__.CellWithObject', {'value': builtins.object, 'meta': builtins.object})'
+reveal_type(joined_cells)   # E: Revealed type is 'builtins.list[TypedDict({'value': builtins.object}, fallback=typing.Mapping[builtins.str, builtins.object])]'
+[builtins fixtures/dict.pyi]
+
+[case testJoinOfDisjointTypedDictsIsEmptyTypedDict]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+Cell = TypedDict('Cell', {'value': object})
+d1 = Point(x=0, y=0)
+d2 = Cell(value='pear tree')
+joined_dicts = [d1, d2]
+reveal_type(d1)             # E: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})'
+reveal_type(d2)             # E: Revealed type is 'TypedDict('__main__.Cell', {'value': builtins.object})'
+reveal_type(joined_dicts)   # E: Revealed type is 'builtins.list[TypedDict({}, fallback=typing.Mapping[builtins.str, <nothing>])]'
+[builtins fixtures/dict.pyi]
+
+[case testJoinOfTypedDictWithCompatibleMappingIsMapping]
+from mypy_extensions import TypedDict
+from typing import Mapping
+Cell = TypedDict('Cell', {'value': int})
+left = Cell(value=42)
+right = {'score': 999}  # type: Mapping[str, int]
+joined1 = [left, right]
+joined2 = [right, left]
+reveal_type(joined1)  # E: Revealed type is 'builtins.list[typing.Mapping*[builtins.str, builtins.int]]'
+reveal_type(joined2)  # E: Revealed type is 'builtins.list[typing.Mapping*[builtins.str, builtins.int]]'
+[builtins fixtures/dict.pyi]
+
+[case testJoinOfTypedDictWithCompatibleMappingSupertypeIsSupertype]
+from mypy_extensions import TypedDict
+from typing import Sized
+Cell = TypedDict('Cell', {'value': int})
+left = Cell(value=42)
+right = {'score': 999}  # type: Sized
+joined1 = [left, right]
+joined2 = [right, left]
+reveal_type(joined1)  # E: Revealed type is 'builtins.list[typing.Sized*]'
+reveal_type(joined2)  # E: Revealed type is 'builtins.list[typing.Sized*]'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testJoinOfTypedDictWithIncompatibleMappingIsObject]
+from mypy_extensions import TypedDict
+from typing import Mapping
+Cell = TypedDict('Cell', {'value': int})
+left = Cell(value=42)
+right = {'score': 'zero'}  # type: Mapping[str, str]
+joined1 = [left, right]
+joined2 = [right, left]
+reveal_type(joined1)  # E: Revealed type is 'builtins.list[builtins.object*]'
+reveal_type(joined2)  # E: Revealed type is 'builtins.list[builtins.object*]'
+[builtins fixtures/dict.pyi]
+
+[case testJoinOfTypedDictWithIncompatibleTypeIsObject]
+from mypy_extensions import TypedDict
+from typing import Mapping
+Cell = TypedDict('Cell', {'value': int})
+left = Cell(value=42)
+right = 42
+joined1 = [left, right]
+joined2 = [right, left]
+reveal_type(joined1)  # E: Revealed type is 'builtins.list[builtins.object*]'
+reveal_type(joined2)  # E: Revealed type is 'builtins.list[builtins.object*]'
+[builtins fixtures/dict.pyi]
+
+
+-- Meet
+
+[case testMeetOfTypedDictsWithCompatibleCommonKeysHasAllKeysAndNewFallback]
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable
+XY = TypedDict('XY', {'x': int, 'y': int})
+YZ = TypedDict('YZ', {'y': int, 'z': int})
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: XY, y: YZ) -> None: pass
+reveal_type(f(g))  # E: Revealed type is 'TypedDict({'x': builtins.int, 'y': builtins.int, 'z': builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])'
+[builtins fixtures/dict.pyi]
+
+[case testMeetOfTypedDictsWithIncompatibleCommonKeysIsUninhabited]
+# flags: --strict-optional
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable
+XYa = TypedDict('XYa', {'x': int, 'y': int})
+YbZ = TypedDict('YbZ', {'y': object, 'z': int})
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: XYa, y: YbZ) -> None: pass
+reveal_type(f(g))  # E: Revealed type is '<nothing>'
+[builtins fixtures/dict.pyi]
+
+[case testMeetOfTypedDictsWithNoCommonKeysHasAllKeysAndNewFallback]
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable
+X = TypedDict('X', {'x': int})
+Z = TypedDict('Z', {'z': int})
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: X, y: Z) -> None: pass
+reveal_type(f(g))  # E: Revealed type is 'TypedDict({'x': builtins.int, 'z': builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])'
+[builtins fixtures/dict.pyi]
+
+# TODO: It would be more accurate for the meet to be TypedDict instead.
+[case testMeetOfTypedDictWithCompatibleMappingIsUninhabitedForNow]
+# flags: --strict-optional
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable, Mapping
+X = TypedDict('X', {'x': int})
+M = Mapping[str, int]
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: X, y: M) -> None: pass
+reveal_type(f(g))  # E: Revealed type is '<nothing>'
+[builtins fixtures/dict.pyi]
+
+[case testMeetOfTypedDictWithIncompatibleMappingIsUninhabited]
+# flags: --strict-optional
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable, Mapping
+X = TypedDict('X', {'x': int})
+M = Mapping[str, str]
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: X, y: M) -> None: pass
+reveal_type(f(g))  # E: Revealed type is '<nothing>'
+[builtins fixtures/dict.pyi]
+
+# TODO: It would be more accurate for the meet to be TypedDict instead.
+[case testMeetOfTypedDictWithCompatibleMappingSuperclassIsUninhabitedForNow]
+# flags: --strict-optional
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable, Iterable
+X = TypedDict('X', {'x': int})
+I = Iterable[str]
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: X, y: I) -> None: pass
+reveal_type(f(g))  # E: Revealed type is '<nothing>'
+[builtins fixtures/dict.pyi]
+
+[case testMeetOfTypedDictsWithNonTotal]
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable
+XY = TypedDict('XY', {'x': int, 'y': int}, total=False)
+YZ = TypedDict('YZ', {'y': int, 'z': int}, total=False)
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: XY, y: YZ) -> None: pass
+reveal_type(f(g))  # E: Revealed type is 'TypedDict({'x'?: builtins.int, 'y'?: builtins.int, 'z'?: builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])'
+[builtins fixtures/dict.pyi]
+
+[case testMeetOfTypedDictsWithNonTotalAndTotal]
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable
+XY = TypedDict('XY', {'x': int}, total=False)
+YZ = TypedDict('YZ', {'y': int, 'z': int})
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: XY, y: YZ) -> None: pass
+reveal_type(f(g))  # E: Revealed type is 'TypedDict({'x'?: builtins.int, 'y': builtins.int, 'z': builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])'
+[builtins fixtures/dict.pyi]
+
+[case testMeetOfTypedDictsWithIncompatibleNonTotalAndTotal]
+# flags: --strict-optional
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable
+XY = TypedDict('XY', {'x': int, 'y': int}, total=False)
+YZ = TypedDict('YZ', {'y': int, 'z': int})
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: XY, y: YZ) -> None: pass
+reveal_type(f(g)) # E: Revealed type is '<nothing>'
+[builtins fixtures/dict.pyi]
+
+
+-- Constraint Solver
+
+[case testTypedDictConstraintsAgainstIterable]
+from typing import TypeVar, Iterable
+from mypy_extensions import TypedDict
+T = TypeVar('T')
+def f(x: Iterable[T]) -> T: pass
+A = TypedDict('A', {'x': int})
+a: A
+reveal_type(f(a)) # E: Revealed type is 'builtins.str*'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+-- TODO: Figure out some way to trigger the ConstraintBuilderVisitor.visit_typeddict_type() path.
+
+
+-- Special Method: __getitem__
+
+[case testCanGetItemOfTypedDictWithValidStringLiteralKey]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+reveal_type(p['type'])  # E: Revealed type is 'builtins.str'
+reveal_type(p['x'])     # E: Revealed type is 'builtins.int'
+reveal_type(p['y'])     # E: Revealed type is 'builtins.int'
+[builtins fixtures/dict.pyi]
+
+[case testCanGetItemOfTypedDictWithValidBytesOrUnicodeLiteralKey]
+# flags: --python-version 2.7
+from mypy_extensions import TypedDict
+Cell = TypedDict('Cell', {'value': int})
+c = Cell(value=42)
+reveal_type(c['value'])   # E: Revealed type is 'builtins.int'
+reveal_type(c[u'value'])  # E: Revealed type is 'builtins.int'
+[builtins_py2 fixtures/dict.pyi]
+
+[case testCannotGetItemOfTypedDictWithInvalidStringLiteralKey]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p: TaggedPoint
+p['z']  # E: TypedDict "TaggedPoint" has no key 'z'
+[builtins fixtures/dict.pyi]
+
+[case testCannotGetItemOfAnonymousTypedDictWithInvalidStringLiteralKey]
+from typing import TypeVar
+from mypy_extensions import TypedDict
+A = TypedDict('A', {'x': str, 'y': int, 'z': str})
+B = TypedDict('B', {'x': str, 'z': int})
+C = TypedDict('C', {'x': str, 'y': int, 'z': int})
+T = TypeVar('T')
+def join(x: T, y: T) -> T: return x
+ab = join(A(x='', y=1, z=''), B(x='', z=1))
+ac = join(A(x='', y=1, z=''), C(x='', y=0, z=1))
+ab['y']  # E: 'y' is not a valid TypedDict key; expected one of ('x')
+ac['a']  # E: 'a' is not a valid TypedDict key; expected one of ('x', 'y')
+[builtins fixtures/dict.pyi]
+
+[case testCannotGetItemOfTypedDictWithNonLiteralKey]
+from mypy_extensions import TypedDict
+from typing import Union
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+def get_coordinate(p: TaggedPoint, key: str) -> Union[str, int]:
+    return p[key]  # E: TypedDict key must be a string literal; expected one of ('type', 'x', 'y')
+[builtins fixtures/dict.pyi]
+
+
+-- Special Method: __setitem__
+
+[case testCanSetItemOfTypedDictWithValidStringLiteralKeyAndCompatibleValueType]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+p['type'] = 'two_d'
+p['x'] = 1
+[builtins fixtures/dict.pyi]
+
+[case testCannotSetItemOfTypedDictWithIncompatibleValueType]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+p['x'] = 'y'  # E: Argument 2 has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testCannotSetItemOfTypedDictWithInvalidStringLiteralKey]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+p['z'] = 1  # E: TypedDict "TaggedPoint" has no key 'z'
+[builtins fixtures/dict.pyi]
+
+[case testCannotSetItemOfTypedDictWithNonLiteralKey]
+from mypy_extensions import TypedDict
+from typing import Union
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+def set_coordinate(p: TaggedPoint, key: str, value: int) -> None:
+    p[key] = value  # E: TypedDict key must be a string literal; expected one of ('type', 'x', 'y')
+[builtins fixtures/dict.pyi]
+
+
+-- isinstance
+
+[case testTypedDictAndInstance]
+from mypy_extensions import TypedDict
+D = TypedDict('D', {'x': int})
+d: object
+if isinstance(d, D):  # E: Cannot use isinstance() with a TypedDict type
+    reveal_type(d) # E: Revealed type is '__main__.D'
+[builtins fixtures/isinstancelist.pyi]
+
+
+-- Scoping
+
+[case testTypedDictInClassNamespace]
+# https://github.com/python/mypy/pull/2553#issuecomment-266474341
+from mypy_extensions import TypedDict
+class C:
+    def f(self):
+        A = TypedDict('A', {'x': int})
+    def g(self):
+        A = TypedDict('A', {'y': int})
+C.A  # E: Type[C] has no attribute "A"
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictInFunction]
+from mypy_extensions import TypedDict
+def f() -> None:
+    A = TypedDict('A', {'x': int})
+A  # E: Name 'A' is not defined
+[builtins fixtures/dict.pyi]
+
+
+-- Union simplification / proper subtype checks
+
+[case testTypedDictUnionSimplification]
+from typing import TypeVar, Union, Any, cast
+from mypy_extensions import TypedDict
+
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+C = TypedDict('C', {'a': int})
+D = TypedDict('D', {'a': int, 'b': int})
+E = TypedDict('E', {'a': str})
+F = TypedDict('F', {'x': int})
+G = TypedDict('G', {'a': Any})
+
+c = C(a=1)
+d = D(a=1, b=1)
+e = E(a='')
+f = F(x=1)
+g = G(a=cast(Any, 1))  # Work around #2610
+
+reveal_type(u(d, d)) # E: Revealed type is 'TypedDict('__main__.D', {'a': builtins.int, 'b': builtins.int})'
+reveal_type(u(c, d)) # E: Revealed type is 'TypedDict('__main__.C', {'a': builtins.int})'
+reveal_type(u(d, c)) # E: Revealed type is 'TypedDict('__main__.C', {'a': builtins.int})'
+reveal_type(u(c, e)) # E: Revealed type is 'Union[TypedDict('__main__.E', {'a': builtins.str}), TypedDict('__main__.C', {'a': builtins.int})]'
+reveal_type(u(e, c)) # E: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), TypedDict('__main__.E', {'a': builtins.str})]'
+reveal_type(u(c, f)) # E: Revealed type is 'Union[TypedDict('__main__.F', {'x': builtins.int}), TypedDict('__main__.C', {'a': builtins.int})]'
+reveal_type(u(f, c)) # E: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), TypedDict('__main__.F', {'x': builtins.int})]'
+reveal_type(u(c, g)) # E: Revealed type is 'Union[TypedDict('__main__.G', {'a': Any}), TypedDict('__main__.C', {'a': builtins.int})]'
+reveal_type(u(g, c)) # E: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), TypedDict('__main__.G', {'a': Any})]'
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictUnionSimplification2]
+from typing import TypeVar, Union, Mapping, Any
+from mypy_extensions import TypedDict
+
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+C = TypedDict('C', {'a': int, 'b': int})
+
+c = C(a=1, b=1)
+m_s_i: Mapping[str, int]
+m_s_s: Mapping[str, str]
+m_i_i: Mapping[int, int]
+m_s_a: Mapping[str, Any]
+
+reveal_type(u(c, m_s_i)) # E: Revealed type is 'typing.Mapping*[builtins.str, builtins.int]'
+reveal_type(u(m_s_i, c)) # E: Revealed type is 'typing.Mapping*[builtins.str, builtins.int]'
+reveal_type(u(c, m_s_s)) # E: Revealed type is 'Union[typing.Mapping*[builtins.str, builtins.str], TypedDict('__main__.C', {'a': builtins.int, 'b': builtins.int})]'
+reveal_type(u(c, m_i_i)) # E: Revealed type is 'Union[typing.Mapping*[builtins.int, builtins.int], TypedDict('__main__.C', {'a': builtins.int, 'b': builtins.int})]'
+reveal_type(u(c, m_s_a)) # E: Revealed type is 'Union[typing.Mapping*[builtins.str, Any], TypedDict('__main__.C', {'a': builtins.int, 'b': builtins.int})]'
+[builtins fixtures/dict.pyi]
+
+
+-- Use dict literals
+
+[case testTypedDictDictLiterals]
+from mypy_extensions import TypedDict
+
+Point = TypedDict('Point', {'x': int, 'y': int})
+
+def f(p: Point) -> None:
+    p = {'x': 2, 'y': 3}
+    p = {'x': 2}  # E: Key 'y' missing for TypedDict "Point"
+    p = dict(x=2, y=3)
+
+f({'x': 1, 'y': 3})
+f({'x': 1, 'y': 'z'})  # E: Incompatible types (expression has type "str", TypedDict item "y" has type "int")
+
+f(dict(x=1, y=3))
+f(dict(x=1, y=3, z=4))  # E: Extra key 'z' for TypedDict "Point"
+f(dict(x=1, y=3, z=4, a=5))  # E: Extra keys ('z', 'a') for TypedDict "Point"
+
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictExplicitTypes]
+from mypy_extensions import TypedDict
+
+Point = TypedDict('Point', {'x': int, 'y': int})
+
+p1a: Point = {'x': 'hi'}  # E: Key 'y' missing for TypedDict "Point"
+p1b: Point = {}           # E: Keys ('x', 'y') missing for TypedDict "Point"
+
+p2: Point
+p2 = dict(x='bye')  # E: Key 'y' missing for TypedDict "Point"
+
+p3 = Point(x=1, y=2)
+p3 = {'x': 'hi'}  # E: Key 'y' missing for TypedDict "Point"
+
+p4: Point = {'x': 1, 'y': 2}
+
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateAnonymousTypedDictInstanceUsingDictLiteralWithExtraItems]
+from mypy_extensions import TypedDict
+from typing import TypeVar
+A = TypedDict('A', {'x': int, 'y': int})
+B = TypedDict('B', {'x': int, 'y': str})
+T = TypeVar('T')
+def join(x: T, y: T) -> T: return x
+ab = join(A(x=1, y=1), B(x=1, y=''))
+ab = {'x': 1, 'z': 1} # E: Expected TypedDict key 'x' but found keys ('x', 'z')
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateAnonymousTypedDictInstanceUsingDictLiteralWithMissingItems]
+from mypy_extensions import TypedDict
+from typing import TypeVar
+A = TypedDict('A', {'x': int, 'y': int, 'z': int})
+B = TypedDict('B', {'x': int, 'y': int, 'z': str})
+T = TypeVar('T')
+def join(x: T, y: T) -> T: return x
+ab = join(A(x=1, y=1, z=1), B(x=1, y=1, z=''))
+ab = {} # E: Expected TypedDict keys ('x', 'y') but found no keys
+[builtins fixtures/dict.pyi]
+
+
+-- Other TypedDict methods
+
+[case testTypedDictGetMethod]
+# flags: --strict-optional
+from mypy_extensions import TypedDict
+class A: pass
+D = TypedDict('D', {'x': int, 'y': str})
+d: D
+reveal_type(d.get('x')) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+reveal_type(d.get('y')) # E: Revealed type is 'Union[builtins.str, builtins.None]'
+reveal_type(d.get('x', A())) # E: Revealed type is 'Union[builtins.int, __main__.A]'
+reveal_type(d.get('x', 1)) # E: Revealed type is 'builtins.int'
+reveal_type(d.get('y', None)) # E: Revealed type is 'Union[builtins.str, builtins.None]'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testTypedDictGetMethodTypeContext]
+# flags: --strict-optional
+from typing import List
+from mypy_extensions import TypedDict
+class A: pass
+D = TypedDict('D', {'x': List[int], 'y': int})
+d: D
+reveal_type(d.get('x', [])) # E: Revealed type is 'builtins.list[builtins.int]'
+d.get('x', ['x']) # E: List item 0 has incompatible type "str"
+a = ['']
+reveal_type(d.get('x', a)) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str*]]'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testTypedDictGetMethodInvalidArgs]
+from mypy_extensions import TypedDict
+D = TypedDict('D', {'x': int, 'y': str})
+d: D
+d.get() # E: No overload variant of "get" of "Mapping" matches argument types []
+d.get('x', 1, 2) # E: No overload variant of "get" of "Mapping" matches argument types [builtins.str, builtins.int, builtins.int]
+x = d.get('z') # E: TypedDict "D" has no key 'z'
+reveal_type(x) # E: Revealed type is 'Any'
+s = ''
+y = d.get(s)
+reveal_type(y) # E: Revealed type is 'builtins.object*'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testTypedDictMissingMethod]
+from mypy_extensions import TypedDict
+D = TypedDict('D', {'x': int, 'y': str})
+d: D
+d.bad(1) # E: "D" has no attribute "bad"
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictChainedGetMethodWithDictFallback]
+from mypy_extensions import TypedDict
+D = TypedDict('D', {'x': int, 'y': str})
+E = TypedDict('E', {'d': D})
+p = E(d=D(x=0, y=''))
+reveal_type(p.get('d', {'x': 1, 'y': ''})) # E: Revealed type is 'TypedDict('__main__.D', {'x': builtins.int, 'y': builtins.str})'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testTypedDictGetDefaultParameterStillTypeChecked]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+p.get('x', 1 + 'y')     # E: Unsupported operand types for + ("int" and "str")
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testTypedDictChainedGetWithEmptyDictDefault]
+# flags: --strict-optional
+from mypy_extensions import TypedDict
+C = TypedDict('C', {'a': int})
+D = TypedDict('D', {'x': C, 'y': str})
+d: D
+reveal_type(d.get('x', {})) \
+    # E: Revealed type is 'TypedDict('__main__.C', {'a'?: builtins.int})'
+reveal_type(d.get('x', None)) \
+    # E: Revealed type is 'Union[TypedDict('__main__.C', {'a': builtins.int}), builtins.None]'
+reveal_type(d.get('x', {}).get('a')) # E: Revealed type is 'Union[builtins.int, builtins.None]'
+reveal_type(d.get('x', {})['a']) # E: Revealed type is 'builtins.int'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+
+-- Totality (the "total" keyword argument)
+
+[case testTypedDictWithTotalTrue]
+from mypy_extensions import TypedDict
+D = TypedDict('D', {'x': int, 'y': str}, total=True)
+d: D
+reveal_type(d) \
+    # E: Revealed type is 'TypedDict('__main__.D', {'x': builtins.int, 'y': builtins.str})'
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictWithInvalidTotalArgument]
+from mypy_extensions import TypedDict
+A = TypedDict('A', {'x': int}, total=0) # E: TypedDict() "total" argument must be True or False
+B = TypedDict('B', {'x': int}, total=bool) # E: TypedDict() "total" argument must be True or False
+C = TypedDict('C', {'x': int}, x=False) # E: Unexpected keyword argument "x" for "TypedDict"
+D = TypedDict('D', {'x': int}, False) # E: Unexpected arguments to TypedDict()
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictWithTotalFalse]
+from mypy_extensions import TypedDict
+D = TypedDict('D', {'x': int, 'y': str}, total=False)
+def f(d: D) -> None:
+    reveal_type(d) # E: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})'
+f({})
+f({'x': 1})
+f({'y': ''})
+f({'x': 1, 'y': ''})
+f({'x': 1, 'z': ''}) # E: Extra key 'z' for TypedDict "D"
+f({'x': ''}) # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int")
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictConstructorWithTotalFalse]
+from mypy_extensions import TypedDict
+D = TypedDict('D', {'x': int, 'y': str}, total=False)
+def f(d: D) -> None: pass
+reveal_type(D()) # E: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})'
+reveal_type(D(x=1)) # E: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})'
+f(D(y=''))
+f(D(x=1, y=''))
+f(D(x=1, z='')) # E: Extra key 'z' for TypedDict "D"
+f(D(x='')) # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int")
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictIndexingWithNonRequiredKey]
+from mypy_extensions import TypedDict
+D = TypedDict('D', {'x': int, 'y': str}, total=False)
+d: D
+reveal_type(d['x']) # E: Revealed type is 'builtins.int'
+reveal_type(d['y']) # E: Revealed type is 'builtins.str'
+reveal_type(d.get('x')) # E: Revealed type is 'builtins.int'
+reveal_type(d.get('y')) # E: Revealed type is 'builtins.str'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testTypedDictSubtypingWithTotalFalse]
+from mypy_extensions import TypedDict
+A = TypedDict('A', {'x': int})
+B = TypedDict('B', {'x': int}, total=False)
+C = TypedDict('C', {'x': int, 'y': str}, total=False)
+def fa(a: A) -> None: pass
+def fb(b: B) -> None: pass
+def fc(c: C) -> None: pass
+a: A
+b: B
+c: C
+fb(b)
+fc(c)
+fb(c)
+fb(a) # E: Argument 1 to "fb" has incompatible type "A"; expected "B"
+fa(b) # E: Argument 1 to "fa" has incompatible type "B"; expected "A"
+fc(b) # E: Argument 1 to "fc" has incompatible type "B"; expected "C"
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictJoinWithTotalFalse]
+from typing import TypeVar
+from mypy_extensions import TypedDict
+A = TypedDict('A', {'x': int})
+B = TypedDict('B', {'x': int}, total=False)
+C = TypedDict('C', {'x': int, 'y': str}, total=False)
+T = TypeVar('T')
+def j(x: T, y: T) -> T: return x
+a: A
+b: B
+c: C
+reveal_type(j(a, b)) \
+    # E: Revealed type is 'TypedDict({}, fallback=typing.Mapping[builtins.str, <nothing>])'
+reveal_type(j(b, b)) \
+    # E: Revealed type is 'TypedDict({'x'?: builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])'
+reveal_type(j(c, c)) \
+    # E: Revealed type is 'TypedDict({'x'?: builtins.int, 'y'?: builtins.str}, fallback=typing.Mapping[builtins.str, builtins.object])'
+reveal_type(j(b, c)) \
+    # E: Revealed type is 'TypedDict({'x'?: builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])'
+reveal_type(j(c, b)) \
+    # E: Revealed type is 'TypedDict({'x'?: builtins.int}, fallback=typing.Mapping[builtins.str, builtins.int])'
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictClassWithTotalArgument]
+from mypy_extensions import TypedDict
+class D(TypedDict, total=False):
+    x: int
+    y: str
+d: D
+reveal_type(d) # E: Revealed type is 'TypedDict('__main__.D', {'x'?: builtins.int, 'y'?: builtins.str})'
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictClassWithInvalidTotalArgument]
+from mypy_extensions import TypedDict
+class D(TypedDict, total=1): # E: Value of "total" must be True or False
+    x: int
+class E(TypedDict, total=bool): # E: Value of "total" must be True or False
+    x: int
+class F(TypedDict, total=xyz): # E: Value of "total" must be True or False \
+                               # E: Name 'xyz' is not defined
+    x: int
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictClassInheritanceWithTotalArgument]
+from mypy_extensions import TypedDict
+class A(TypedDict):
+    x: int
+class B(TypedDict, A, total=False):
+    y: int
+class C(TypedDict, B, total=True):
+    z: str
+c: C
+reveal_type(c) # E: Revealed type is 'TypedDict('__main__.C', {'x': builtins.int, 'y'?: builtins.int, 'z': builtins.str})'
+[builtins fixtures/dict.pyi]
+
+[case testNonTotalTypedDictInErrorMessages]
+from mypy_extensions import TypedDict
+
+A = TypedDict('A', {'x': int, 'y': str}, total=False)
+B = TypedDict('B', {'x': int, 'z': str, 'a': int}, total=False)
+C = TypedDict('C', {'x': int, 'z': str, 'a': str}, total=False)
+a: A
+b: B
+c: C
+
+def f(a: A) -> None: pass
+
+l = [a, b]  # Join generates an anonymous TypedDict
+f(l) # E: Argument 1 to "f" has incompatible type List[TypedDict({'x'?: int})]; expected "A"
+ll = [b, c]
+f(ll) # E: Argument 1 to "f" has incompatible type List[TypedDict({'x'?: int, 'z'?: str})]; expected "A"
+[builtins fixtures/dict.pyi]
+
+
+-- Create Type (Errors)
+
+[case testCannotCreateTypedDictTypeWithTooFewArguments]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point')  # E: Too few arguments for TypedDict()
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithTooManyArguments]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int}, dict)  # E: Unexpected arguments to TypedDict()
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithInvalidName]
+from mypy_extensions import TypedDict
+Point = TypedDict(dict, {'x': int, 'y': int})  # E: TypedDict() expects a string literal as the first argument
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithInvalidItems]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x'})  # E: TypedDict() expects a dictionary literal as the second argument
+[builtins fixtures/dict.pyi]
+
+-- NOTE: The following code works at runtime but is not yet supported by mypy.
+--       Keyword arguments may potentially be supported in the future.
+[case testCannotCreateTypedDictTypeWithNonpositionalArgs]
+from mypy_extensions import TypedDict
+Point = TypedDict(typename='Point', fields={'x': int, 'y': int})  # E: Unexpected arguments to TypedDict()
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithInvalidItemName]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {int: int, int: int})  # E: Invalid TypedDict() field name
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithInvalidItemType]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': 1, 'y': 1})  # E: Invalid field type
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithInvalidName]
+from mypy_extensions import TypedDict
+X = TypedDict('Y', {'x': int})  # E: First argument 'Y' to TypedDict() does not match variable name 'X'
+[builtins fixtures/dict.pyi]
+
+
+-- Overloading
+
+[case testTypedDictOverloading]
+from typing import overload, Iterable
+from mypy_extensions import TypedDict
+
+A = TypedDict('A', {'x': int})
+
+@overload
+def f(x: Iterable[str]) -> str: ...
+@overload
+def f(x: int) -> int: ...
+def f(x): pass
+
+a: A
+reveal_type(f(a))  # E: Revealed type is 'builtins.str'
+reveal_type(f(1))  # E: Revealed type is 'builtins.int'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testTypedDictOverloading2]
+from typing import overload, Iterable
+from mypy_extensions import TypedDict
+
+A = TypedDict('A', {'x': int})
+
+@overload
+def f(x: Iterable[int]) -> None: ...
+@overload
+def f(x: int) -> None: ...
+def f(x): pass
+
+a: A
+f(a)  # E: Argument 1 to "f" has incompatible type "A"; expected Iterable[int]
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testTypedDictOverloading3]
+from typing import overload
+from mypy_extensions import TypedDict
+
+A = TypedDict('A', {'x': int})
+
+@overload
+def f(x: str) -> None: ...
+@overload
+def f(x: int) -> None: ...
+def f(x): pass
+
+a: A
+f(a)  # E: No overload variant of "f" matches argument types [TypedDict('__main__.A', {'x': builtins.int})]
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testTypedDictOverloading4]
+from typing import overload
+from mypy_extensions import TypedDict
+
+A = TypedDict('A', {'x': int})
+B = TypedDict('B', {'x': str})
+
+@overload
+def f(x: A) -> int: ...
+@overload
+def f(x: int) -> str: ...
+def f(x): pass
+
+a: A
+b: B
+reveal_type(f(a)) # E: Revealed type is 'builtins.int'
+reveal_type(f(1)) # E: Revealed type is 'builtins.str'
+f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testTypedDictOverloading5]
+from typing import overload
+from mypy_extensions import TypedDict
+
+A = TypedDict('A', {'x': int})
+B = TypedDict('B', {'y': str})
+C = TypedDict('C', {'y': int})
+
+@overload
+def f(x: A) -> None: ...
+@overload
+def f(x: B) -> None: ...
+def f(x): pass
+
+a: A
+b: B
+c: C
+f(a)
+f(b)
+f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "A"
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testTypedDictOverloading6]
+from typing import overload
+from mypy_extensions import TypedDict
+
+A = TypedDict('A', {'x': int})
+B = TypedDict('B', {'y': str})
+
+@overload
+def f(x: A) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def f(x: B) -> str: ...
+def f(x): pass
+
+a: A
+b: B
+reveal_type(f(a)) # E: Revealed type is 'Any'
+reveal_type(f(b)) # E: Revealed type is 'Any'
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+
+-- Special cases
+
+[case testForwardReferenceInTypedDict]
+from typing import Mapping
+from mypy_extensions import TypedDict
+X = TypedDict('X', {'b': 'B', 'c': 'C'})
+class B: pass
+class C(B): pass
+x: X
+reveal_type(x) # E: Revealed type is 'TypedDict('__main__.X', {'b': __main__.B, 'c': __main__.C})'
+m1: Mapping[str, B] = x
+m2: Mapping[str, C] = x # E: Incompatible types in assignment (expression has type "X", variable has type Mapping[str, C])
+[builtins fixtures/dict.pyi]
+
+[case testForwardReferenceInClassTypedDict]
+from typing import Mapping
+from mypy_extensions import TypedDict
+class X(TypedDict):
+    b: 'B'
+    c: 'C'
+class B: pass
+class C(B): pass
+x: X
+reveal_type(x) # E: Revealed type is 'TypedDict('__main__.X', {'b': __main__.B, 'c': __main__.C})'
+m1: Mapping[str, B] = x
+m2: Mapping[str, C] = x # E: Incompatible types in assignment (expression has type "X", variable has type Mapping[str, C])
+[builtins fixtures/dict.pyi]
+
+[case testForwardReferenceToTypedDictInTypedDict]
+from typing import Mapping
+from mypy_extensions import TypedDict
+# Forward references don't quite work yet
+X = TypedDict('X', {'a': 'A'}) # E: Invalid type "__main__.A"
+A = TypedDict('A', {'b': int})
+x: X
+reveal_type(x) # E: Revealed type is 'TypedDict('__main__.X', {'a': TypedDict('__main__.A', {'b': builtins.int})})'
+reveal_type(x['a']['b']) # E: Revealed type is 'builtins.int'
+[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test
new file mode 100644
index 0000000..fa0453e
--- /dev/null
+++ b/test-data/unit/check-typevar-values.test
@@ -0,0 +1,517 @@
+-- Test cases for type variables with values restriction.
+
+
+[case testCallGenericFunctionWithTypeVarValueRestriction]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> None: pass
+f(1)
+f('x')
+f(object()) # E: Type argument 1 of "f" has incompatible value "object"
+
+[case testCallGenericFunctionWithTypeVarValueRestrictionUsingContext]
+from typing import TypeVar, List
+T = TypeVar('T', int, str)
+def f(x: T) -> List[T]: pass
+i = [1]
+s = ['x']
+o = [object()]
+i = f(1)
+s = f('')
+o = f(1) # E: Type argument 1 of "f" has incompatible value "object"
+[builtins fixtures/list.pyi]
+
+[case testCallGenericFunctionWithTypeVarValueRestrictionAndAnyArgs]
+from typing import TypeVar, Any, cast
+T = TypeVar('T', int, str)
+def f(x: T) -> None: pass
+f(cast(Any, object()))
+[out]
+
+[case testCallGenericFunctionWithTypeVarValueRestrictionInDynamicFunc]
+from typing import TypeVar, Any
+T = TypeVar('T', int, str)
+def f(x: T) -> None: pass
+def g():
+    f(object())
+[out]
+
+[case testCallGenericFunctionWithTypeVarValueRestrictionUsingSubtype]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> None: pass
+class S(str): pass
+f(S())
+[out]
+
+[case testCheckGenericFunctionBodyWithTypeVarValues]
+from typing import TypeVar
+class A:
+    def f(self, x: int) -> A: return self
+class B:
+    def f(self, x: int) -> B: return self
+AB = TypeVar('AB', A, B)
+def f(x: AB) -> AB:
+    x = x.f(1)
+    return x.f(1)
+
+[case testCheckGenericFunctionBodyWithTypeVarValues2]
+from typing import TypeVar
+class A:
+    def f(self) -> A: return A()
+    def g(self) -> B: return B()
+class B:
+    def f(self) -> A: return A()
+    def g(self) -> B: return B()
+AB = TypeVar('AB', A, B)
+def f(x: AB) -> AB:
+    return x.f() # Error
+def g(x: AB) -> AB:
+    return x.g() # Error
+[out]
+main:10: error: Incompatible return value type (got "A", expected "B")
+main:12: error: Incompatible return value type (got "B", expected "A")
+
+[case testTypeInferenceAndTypeVarValues]
+from typing import TypeVar
+class A:
+    def f(self) -> A: return self
+    def g(self) -> B: return B()
+class B:
+    def f(self) -> B: return self
+    def g(self) -> B: return B()
+AB = TypeVar('AB', A, B)
+def f(x: AB) -> AB:
+    y = x
+    if y:
+        return y.f()
+    else:
+        return y.g() # E: Incompatible return value type (got "B", expected "A")
+[out]
+
+[case testTypeDeclaredBasedOnTypeVarWithValues]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    a = None  # type: T
+    b = None # type: T
+    a = x
+    b = x
+    a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    b = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    return x
+[out]
+
+[case testIsinstanceAndTypeVarValues]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    if isinstance(x, int):
+        return 2
+    return x
+def g(x: T) -> T:
+    if isinstance(x, str):
+        return ''
+    return x
+def h(x: T) -> T:
+    if isinstance(x, int):
+        return '' # E: Incompatible return value type (got "str", expected "int")
+    return x
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndTypeVarValues2]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    if isinstance(x, int):
+        return 2
+    else:
+        return ''
+def g(x: T) -> T:
+    if isinstance(x, int):
+        return '' # E: Incompatible return value type (got "str", expected "int")
+    else:
+        return 2  # E: Incompatible return value type (got "int", expected "str")
+    return x
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndTypeVarValues3]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    if isinstance(x, int):
+        y = 1
+    else:
+        y = ''
+    return y
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceAndTypeVarValues4]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    if isinstance(x, int):
+        y = 1
+    else:
+        y = object()
+    return y # E: Incompatible return value type (got "object", expected "str")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndTypeVarValues5]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    if isinstance(x, int):
+        y = object()
+    else:
+        y = ''
+    return y # E: Incompatible return value type (got "object", expected "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceWithUserDefinedTypeAndTypeVarValues]
+from typing import TypeVar
+class A: pass
+class B: pass
+T = TypeVar('T', A, B)
+def f(x: T) -> None:
+    y = x
+    if isinstance(x, A):
+        # This is only checked when x is A, since A and B are not considered overlapping.
+        x = y
+        x = A()
+    else:
+        x = B()
+        x = y
+        x.foo() # E: "B" has no attribute "foo"
+S = TypeVar('S', int, str)
+def g(x: S) -> None:
+    y = x
+    if isinstance(x, int):
+        x = y
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceWithUserDefinedTypeAndTypeVarValues2]
+from typing import TypeVar
+class S(str): pass
+T = TypeVar('T', S, int)
+def f(x: T) -> None:
+    y = x
+    if isinstance(x, S):
+        # This is checked only when the type of x is str.
+        x = y
+        x = S()
+        x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "S")
+    else:
+        x = y
+        x = 1
+        x = S() # E: Incompatible types in assignment (expression has type "S", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testTypeVarValuesAndNestedCalls]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(m: T) -> int: pass
+def h(x: int) -> int: pass
+def g(a: T) -> None:
+    h(f(a))
+[out]
+
+[case testGenericTypeWithTypevarValues]
+from typing import TypeVar, Generic, Any
+X = TypeVar('X', int, str)
+class A(Generic[X]): pass
+a = None  # type: A[int]
+b = None  # type: A[str]
+d = None  # type: A[object] # E: Type argument 1 of "A" has incompatible value "object"
+c = None  # type: A[Any]
+
+[case testConstructGenericTypeWithTypevarValuesAndTypeInference]
+from typing import TypeVar, Generic, Any, cast
+X = TypeVar('X', int, str)
+class A(Generic[X]):
+    def __init__(self, x: X) -> None: pass
+A(1)
+A('x')
+A(cast(Any, object()))
+A(object()) # E: Type argument 1 of "A" has incompatible value "object"
+
+[case testGenericTypeWithTypevarValuesAndTypevarArgument]
+from typing import TypeVar, Generic
+class C: pass
+X = TypeVar('X', int, str)
+Y = TypeVar('Y', int, C)
+Z = TypeVar('Z')
+class D(Generic[X]):
+    def __init__(self, x: X) -> None: pass
+def f(x: X) -> None:
+    a = None  # type: D[X]
+def g(x: Y) -> None:
+    a = None  # type: D[Y]
+def h(x: Z) -> None:
+    a = None  # type: D[Z]
+[out]
+main:11: error: Invalid type argument value for "D"
+main:13: error: Type variable "Z" not valid as type argument value for "D"
+
+[case testGenericTypeWithTypevarValuesAndSubtypePromotion]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class S(str): pass
+class C(Generic[X]):
+    def __init__(self, x: X) -> None: pass
+x = None  # type: C[str]
+y = C(S())
+x = y
+y = x
+c_int = C(1) # type: C[int]
+y = c_int # E: Incompatible types in assignment (expression has type C[int], variable has type C[str])
+
+[case testGenericTypeBodyWithTypevarValues]
+from typing import TypeVar, Generic
+class A:
+    def f(self, x: int) -> None: pass
+    def g(self, x: int) -> None: pass
+    def h(self, x: str) -> None: pass
+class B:
+    def f(self, x: int) -> None: pass
+    def g(self, x: str) -> None: pass
+    def h(self, x: int) -> None: pass
+X = TypeVar('X', A, B)
+class C(Generic[X]):
+    def f(self, x: X) -> None:
+        x.f(1)
+        x.g(1) # E: Argument 1 to "g" of "B" has incompatible type "int"; expected "str"
+        x.h(1) # E: Argument 1 to "h" of "A" has incompatible type "int"; expected "str"
+[out]
+
+[case testAttributeInGenericTypeWithTypevarValues1]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class C(Generic[X]):
+    x = None  # type: X
+    def f(self, x: X) -> None:
+        self.x = x
+        self.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[out]
+
+[case testAttributeInGenericTypeWithTypevarValues2]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class C(Generic[X]):
+    x = None  # type: X
+cn = C() # type: C[int]
+cn.x = 1
+cn.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+cs = C() # type: C[str]
+cs.x = ''
+cs.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testInferredAttributeInGenericClassBodyWithTypevarValues]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class C(Generic[X]):
+    x = 1
+C.x = 1
+C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testMultipleClassTypevarsWithValues1]
+from typing import TypeVar, Generic
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def f(self, x: str) -> None: pass
+X = TypeVar('X', A, B)
+Y = TypeVar('Y', int, str)
+class C(Generic[X, Y]):
+    def f(self, x: X, y: Y) -> None:
+        x.f(y)
+[out]
+main:10: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+main:10: error: Argument 1 to "f" of "B" has incompatible type "int"; expected "str"
+
+[case testMultipleClassTypevarsWithValues2]
+from typing import TypeVar, Generic
+class A: pass
+class B: pass
+X = TypeVar('X', A, B)
+Y = TypeVar('Y', int, str)
+class C(Generic[X, Y]): pass
+a = None  # type: C[A, int]
+b = None  # type: C[B, str]
+c = None  # type: C[int, int] # E: Type argument 1 of "C" has incompatible value "int"
+d = None  # type: C[A, A]     # E: Type argument 2 of "C" has incompatible value "A"
+
+[case testCallGenericFunctionUsingMultipleTypevarsWithValues]
+from typing import TypeVar
+class A: pass
+class B: pass
+X = TypeVar('X', A, B)
+Y = TypeVar('Y', int, str)
+def f(x: X, y: Y) -> None: pass
+f(A(), '')
+f(B(), 1)
+f(A(), A()) # E: Type argument 2 of "f" has incompatible value "A"
+f(1, 1) # E: Type argument 1 of "f" has incompatible value "int"
+
+[case testGenericFunctionWithNormalAndRestrictedTypevar]
+from typing import TypeVar, Generic
+X = TypeVar('X')
+Y = TypeVar('Y', int, str)
+class C(Generic[Y]):
+    def __init__(self, y: Y) -> None: pass
+def f(x: X, y: Y, z: int) -> None:
+    C(y)
+    C(x)  # Error
+    z = x # Error
+    z = y # Error
+    y.foo # Error
+[out]
+main:8: error: Type argument 1 of "C" has incompatible value "X"
+main:9: error: Incompatible types in assignment (expression has type "X", variable has type "int")
+main:10: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:11: error: "int" has no attribute "foo"
+main:11: error: "str" has no attribute "foo"
+
+[case testTypeVarWithValueInferredFromObjectReturnTypeContext]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def c1(x: object) -> None: pass
+def c2(x: int) -> None: pass
+def c3(x: str) -> None: pass
+def g(x: T) -> T: pass
+c1(g(''))
+c2(g(1))
+c3(g(''))
+c2(g(''))  # E: Argument 1 to "c2" has incompatible type "str"; expected "int"
+c3(g(1))   # E: Argument 1 to "c3" has incompatible type "int"; expected "str"
+
+[case testTypeVarWithValueInferredFromObjectReturnTypeContext2]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+class ss(str): pass
+def c(x: ss) -> None: pass
+def g(x: T) -> T: pass
+c(g(''))
+c(g(1))
+[out]
+main:6: error: Argument 1 to "c" has incompatible type "str"; expected "ss"
+main:7: error: Argument 1 to "c" has incompatible type "int"; expected "ss"
+
+
+-- Special cases
+-- -------------
+
+
+[case testTypevarValuesSpecialCase1]
+from typing import TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T', int, str)
+class A(Generic[T]):
+    @abstractmethod
+    def f(self) -> 'A[T]': pass
+class B(A[str]):
+    @abstractmethod
+    def f(self) -> 'B': pass
+class C(A[str]):
+    @abstractmethod
+    def f(self) -> int: # E: Return type of "f" incompatible with supertype "A"
+        pass
+[out]
+
+[case testDefaultArgumentValueInGenericClassWithTypevarValues]
+from typing import TypeVar, Generic
+T = TypeVar('T', int, str)
+class C(Generic[T]):
+    def f(self, x: int = None) -> None: pass
+
+[case testTypevarValuesWithOverloadedFunctionSpecialCase]
+from foo import *
+[file foo.pyi]
+from typing import TypeVar, overload, Callable
+
+T = TypeVar('T', int, str)
+def f(x: T) -> None:
+     y = m(g, x)
+     x = y
+     y = object()
+
+A = TypeVar('A')
+R = TypeVar('R')
+def m(f: Callable[[A], R], it: A) -> A: pass
+
+@overload
+def g(x: int) -> int: return x
+@overload
+def g(x: str) -> str: return x
+[out]
+tmp/foo.pyi:7: error: Incompatible types in assignment (expression has type "object", variable has type "int")
+tmp/foo.pyi:7: error: Incompatible types in assignment (expression has type "object", variable has type "str")
+
+[case testGenericFunctionSubtypingWithTypevarValues]
+from typing import TypeVar
+class A: pass
+T = TypeVar('T', int, str)
+U = TypeVar('U', str, A, int)
+def f(x: T) -> T: pass
+def g(x: U) -> U: pass
+a = f
+a = f
+a = g
+b = g
+b = g
+b = f # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[U], U])
+
+[case testInnerFunctionWithTypevarValues]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+U = TypeVar('U', int, str)
+def outer(x: T) -> T:
+    def inner(y: T) -> T:
+        return x
+    def inner2(y: U) -> U:
+        return y
+    inner(x)
+    inner(3) # E: Argument 1 to "inner" has incompatible type "int"; expected "str"
+    inner2(x)
+    inner2(3)
+    outer(3)
+    return x
+[out]
+
+[case testInnerFunctionMutualRecursionWithTypevarValues]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def outer(x: T) -> T:
+    def inner1(y: T) -> T:
+        return inner2(y)
+    def inner2(y: T) -> T:
+        return inner1('a') # E: Argument 1 to "inner1" has incompatible type "str"; expected "int"
+    return inner1(x)
+[out]
+
+[case testClassMemberTypeVarInFunctionBody]
+from typing import TypeVar, List
+class C:
+    T = TypeVar('T', bound=int)
+    def f(self, x: T) -> T:
+        L = List[C.T] # this creates a variable, not an alias
+        reveal_type(L)  # E: Revealed type is 'Overload(def () -> builtins.list[T`-1], def (x: typing.Iterable[T`-1]) -> builtins.list[T`-1])'
+        y: C.T = x
+        L().append(x)
+        C.T  # E: Type variable "C.T" cannot be used as an expression
+        A = C.T  # E: Type variable "C.T" cannot be used as an expression
+        return L()[0]
+
+[builtins fixtures/list.pyi]
+
+[case testParameterLessGenericAsRestriction]
+from typing import Sequence, Iterable, TypeVar
+S = TypeVar('S', Sequence, Iterable)
+def my_len(s: S) -> None: pass
+def crash() -> None: my_len((0,))
diff --git a/test-data/unit/check-underscores.test b/test-data/unit/check-underscores.test
new file mode 100644
index 0000000..88f95ef
--- /dev/null
+++ b/test-data/unit/check-underscores.test
@@ -0,0 +1,16 @@
+[case testUnderscoresRequire36]
+# flags: --python-version 3.5
+x = 1000_000  # E: Underscores in numeric literals are only supported in Python 3.6 and greater
+[out]
+
+[case testUnderscoresSyntaxError]
+# flags: --python-version 3.6
+x = 1000_000_  # E: invalid token
+[out]
+
+[case testUnderscoresBasics]
+# flags: --python-version 3.6
+x: int
+x = 1000_000
+x = 0x_FF_FF_FF_FF
+y: str = 1000_000.000_001  # E: Incompatible types in assignment (expression has type "float", variable has type "str")
diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test
new file mode 100644
index 0000000..7dfd225
--- /dev/null
+++ b/test-data/unit/check-unions.test
@@ -0,0 +1,498 @@
+-- Type checking of union types
+
+[case testUnion1]
+from typing import Union
+def f(x: Union[int, str]) -> None:
+    if isinstance(x, int):
+        y = 1
+        y = x
+    elif isinstance(x, str):
+        z = 'a'
+        z = x
+[builtins fixtures/isinstance.pyi]
+
+[case testUnion2]
+from typing import Union
+def f(x: Union[int, str]) -> None:
+    if isinstance(x, int):
+        y = 1
+        y = x
+    else:
+        z = 'a'
+        z = x
+[builtins fixtures/isinstance.pyi]
+
+[case testUnion3]
+from typing import Union
+def f(x: Union[int, str]) -> None:
+    if isinstance(x, int):
+        y = 1
+        y = x
+    else:
+        z = 2
+        z = x # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testUnionAnyIsInstance]
+from typing import Any, Union
+
+def func(v: Union[int, Any]) -> None:
+    if isinstance(v, int):
+        reveal_type(v) # E: Revealed type is 'builtins.int'
+    else:
+        reveal_type(v) # E: Revealed type is 'Any'
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testUnionAttributeAccess]
+from typing import Union
+
+class A: y = 1
+class B: y = 2
+class C: pass
+class D: pass
+
+u = None # type: Union[A, C, D]
+v = None # type: Union[C, D]
+w = None # type: Union[A, B]
+x = None # type: Union[A, C]
+y = None # type: int
+z = None # type: str
+
+y = w.y
+v.y           # E: Item "C" of "Union[C, D]" has no attribute "y" \
+              # E: Item "D" of "Union[C, D]" has no attribute "y"
+u.y           # E: Item "C" of "Union[A, C, D]" has no attribute "y" \
+              # E: Item "D" of "Union[A, C, D]" has no attribute "y"
+z = w.y       # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+w.y = 'a'     # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+y = x.y       # E: Item "C" of "Union[A, C]" has no attribute "y"
+zz = x.y      # E: Item "C" of "Union[A, C]" has no attribute "y"
+z = zz        # E: Incompatible types in assignment (expression has type "Union[int, Any]", variable has type "str")
+
+[builtins fixtures/isinstance.pyi]
+
+[case testUnionMethodCalls]
+from typing import Union
+
+class A:
+    def foo(self) -> int: pass
+class B:
+    def foo(self) -> int: pass
+class C:
+    def foo(self) -> str: pass
+
+x = None # type: Union[A, B]
+y = None # type: Union[A, C]
+i = None # type: int
+
+x.foo()
+y.foo()
+i = x.foo()
+i = y.foo()   # E: Incompatible types in assignment (expression has type "Union[int, str]", variable has type "int")
+
+[builtins fixtures/isinstance.pyi]
+
+[case testUnionIndexing]
+from typing import Union, List
+x = None # type: Union[List[int], str]
+x[2]
+x[2] + 1 # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnionAsOverloadArg]
+from foo import *
+x = 0
+x = f(1)
+x = f('')
+s = ''
+s = f(int)
+s = f(1)    # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+x = f(int)  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[file foo.pyi]
+from typing import Union, overload
+@overload
+def f(x: Union[int, str]) -> int: pass
+@overload
+def f(x: type) -> str: pass
+
+[case testUnionWithNoneItem]
+from typing import Union
+def f() -> Union[int, None]: pass
+x = 1
+x = f()
+
+[case testOptional]
+from typing import Optional
+def f(x: Optional[int]) -> None: pass
+f(1)
+f(None)
+f('') # E: Argument 1 to "f" has incompatible type "str"; expected "Optional[int]"
+
+[case testUnionSimplificationGenericFunction]
+from typing import TypeVar, Union, List
+T = TypeVar('T')
+def f(x: List[T]) -> Union[T, int]: pass
+def g(y: str) -> None: pass
+a = f([1])
+g(a) # E: Argument 1 to "g" has incompatible type "int"; expected "str"
+[builtins fixtures/list.pyi]
+
+[case testUnionSimplificationGenericClass]
+from typing import TypeVar, Union, Generic
+T = TypeVar('T')
+U = TypeVar('U')
+class C(Generic[T, U]):
+    def f(self, x: str) -> Union[T, U]: pass
+a = C() # type: C[int, int]
+b = a.f('a')
+a.f(b) # E: Argument 1 to "f" of "C" has incompatible type "int"; expected "str"
+
+[case testUnionOrderEquivalence]
+from typing import Union
+
+def foo(): pass
+
+S = str
+T = int
+
+if foo():
+    def f(x: Union[int, str]) -> None: pass
+elif foo():
+    def f(x: Union[str, int]) -> None: pass
+elif foo():
+    def f(x: Union[int, str, int, int, str]) -> None: pass
+elif foo():
+    def f(x: Union[int, str, float]) -> None: pass  # E: All conditional function variants must have identical signatures
+elif foo():
+    def f(x: Union[S, T]) -> None: pass
+elif foo():
+    def f(x: Union[str]) -> None: pass  # E: All conditional function variants must have identical signatures
+else:
+    def f(x: Union[Union[int, T], Union[S, T], str]) -> None: pass
+
+# Checks bidirectionality of testing. The first definition of g is consistent with
+# the second, but not vice-versa.
+if foo():
+    def g(x: Union[int, str, bytes]) -> None: pass
+else:
+    def g(x: Union[int, str]) -> None: pass  # E: All conditional function variants must have identical signatures
+
+[case testUnionSimplificationSpecialCases]
+from typing import Any, TypeVar, Union
+
+class C(Any): pass
+
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+a = None # type: Any
+
+reveal_type(u(C(), None))  # E: Revealed type is '__main__.C*'
+reveal_type(u(None, C()))  # E: Revealed type is '__main__.C*'
+
+reveal_type(u(C(), a))  # E: Revealed type is 'Union[Any, __main__.C*]'
+reveal_type(u(a, C()))  # E: Revealed type is 'Union[__main__.C*, Any]'
+
+reveal_type(u(C(), C()))  # E: Revealed type is '__main__.C*'
+reveal_type(u(a, a))  # E: Revealed type is 'Any'
+
+[case testUnionSimplificationSpecialCase2]
+from typing import Any, TypeVar, Union
+
+class C(Any): pass
+
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+def f(x: T) -> None:
+    reveal_type(u(C(), x)) # E: Revealed type is 'Union[T`-1, __main__.C*]'
+    reveal_type(u(x, C())) # E: Revealed type is 'Union[__main__.C*, T`-1]'
+
+[case testUnionSimplificationSpecialCase3]
+from typing import Any, TypeVar, Generic, Union
+
+class C(Any): pass
+
+V = TypeVar('V')
+T = TypeVar('T')
+
+class M(Generic[V]):
+    def get(self, default: T) -> Union[V, T]: ...
+
+def f(x: M[C]) -> None:
+    y = x.get(None)
+    reveal_type(y) # E: Revealed type is '__main__.C'
+
+[case testUnionSimplificationSpecialCases]
+from typing import Any, TypeVar, Union
+
+class C(Any): pass
+
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+a = None # type: Any
+
+# Base-class-Any and None, simplify
+reveal_type(u(C(), None))  # E: Revealed type is '__main__.C*'
+reveal_type(u(None, C()))  # E: Revealed type is '__main__.C*'
+
+# Normal instance type and None, simplify
+reveal_type(u(1, None))  # E: Revealed type is 'builtins.int*'
+reveal_type(u(None, 1))  # E: Revealed type is 'builtins.int*'
+
+# Normal instance type and base-class-Any, no simplification
+reveal_type(u(C(), 1))  # E: Revealed type is 'Union[builtins.int*, __main__.C*]'
+reveal_type(u(1, C()))  # E: Revealed type is 'Union[__main__.C*, builtins.int*]'
+
+# Normal instance type and Any, no simplification
+reveal_type(u(1, a))  # E: Revealed type is 'Union[Any, builtins.int*]'
+reveal_type(u(a, 1))  # E: Revealed type is 'Union[builtins.int*, Any]'
+
+# Any and base-class-Any, no simplification
+reveal_type(u(C(), a))  # E: Revealed type is 'Union[Any, __main__.C*]'
+reveal_type(u(a, C()))  # E: Revealed type is 'Union[__main__.C*, Any]'
+
+# Two normal instance types, simplify
+reveal_type(u(1, object()))  # E: Revealed type is 'builtins.object*'
+reveal_type(u(object(), 1))  # E: Revealed type is 'builtins.object*'
+
+# Two normal instance types, no simplification
+reveal_type(u(1, ''))  # E: Revealed type is 'Union[builtins.str*, builtins.int*]'
+reveal_type(u('', 1))  # E: Revealed type is 'Union[builtins.int*, builtins.str*]'
+
+[case testUnionSimplificationWithDuplicateItems]
+from typing import Any, TypeVar, Union
+
+class C(Any): pass
+
+T = TypeVar('T')
+S = TypeVar('S')
+R = TypeVar('R')
+def u(x: T, y: S, z: R) -> Union[R, S, T]: pass
+
+a = None # type: Any
+
+reveal_type(u(1, 1, 1))  # E: Revealed type is 'builtins.int*'
+reveal_type(u(C(), C(), None))  # E: Revealed type is '__main__.C*'
+reveal_type(u(a, a, 1))  # E: Revealed type is 'Union[builtins.int*, Any]'
+reveal_type(u(a, C(), a))  # E: Revealed type is 'Union[Any, __main__.C*]'
+reveal_type(u('', 1, 1))  # E: Revealed type is 'Union[builtins.int*, builtins.str*]'
+
+[case testUnionAndBinaryOperation]
+from typing import Union
+class A: pass
+def f(x: Union[int, str, A]):
+    x + object() # E: Unsupported left operand type for + (some union) \
+        # E: Unsupported operand types for + (likely involving Union)
+
+[case testNarrowingDownNamedTupleUnion]
+from typing import NamedTuple, Union
+
+A = NamedTuple('A', [('y', int)])
+B = NamedTuple('B', [('x', int)])
+C = NamedTuple('C', [('x', int)])
+
+def foo(a: Union[A, B, C]):
+    if isinstance(a, (B, C)):
+        reveal_type(a) # E: Revealed type is 'Union[Tuple[builtins.int, fallback=__main__.B], Tuple[builtins.int, fallback=__main__.C]]'
+        a.x
+        a.y # E: Item "B" of "Union[B, C]" has no attribute "y" \
+            # E: Item "C" of "Union[B, C]" has no attribute "y"
+        b = a # type: Union[B, C]
+[builtins fixtures/isinstance.pyi]
+
+[case testSimplifyingUnionAndTypePromotions]
+from typing import TypeVar, Union
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+reveal_type(u(1, 2.3))  # E: Revealed type is 'builtins.float*'
+reveal_type(u(2.3, 1))  # E: Revealed type is 'builtins.float*'
+reveal_type(u(False, 2.2)) # E: Revealed type is 'builtins.float*'
+reveal_type(u(2.2, False)) # E: Revealed type is 'builtins.float*'
+[builtins fixtures/primitives.pyi]
+
+[case testSimplifyingUnionWithTypeTypes1]
+from typing import TypeVar, Union, Type, Any
+
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+t_o = None  # type: Type[object]
+t_s = None  # type: Type[str]
+t_a = None  # type: Type[Any]
+
+# Two identical items
+reveal_type(u(t_o, t_o)) # E: Revealed type is 'Type[builtins.object]'
+reveal_type(u(t_s, t_s)) # E: Revealed type is 'Type[builtins.str]'
+reveal_type(u(t_a, t_a)) # E: Revealed type is 'Type[Any]'
+reveal_type(u(type, type)) # E: Revealed type is 'def (x: builtins.object) -> builtins.type'
+
+# One type, other non-type
+reveal_type(u(t_s, 1)) # E: Revealed type is 'Union[builtins.int*, Type[builtins.str]]'
+reveal_type(u(1, t_s)) # E: Revealed type is 'Union[Type[builtins.str], builtins.int*]'
+reveal_type(u(type, 1)) # E: Revealed type is 'Union[builtins.int*, def (x: builtins.object) -> builtins.type]'
+reveal_type(u(1, type)) # E: Revealed type is 'Union[def (x: builtins.object) -> builtins.type, builtins.int*]'
+reveal_type(u(t_a, 1)) # E: Revealed type is 'Union[builtins.int*, Type[Any]]'
+reveal_type(u(1, t_a)) # E: Revealed type is 'Union[Type[Any], builtins.int*]'
+reveal_type(u(t_o, 1)) # E: Revealed type is 'Union[builtins.int*, Type[builtins.object]]'
+reveal_type(u(1, t_o)) # E: Revealed type is 'Union[Type[builtins.object], builtins.int*]'
+
+[case testSimplifyingUnionWithTypeTypes2]
+from typing import TypeVar, Union, Type, Any
+
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+t_o = None  # type: Type[object]
+t_s = None  # type: Type[str]
+t_a = None  # type: Type[Any]
+t = None    # type: type
+
+# Union with object
+reveal_type(u(t_o, object())) # E: Revealed type is 'builtins.object*'
+reveal_type(u(object(), t_o)) # E: Revealed type is 'builtins.object*'
+reveal_type(u(t_s, object())) # E: Revealed type is 'builtins.object*'
+reveal_type(u(object(), t_s)) # E: Revealed type is 'builtins.object*'
+reveal_type(u(t_a, object())) # E: Revealed type is 'builtins.object*'
+reveal_type(u(object(), t_a)) # E: Revealed type is 'builtins.object*'
+
+# Union between type objects
+reveal_type(u(t_o, t_a)) # E: Revealed type is 'Union[Type[Any], Type[builtins.object]]'
+reveal_type(u(t_a, t_o)) # E: Revealed type is 'Union[Type[builtins.object], Type[Any]]'
+reveal_type(u(t_s, t_o)) # E: Revealed type is 'Type[builtins.object]'
+reveal_type(u(t_o, t_s)) # E: Revealed type is 'Type[builtins.object]'
+reveal_type(u(t_o, type)) # E: Revealed type is 'Type[builtins.object]'
+reveal_type(u(type, t_o)) # E: Revealed type is 'Type[builtins.object]'
+reveal_type(u(t_a, t)) # E: Revealed type is 'builtins.type*'
+reveal_type(u(t, t_a)) # E: Revealed type is 'builtins.type*'
+# The following should arguably not be simplified, but it's unclear how to fix this
+# without causing regressions elsewhere.
+reveal_type(u(t_o, t)) # E: Revealed type is 'builtins.type*'
+reveal_type(u(t, t_o)) # E: Revealed type is 'builtins.type*'
+
+[case testNotSimplifyingUnionWithMetaclass]
+from typing import TypeVar, Union, Type, Any
+
+class M(type): pass
+class M2(M): pass
+class A(metaclass=M): pass
+
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+a: Any
+t_a: Type[A]
+
+reveal_type(u(M(*a), t_a)) # E: Revealed type is 'Union[Type[__main__.A], __main__.M*]'
+reveal_type(u(t_a, M(*a))) # E: Revealed type is 'Union[__main__.M*, Type[__main__.A]]'
+
+reveal_type(u(M2(*a), t_a)) # E: Revealed type is 'Union[Type[__main__.A], __main__.M2*]'
+reveal_type(u(t_a, M2(*a))) # E: Revealed type is 'Union[__main__.M2*, Type[__main__.A]]'
+
+[case testSimplifyUnionWithCallable]
+from typing import TypeVar, Union, Any, Callable
+
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+class C: pass
+class D(C): pass
+
+D_C: Callable[[D], C]
+A_C: Callable[[Any], C]
+D_A: Callable[[D], Any]
+C_C: Callable[[C], C]
+D_D: Callable[[D], D]
+i_C: Callable[[int], C]
+
+# TODO: Test argument names and kinds once we have flexible callable types.
+
+reveal_type(u(D_C, D_C)) # E: Revealed type is 'def (__main__.D) -> __main__.C'
+
+reveal_type(u(A_C, D_C)) # E: Revealed type is 'Union[def (__main__.D) -> __main__.C, def (Any) -> __main__.C]'
+reveal_type(u(D_C, A_C)) # E: Revealed type is 'Union[def (Any) -> __main__.C, def (__main__.D) -> __main__.C]'
+
+reveal_type(u(D_A, D_C)) # E: Revealed type is 'Union[def (__main__.D) -> __main__.C, def (__main__.D) -> Any]'
+reveal_type(u(D_C, D_A)) # E: Revealed type is 'Union[def (__main__.D) -> Any, def (__main__.D) -> __main__.C]'
+
+reveal_type(u(D_C, C_C)) # E: Revealed type is 'def (__main__.D) -> __main__.C'
+reveal_type(u(C_C, D_C)) # E: Revealed type is 'def (__main__.D) -> __main__.C'
+
+reveal_type(u(D_C, D_D)) # E: Revealed type is 'def (__main__.D) -> __main__.C'
+reveal_type(u(D_D, D_C)) # E: Revealed type is 'def (__main__.D) -> __main__.C'
+
+reveal_type(u(D_C, i_C)) # E: Revealed type is 'Union[def (builtins.int) -> __main__.C, def (__main__.D) -> __main__.C]'
+
+[case testUnionOperatorMethodSpecialCase]
+from typing import Union
+class C:
+    def __le__(self, x: 'C') -> int: ...
+class D:
+    def __le__(self, other) -> int: ...
+class E:
+    def __ge__(self, other: Union[C, D]) -> int: ...
+
+[case testUnionSimplificationWithBoolIntAndFloat]
+from typing import List, Union
+l = reveal_type([]) # type: List[Union[bool, int, float]] \
+    # E: Revealed type is 'builtins.list[builtins.float]'
+reveal_type(l) \
+    # E: Revealed type is 'builtins.list[Union[builtins.bool, builtins.int, builtins.float]]'
+[builtins fixtures/list.pyi]
+
+[case testUnionSimplificationWithBoolIntAndFloat2]
+from typing import List, Union
+l = reveal_type([]) # type: List[Union[bool, int, float, str]] \
+    # E: Revealed type is 'builtins.list[Union[builtins.float, builtins.str]]'
+reveal_type(l) \
+    # E: Revealed type is 'builtins.list[Union[builtins.bool, builtins.int, builtins.float, builtins.str]]'
+[builtins fixtures/list.pyi]
+
+[case testNestedUnionsProcessedCorrectly]
+from typing import Union
+
+class A: pass
+class B: pass
+class C: pass
+
+def foo(bar: Union[Union[A, B], C]) -> None:
+    if isinstance(bar, A):
+        reveal_type(bar)  # E: Revealed type is '__main__.A'
+    else:
+        reveal_type(bar)  # E: Revealed type is 'Union[__main__.B, __main__.C]'
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testAssignAnyToUnion]
+from typing import Union, Any
+x: Union[int, str]
+a: Any
+if bool():
+    x = a
+    # TODO: Maybe we should infer Any as the type instead.
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+[builtins fixtures/bool.pyi]
+
+[case testAssignAnyToUnionWithAny]
+from typing import Union, Any
+x: Union[int, Any]
+a: Any
+if bool():
+    x = a
+    reveal_type(x)  # E: Revealed type is 'Any'
+reveal_type(x)  # E: Revealed type is 'Union[builtins.int, Any]'
+[builtins fixtures/bool.pyi]
diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test
new file mode 100644
index 0000000..acf975b
--- /dev/null
+++ b/test-data/unit/check-unreachable-code.test
@@ -0,0 +1,536 @@
+-- Type checker test cases for conditional checks that result in some
+-- blocks classified as unreachable (they are not type checked or semantically
+-- analyzed).
+--
+-- For example, we skip blocks that will not be executed on the active
+-- Python version.
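The cases below depend on mypy treating certain conditions specially: comparisons against sys.version_info, equality checks on sys.platform, and truthiness of a MYPY or typing.TYPE_CHECKING flag all let it mark the non-matching branch as unreachable, so that branch is neither type checked nor semantically analyzed. A minimal sketch of ordinary user code relying on this, assuming a default Python 3 mypy run; the function name greet is made up for illustration:

import sys

if sys.version_info >= (3,):
    def greet() -> str:   # analyzed: the condition is known to be true
        return "hello"
else:
    def greet() -> int:   # unreachable on this run, so the conflicting
        return 0          # signature is not reported

reveal_type(greet())      # mypy reports: Revealed type is 'builtins.str'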
+
+[case testConditionalTypeAliasPY3]
+import typing
+def f(): pass
+PY3 = f()
+if PY3:
+    t = int
+    x = object() + 'x' # E: Unsupported left operand type for + ("object")
+else:
+    t = str
+    y = 'x' / 1
+x
+z = 1 # type: t
+
+[case testConditionalTypeAliasPY3_python2]
+import typing
+def f(): pass
+PY3 = f()
+if PY3:
+    t = int
+    x = object() + 'x'
+else:
+    t = str
+    y = 'x' / 1 # E: "str" has no attribute "__div__"
+y
+z = '' # type: t
+
+[case testConditionalAssignmentPY2]
+import typing
+def f(): pass
+PY2 = f()
+if PY2:
+    x = object() + 'x'
+else:
+    y = 'x' / 1 # E: Unsupported left operand type for / ("str")
+y
+
+[case testConditionalAssignmentPY2_python2]
+import typing
+def f(): pass
+PY2 = f()
+if PY2:
+    x = object() + 'x' # E: Unsupported left operand type for + ("object")
+else:
+    y = 'x' / 1
+x
+
+[case testConditionalImport]
+import typing
+def f(): pass
+PY2 = f()
+if PY2:
+    import fuzzybar
+    from barbar import *
+    from pawwaw import a, bc
+else:
+    import m
+[file m.py]
+import typing
+x = 1
+x = 'a'
+[out]
+tmp/m.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testNegatedMypyConditional]
+import typing
+MYPY = 0
+if not MYPY:
+    import xyz753
+else:
+    import pow123 # E
+[builtins fixtures/bool.pyi]
+[out]
+main:6: error: Cannot find module named 'pow123'
+main:6: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testMypyConditional]
+import typing
+MYPY = 0
+if MYPY:
+    None + 1 # E: Unsupported left operand type for + (None)
+else:
+    None + ''
+[builtins fixtures/bool.pyi]
+
+[case testTypeCheckingConditional]
+import typing
+if typing.TYPE_CHECKING:
+    import pow123 # E
+else:
+    import xyz753
+[out]
+main:3: error: Cannot find module named 'pow123'
+main:3: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testTypeCheckingConditionalFromImport]
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+    import pow123 # E
+else:
+    import xyz753
+[out]
+main:3: error: Cannot find module named 'pow123'
+main:3: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testNegatedTypeCheckingConditional]
+import typing
+if not typing.TYPE_CHECKING:
+    import pow123 # E
+else:
+    import xyz753
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Cannot find module named 'xyz753'
+main:5: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testUndefinedTypeCheckingConditional]
+if not TYPE_CHECKING: # E
+    import pow123
+else:
+    import xyz753
+[builtins fixtures/bool.pyi]
+[out]
+main:1: error: Name 'TYPE_CHECKING' is not defined
+main:4: error: Cannot find module named 'xyz753'
+main:4: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testConditionalClassDefPY3]
+def f(): pass
+PY3 = f()
+if PY3:
+    pass
+else:
+    class X(object):
+        pass
+
+[case testUnreachabilityAndElifPY3]
+def f(): pass
+PY3 = f()
+if PY3:
+    pass
+elif bool():
+    import nonexistent
+    1 + ''
+else:
+    import bad_name
+    1 + ''
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testSysVersionInfo_python2]
+import sys
+if sys.version_info[0] >= 3:
+    def foo():
+        # type: () -> int
+        return 0
+else:
+    def foo():
+        # type: () -> str
+        return ''
+reveal_type(foo())  # E: Revealed type is 'builtins.str'
+[builtins_py2 fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfo]
+import sys
+if sys.version_info[0] >= 3:
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+reveal_type(foo())  # E: Revealed type is 'builtins.int'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoNegated_python2]
+import sys
+if not (sys.version_info[0] < 3):
+    def foo():
+        # type: () -> int
+        return 0
+else:
+    def foo():
+        # type: () -> str
+        return ''
+reveal_type(foo())  # E: Revealed type is 'builtins.str'
+[builtins_py2 fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoNegated]
+import sys
+if not (sys.version_info[0] < 3):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+reveal_type(foo())  # E: Revealed type is 'builtins.int'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced1]
+import sys
+if sys.version_info[:1] >= (3,):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced2]
+import sys
+if sys.version_info[:2] >= (3, 0):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced3]
+import sys
+if sys.version_info[:] >= (3, 0):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced4]
+import sys
+if sys.version_info[0:2] >= (3, 0):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced5]
+import sys
+if sys.version_info[0:] >= (3,):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced6]
+import sys
+if sys.version_info[1:] >= (5,):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced7]
+import sys
+if sys.version_info >= (3, 5):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced8]
+# Our pyversion only has (major, minor),
+# so testing for (major, minor, bugfix) is unsupported.
+import sys
+if sys.version_info >= (3, 5, 0):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''  # E: All conditional function variants must have identical signatures
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced9]
+# Our pyversion only has (major, minor),
+# so testing for (minor, bugfix) is unsupported (also it's silly :-).
+import sys
+if sys.version_info[1:] >= (5, 0):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''  # E: All conditional function variants must have identical signatures
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysPlatform1]
+import sys
+if sys.platform == 'fictional':
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + ''
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysPlatform2]
+import sys
+if sys.platform != 'fictional':
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysPlatformNegated]
+import sys
+if not (sys.platform == 'fictional'):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoClass]
+import sys
+if sys.version_info < (3, 5):
+    class C:
+        pass
+else:
+    class C:
+        def foo(self) -> int: return 0
+C().foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoImport]
+import sys
+if sys.version_info >= (3, 5):
+    import collections
+else:
+    collections = None
+Pt = collections.namedtuple('Pt', 'x y z')
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoVariable]
+import sys
+if sys.version_info >= (3, 5):
+    x = ''
+else:
+    x = 0
+x + ''
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoInClass]
+import sys
+class C:
+    if sys.version_info >= (3, 5):
+        def foo(self) -> int: return 0
+    else:
+        def foo(self) -> str: return ''
+reveal_type(C().foo())  # E: Revealed type is 'builtins.int'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoInFunction]
+import sys
+def foo() -> None:
+    if sys.version_info >= (3, 5):
+        x = ''
+    else:
+        x = 0
+    reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysPlatformInMethod]
+import sys
+class C:
+    def foo(self) -> None:
+        if sys.platform != 'fictional':
+            x = ''
+        else:
+            x = 0
+        reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysPlatformInFunctionImport]
+import sys
+def foo() -> None:
+    if sys.platform != 'fictional':
+        import a
+    else:
+        import b as a
+    a.x
+[file a.py]
+x = 1
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testCustomSysVersionInfo]
+# flags: --python-version 3.5
+import sys
+if sys.version_info == (3, 5):
+    x = "foo"
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testCustomSysVersionInfo2]
+# flags: --python-version 3.5
+import sys
+if sys.version_info == (3, 6):
+    x = "foo"
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testCustomSysPlatform]
+# flags: --platform linux
+import sys
+if sys.platform == 'linux':
+    x = "foo"
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testCustomSysPlatform2]
+# flags: --platform win32
+import sys
+if sys.platform == 'linux':
+    x = "foo"
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testCustomSysPlatformStartsWith]
+# flags: --platform win32
+import sys
+if sys.platform.startswith('win'):
+    x = "foo"
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testShortCircuitInExpression]
+import typing
+def make() -> bool: pass
+PY2 = PY3 = make()
+
+a = PY2 and 's'
+b = PY3 and 's'
+c = PY2 or 's'
+d = PY3 or 's'
+e = (PY2 or PY3) and 's'
+f = (PY3 or PY2) and 's'
+g = (PY2 or PY3) or 's'
+h = (PY3 or PY2) or 's'
+reveal_type(a)  # E: Revealed type is 'builtins.bool'
+reveal_type(b)  # E: Revealed type is 'builtins.str'
+reveal_type(c)  # E: Revealed type is 'builtins.str'
+reveal_type(d)  # E: Revealed type is 'builtins.bool'
+reveal_type(e)  # E: Revealed type is 'builtins.str'
+reveal_type(f)  # E: Revealed type is 'builtins.str'
+reveal_type(g)  # E: Revealed type is 'builtins.bool'
+reveal_type(h)  # E: Revealed type is 'builtins.bool'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testShortCircuitAndWithConditionalAssignment]
+# flags: --platform linux
+import sys
+
+def f(): pass
+PY2 = f()
+if PY2 and sys.platform == 'linux':
+    x = 'foo'
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.int'
+if sys.platform == 'linux' and PY2:
+    y = 'foo'
+else:
+    y = 3
+reveal_type(y)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/ops.pyi]
+
+[case testShortCircuitOrWithConditionalAssignment]
+# flags: --platform linux
+import sys
+
+def f(): pass
+PY2 = f()
+if PY2 or sys.platform == 'linux':
+    x = 'foo'
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.str'
+if sys.platform == 'linux' or PY2:
+    y = 'foo'
+else:
+    y = 3
+reveal_type(y)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/ops.pyi]
+
+[case testConditionalAssertWithoutElse]
+import typing
+
+class A: pass
+class B(A): pass
+
+x = A()
+reveal_type(x)  # E: Revealed type is '__main__.A'
+
+if typing.TYPE_CHECKING:
+    assert isinstance(x, B)
+    reveal_type(x)  # E: Revealed type is '__main__.B'
+
+reveal_type(x)  # E: Revealed type is '__main__.B'
+
+[builtins fixtures/isinstancelist.pyi]
diff --git a/test-data/unit/check-unsupported.test b/test-data/unit/check-unsupported.test
new file mode 100644
index 0000000..3406d57
--- /dev/null
+++ b/test-data/unit/check-unsupported.test
@@ -0,0 +1,17 @@
+-- Tests for unsupported features
+
+
+[case testDecorateOverloadedFunction]
+from foo import *
+[file foo.pyi]
+# The error messages are not the most informative ever.
+def d(x): pass
+@d
+def f(): pass
+def f(x): pass # E
+def g(): pass
+@d # E
+def g(x): pass
+[out]
+tmp/foo.pyi:5: error: Name 'f' already defined
+tmp/foo.pyi:7: error: Name 'g' already defined
diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test
new file mode 100644
index 0000000..73dddd5
--- /dev/null
+++ b/test-data/unit/check-varargs.test
@@ -0,0 +1,595 @@
+-- Test cases for the type checker related to varargs.
+
+
+-- Varargs within body
+-- -------------------
+
+
+[case testVarArgsWithinFunction]
+from typing import Tuple
+def f( *b: 'B') -> None:
+    ab = None # type: Tuple[B, ...]
+    ac = None # type: Tuple[C, ...]
+    b = ac # E: Incompatible types in assignment (expression has type Tuple[C, ...], variable has type Tuple[B, ...])
+    ac = b # E: Incompatible types in assignment (expression has type Tuple[B, ...], variable has type Tuple[C, ...])
+    b = ab
+    ab = b
+
+class B: pass
+class C: pass
+[builtins fixtures/tuple.pyi]
+[out]
+
+
+[case testVarArgsAreTuple]
+from typing import Tuple, Sequence
+def want_tuple(types: Tuple[type, ...]): pass
+def want_sequence(types: Sequence[type]): pass
+def test(*t: type) -> None:
+    want_tuple(t)
+    want_sequence(t)
+[builtins fixtures/tuple.pyi]
+[out]
+
+
+-- Calling varargs function
+-- ------------------------
+
+
+[case testCallingVarArgsFunction]
+
+a = None # type: A
+b = None # type: B
+c = None # type: C
+
+f(c)       # E: Argument 1 to "f" has incompatible type "C"; expected "A"
+f(a, b, c) # E: Argument 3 to "f" has incompatible type "C"; expected "A"
+f(g())     # E: "g" does not return a value
+f(a, g())  # E: "g" does not return a value
+f()
+f(a)
+f(b)
+f(a, b, a, b)
+
+def f( *a: 'A') -> None: pass
+
+def g() -> None: pass
+
+class A: pass
+class B(A): pass
+class C: pass
+[builtins fixtures/list.pyi]
+
+[case testCallingVarArgsFunctionWithAlsoNormalArgs]
+
+a = None # type: A
+b = None # type: B
+c = None # type: C
+
+f(a)       # E: Argument 1 to "f" has incompatible type "A"; expected "C"
+f(c, c)    # E: Argument 2 to "f" has incompatible type "C"; expected "A"
+f(c, a, b, c)  # E: Argument 4 to "f" has incompatible type "C"; expected "A"
+f(c)
+f(c, a)
+f(c, b, b, a, b)
+
+def f(a: 'C', *b: 'A') -> None: pass
+
+class A: pass
+class B(A): pass
+class C: pass
+[builtins fixtures/list.pyi]
+
+[case testCallingVarArgsFunctionWithDefaultArgs]
+
+a = None # type: A
+b = None # type: B
+c = None # type: C
+
+f(a)           # E: Argument 1 to "f" has incompatible type "A"; expected "Optional[C]"
+f(c, c)        # E: Argument 2 to "f" has incompatible type "C"; expected "A"
+f(c, a, b, c)  # E: Argument 4 to "f" has incompatible type "C"; expected "A"
+f()
+f(c)
+f(c, a)
+f(c, b, b, a, b)
+
+def f(a: 'C' = None, *b: 'A') -> None:
+    pass
+
+class A: pass
+class B(A): pass
+class C: pass
+[builtins fixtures/list.pyi]
+
+[case testCallVarargsFunctionWithIterable]
+from typing import Iterable
+it1 = None  # type: Iterable[int]
+it2 = None  # type: Iterable[str]
+def f(*x: int) -> None: pass
+f(*it1)
+f(*it2) # E: Argument 1 to "f" has incompatible type *Iterable[str]; expected "int"
+[builtins fixtures/for.pyi]
+
+[case testCallVarargsFunctionWithIterableAndPositional]
+
+from typing import Iterable
+it1 = None  # type: Iterable[int]
+def f(*x: int) -> None: pass
+f(*it1, 1, 2)
+f(*it1, 1, *it1, 2)
+f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
+[builtins fixtures/for.pyi]
+
+[case testCallVarargsFunctionWithTupleAndPositional]
+
+def f(*x: int) -> None: pass
+it1 = (1, 2)
+f(*it1, 1, 2)
+f(*it1, 1, *it1, 2)
+f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
+[builtins fixtures/for.pyi]
+
+
+-- Calling varargs function + type inference
+-- -----------------------------------------
+
+
+[case testTypeInferenceWithCalleeVarArgs]
+from typing import TypeVar
+T = TypeVar('T')
+a = None # type: A
+b = None # type: B
+c = None # type: C
+o = None # type: object
+
+a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+b = f(b, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+o = f()
+a = f(a)
+a = f(b)
+a = f(a, b, a)
+o = f(a, b, o)
+c = f(c)
+
+def f( *a: T) -> T:
+    pass
+
+class A: pass
+class B(A): pass
+class C: pass
+[builtins fixtures/list.pyi]
+
+[case testTypeInferenceWithCalleeVarArgsAndDefaultArgs]
+from typing import TypeVar
+T = TypeVar('T')
+a = None # type: A
+o = None # type: object
+
+a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = f(a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = f(a, a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = f(a, a, a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+a = f(a)
+a = f(a, a)
+a = f(a, a, a)
+
+def f(a: T, b: T = None, *c: T) -> T:
+    pass
+
+class A: pass
+[builtins fixtures/list.pyi]
+
+
+-- Calling normal function with varargs
+-- ------------------------------------
+
+
+[case testCallingWithListVarArgs]
+from typing import List, Any, cast
+aa = None # type: List[A]
+ab = None # type: List[B]
+a = None # type: A
+b = None # type: B
+
+f(*aa)    # Fail
+f(a, *ab) # Ok
+f(a, b)
+(cast(Any, f))(*aa)     # IDEA: Move to check-dynamic?
+(cast(Any, f))(a, *ab)  # IDEA: Move to check-dynamic?
+
+def f(a: 'A', b: 'B') -> None:
+    pass
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+main:7: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
+
+[case testCallingWithTupleVarArgs]
+
+a = None # type: A
+b = None # type: B
+c = None # type: C
+cc = None # type: CC
+
+f(*(a, b, b)) # E: Argument 1 to "f" has incompatible type *"Tuple[A, B, B]"; expected "C"
+f(*(b, b, c)) # E: Argument 1 to "f" has incompatible type *"Tuple[B, B, C]"; expected "A"
+f(a, *(b, b)) # E: Argument 2 to "f" has incompatible type *"Tuple[B, B]"; expected "C"
+f(b, *(b, c)) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+f(*(a, b))    # E: Too few arguments for "f"
+f(*(a, b, c, c)) # E: Too many arguments for "f"
+f(a, *(b, c, c)) # E: Too many arguments for "f"
+f(*(a, b, c))
+f(a, *(b, c))
+f(a, b, *(c,))
+f(a, *(b, cc))
+
+def f(a: 'A', b: 'B', c: 'C') -> None: pass
+
+class A: pass
+class B: pass
+class C: pass
+class CC(C): pass
+[builtins fixtures/tuple.pyi]
+
+[case testInvalidVarArg]
+
+a = None # type: A
+
+f(*None)
+f(*a)    # E: List or tuple expected as variable arguments
+f(*(a,))
+
+def f(a: 'A') -> None:
+    pass
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+
+-- Calling varargs function with varargs
+-- -------------------------------------
+
+
+[case testCallingVarArgsFunctionWithListVarArgs]
+from typing import List
+aa, ab, a, b = None, None, None, None # type: (List[A], List[B], A, B)
+f(*aa)           # Fail
+f(a, *aa)        # Fail
+f(b, *ab)        # Fail
+f(a, a, *ab)     # Fail
+f(a, b, *aa)     # Fail
+f(b, b, *ab)     # Fail
+g(*ab)           # Fail
+f(a, *ab)
+f(a, b, *ab)
+f(a, b, b, *ab)
+g(*aa)
+
+def f(a: 'A', *b: 'B') -> None: pass
+def g(a: 'A', *b: 'A') -> None: pass
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
+main:4: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
+main:5: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+main:6: error: Argument 2 to "f" has incompatible type "A"; expected "B"
+main:7: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
+main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+main:9: error: Argument 1 to "g" has incompatible type *List[B]; expected "A"
+
+[case testCallingVarArgsFunctionWithTupleVarArgs]
+
+a, b, c, cc = None, None, None, None # type: (A, B, C, CC)
+
+f(*(b, b, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[B, B, B]"; expected "A"
+f(*(a, a, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, A, B]"; expected "B"
+f(*(a, b, a))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, B, A]"; expected "B"
+f(a, *(a, b))   # E: Argument 2 to "f" has incompatible type *"Tuple[A, B]"; expected "B"
+f(b, *(b, b))   # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+f(b, b, *(b,))  # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+f(a, a, *(b,))  # E: Argument 2 to "f" has incompatible type "A"; expected "B"
+f(a, b, *(a,))  # E: Argument 3 to "f" has incompatible type *"Tuple[A]"; expected "B"
+f(*())          # E: Too few arguments for "f"
+f(*(a, b, b))
+f(a, *(b, b))
+f(a, b, *(b,))
+
+def f(a: 'A', *b: 'B') -> None:
+    pass
+
+class A: pass
+class B: pass
+class C: pass
+class CC(C): pass
+[builtins fixtures/list.pyi]
+
+
+-- Varargs special cases
+-- ---------------------
+
+
+[case testDynamicVarArg]
+from typing import Any
+d, a = None, None # type: (Any, A)
+f(a, a, *d) # Fail
+f(a, *d)    # Fail
+f(*d)       # Ok
+
+g(*d)
+g(a, *d)
+g(a, a, *d)
+
+def f(a: 'A') -> None: pass
+def g(a: 'A', *b: 'A') -> None: pass
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Too many arguments for "f"
+main:4: error: Too many arguments for "f"
+
+[case testListVarArgsAndSubtyping]
+from typing import List
+aa = None # type: List[A]
+ab = None # type: List[B]
+
+g(*aa) # E: Argument 1 to "g" has incompatible type *List[A]; expected "B"
+f(*aa)
+f(*ab)
+g(*ab)
+
+def f( *a: 'A') -> None:
+    pass
+
+def g( *a: 'B') -> None:
+    pass
+
+class A: pass
+class B(A): pass
+[builtins fixtures/list.pyi]
+
+[case testCallerVarArgsAndDefaultArgs]
+
+a, b = None, None # type: (A, B)
+f(*())        # Fail
+f(a, *[a])    # Fail
+f(a, b, *[a]) # Fail
+f(*(a, a, b)) # Fail
+f(*(a,))
+f(*(a, b))
+f(*(a, b, b, b))
+f(a, *[])
+f(a, *[b])
+f(a, *[b, b])
+
+def f(a: 'A', b: 'B' = None, *c: 'B') -> None:
+    pass
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Too few arguments for "f"
+main:4: error: Argument 2 to "f" has incompatible type *List[A]; expected "Optional[B]"
+main:4: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
+main:5: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
+main:6: error: Argument 1 to "f" has incompatible type *"Tuple[A, A, B]"; expected "Optional[B]"
+
+[case testVarArgsAfterKeywordArgInCall1-skip]
+# see: mypy issue #2729
+def f(x: int, y: str) -> None: pass
+f(x=1, *[2])
+[builtins fixtures/list.pyi]
+[out]
+main:2: error: "f" gets multiple values for keyword argument "x"
+main:2: error: Argument 2 to "f" has incompatible type *List[int]; expected "str"
+
+[case testVarArgsAfterKeywordArgInCall2-skip]
+# see: mypy issue #2729
+def f(x: int, y: str) -> None: pass
+f(y='x', *[1])
+[builtins fixtures/list.pyi]
+[out]
+main:2: error: "f" gets multiple values for keyword argument "y"
+main:2: error: Argument 2 to "f" has incompatible type *List[int]; expected "str"
+
+[case testVarArgsAfterKeywordArgInCall3]
+def f(x: int, y: str) -> None: pass
+f(y='x', *(1,))
+[builtins fixtures/list.pyi]
+
+[case testVarArgsAfterKeywordArgInCall4]
+def f(x: int, *, y: str) -> None: pass
+f(y='x', *[1])
+[builtins fixtures/list.pyi]
+
+[case testVarArgsAfterKeywordArgInCall5]
+def f(x: int, *, y: str) -> None: pass
+f(y='x', *(1,))
+[builtins fixtures/list.pyi]
+
+
+-- Overloads + varargs
+-- -------------------
+
+
+[case testIntersectionTypesAndVarArgs]
+from foo import *
+[file foo.pyi]
+from typing import overload
+a, b = None, None # type: (A, B)
+
+b = f()        # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(a)       # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(a, b)    # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(b)       # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = f(b, b)    # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = f(a, *[b]) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(*())     # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(*(a,))   # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(*(a, b)) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(*(b,))   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = f(*(b, b)) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = f(*[b])    # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+a = f()
+a = f(a)
+a = f(a, b)
+b = f(b)
+b = f(b, b)
+a = f(a, *[b])
+a = f(*())
+a = f(*(a,))
+a = f(*(a, b))
+b = f(*(b,))
+b = f(*(b, b))
+b = f(*[b])
+
+class A: pass
+class B: pass
+
+@overload
+def f(a: A = None, *b: B) -> A: pass
+
+@overload
+def f(a: B, *b: B) -> B: pass
+[builtins fixtures/list.pyi]
+
+
+-- Caller varargs + type inference
+-- -------------------------------
+
+
+[case testCallerVarArgsListWithTypeInference]
+from typing import List, TypeVar, Tuple
+S = TypeVar('S')
+T = TypeVar('T')
+a, b, aa = None, None, None # type: (A, B, List[A])
+
+a, b = f(*aa)    # Fail
+b, b = f(*aa)    # Fail
+a, a = f(b, *aa) # Fail
+b, b = f(b, *aa) # Fail
+b, b = f(b, b, *aa) # Fail
+a, b = f(a, *a)  # Fail
+a, b = f(*a)     # Fail
+
+a, a = f(*aa)
+b, a = f(b, *aa)
+b, a = f(b, a, *aa)
+
+def f(a: S, *b: T) -> Tuple[S, T]:
+    pass
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+main:6: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
+main:7: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
+main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+main:9: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
+main:10: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
+main:11: error: List or tuple expected as variable arguments
+main:12: error: List or tuple expected as variable arguments
+
+[case testCallerVarArgsTupleWithTypeInference]
+from typing import TypeVar, Tuple
+S = TypeVar('S')
+T = TypeVar('T')
+a, b = None, None # type: (A, B)
+
+a, a = f(*(a, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, B]"; expected "A"
+b, b = f(a, *(b,))  # E: Argument 1 to "f" has incompatible type "A"; expected "B"
+a, a = f(*(a, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, B]"; expected "A"
+b, b = f(a, *(b,))  # E: Argument 1 to "f" has incompatible type "A"; expected "B"
+a, b = f(*(a, b, b)) # E: Too many arguments for "f"
+
+a, b = f(*(a, b))
+a, b = f(a, *(b,))
+
+def f(a: S, b: T) -> Tuple[S, T]: pass
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testCallerVarargsAndComplexTypeInference]
+from typing import List, TypeVar, Generic, Tuple
+T = TypeVar('T')
+S = TypeVar('S')
+a, b = None, None # type: (A, B)
+ao = None # type: List[object]
+aa = None # type: List[A]
+ab = None # type: List[B]
+
+a, aa = G().f(*[a])  # Fail
+aa, a = G().f(*[a])  # Fail
+ab, aa = G().f(*[a]) # Fail
+
+ao, ao = G().f(*[a]) # E: Incompatible types in assignment (expression has type List[<nothing>], variable has type List[object])
+aa, aa = G().f(*[a]) # E: Incompatible types in assignment (expression has type List[<nothing>], variable has type List[A])
+
+class G(Generic[T]):
+    def f(self, *a: S) -> Tuple[List[S], List[T]]:
+        pass
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+main:9: error: Incompatible types in assignment (expression has type List[A], variable has type "A")
+main:9: error: Incompatible types in assignment (expression has type List[<nothing>], variable has type List[A])
+main:10: error: Incompatible types in assignment (expression has type List[<nothing>], variable has type "A")
+main:11: error: Incompatible types in assignment (expression has type List[<nothing>], variable has type List[A])
+main:11: error: Argument 1 to "f" of "G" has incompatible type *List[A]; expected "B"
+
+
+-- Comment signatures
+-- ------------------
+
+
+[case testVarArgsAndCommentSignature]
+import typing
+def f(*x): # type: (*int) -> None
+    pass
+f(1)
+f(1, 2)
+f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+f(1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+
+
+-- Subtyping
+-- ---------
+
+
+[case testVarArgsFunctionSubtyping]
+from typing import Callable
+x = None # type: Callable[[int], None]
+def f(*x: int) -> None: pass
+def g(*x: str) -> None: pass
+x = f
+x = g # E: Incompatible types in assignment (expression has type Callable[[VarArg(str)], None], variable has type Callable[[int], None])
+[builtins fixtures/list.pyi]
+[out]
+
+
+-- Decorated method where self is implied by *args
+-- -----------------------------------------------
+
+[case testVarArgsCallableSelf]
+from typing import Callable
+def cm(func) -> Callable[..., None]: pass
+class C:
+    @cm
+    def foo(self) -> None: pass
+C().foo()
+C().foo(1)  # The decorator's return type says this should be okay
diff --git a/test-data/unit/check-warnings.test b/test-data/unit/check-warnings.test
new file mode 100644
index 0000000..c95baec
--- /dev/null
+++ b/test-data/unit/check-warnings.test
@@ -0,0 +1,184 @@
+-- Test cases for warning generation.
+
+-- Redundant casts
+-- ---------------
+
+[case testRedundantCast]
+# flags: --warn-redundant-casts
+from typing import cast
+a = 1
+b = cast(str, a)
+c = cast(int, a)
+[out]
+main:5: note: Redundant cast to "int"
+
+[case testRedundantCastWithIsinstance]
+# flags: --warn-redundant-casts
+from typing import cast, Union
+x = 1  # type: Union[int, str]
+if isinstance(x, str):
+    cast(str, x)
+[builtins fixtures/isinstance.pyi]
+[out]
+main:5: note: Redundant cast to "str"
+
+[case testCastToSuperclassNotRedundant]
+# flags: --warn-redundant-casts
+from typing import cast, TypeVar, List
+T = TypeVar('T')
+def add(xs: List[T], ys: List[T]) -> List[T]: pass
+class A: pass
+class B(A): pass
+a = A()
+b = B()
+# Without the cast, the following line would fail to type check.
+c = add([cast(A, b)], [a])
+[builtins fixtures/list.pyi]
+
+
+-- Unused 'type: ignore' comments
+-- ------------------------------
+
+[case testUnusedTypeIgnore]
+# flags: --warn-unused-ignores
+a = 1
+a = 'a' # type: ignore
+a = 2 # type: ignore # N: unused 'type: ignore' comment
+a = 'b' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testUnusedTypeIgnoreImport]
+# flags: --warn-unused-ignores
+import banana # type: ignore
+import m # type: ignore
+from m import * # type: ignore
+[file m.py]
+pass
+[out]
+main:3: note: unused 'type: ignore' comment
+main:4: note: unused 'type: ignore' comment
+
+
+-- No return
+-- ---------
+
+[case testNoReturn]
+# flags: --warn-no-return
+def f() -> int:
+    pass
+
+def g() -> int:
+    if bool():
+        return 1
+[builtins fixtures/list.pyi]
+[out]
+main:5: error: Missing return statement
+
+[case testNoReturnWhile]
+# flags: --warn-no-return
+def h() -> int:
+    while True:
+        if bool():
+            return 1
+
+def i() -> int:
+    while 1:
+        if bool():
+            return 1
+        if bool():
+            break
+
+def j() -> int:
+    while 1:
+        if bool():
+            return 1
+        if bool():
+            continue
+[builtins fixtures/list.pyi]
+[out]
+main:7: error: Missing return statement
+
+[case testNoReturnExcept]
+# flags: --warn-no-return
+def f() -> int:
+    try:
+        return 1
+    except:
+        pass
+def g() -> int:
+    try:
+        pass
+    except:
+        return 1
+    else:
+        return 1
+def h() -> int:
+    try:
+        pass
+    except:
+        pass
+    else:
+        pass
+    finally:
+        return 1
+[builtins fixtures/exception.pyi]
+[out]
+main:2: error: Missing return statement
+
+[case testNoReturnEmptyBodyWithDocstring]
+def f() -> int:
+    """Return the number of peppers."""
+    # This might be an @abstractmethod, for example
+    pass
+[out]
+
+
+-- Returning Any
+-- -------------
+
+[case testReturnAnyFromTypedFunction]
+# flags: --warn-return-any
+from typing import Any
+def g() -> Any: pass
+def f() -> int: return g()
+[out]
+main:4: warning: Returning Any from function with declared return type "builtins.int"
+
+[case testReturnAnySilencedFromTypedFunction]
+# flags: --warn-return-any
+from typing import Any
+def g() -> Any: pass
+def f() -> int:
+    result = g() # type: int
+    return result
+[out]
+
+[case testReturnAnyFromUntypedFunction]
+# flags: --warn-return-any
+from typing import Any
+def g() -> Any: pass
+def f(): return g()
+[out]
+
+[case testReturnAnyFromAnyTypedFunction]
+# flags: --warn-return-any
+from typing import Any
+def g() -> Any: pass
+def f() -> Any: return g()
+[out]
+
+[case testOKReturnAnyIfProperSubtype]
+# flags: --warn-return-any --strict-optional
+from typing import Any, Optional
+
+class Test(object):
+
+    def __init__(self) -> None:
+        self.attr = "foo"  # type: Any
+
+    def foo(self, do_it: bool) -> Optional[Any]:
+        if do_it:
+            return self.attr  # Should not warn here
+        else:
+            return None
+[builtins fixtures/list.pyi]
+[out]
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test
new file mode 100644
index 0000000..098f81d
--- /dev/null
+++ b/test-data/unit/cmdline.test
@@ -0,0 +1,1033 @@
+-- Tests for command line parsing
+-- ------------------------------
+--
+-- The initial line specifies the command line, in the format
+--
+--   # cmd: mypy <options>
+
+
+-- Directories/packages on the command line
+-- ----------------------------------------
+
+[case testCmdlinePackage]
+# cmd: mypy pkg
+[file pkg/__init__.py]
+[file pkg/a.py]
+undef
+[file pkg/subpkg/__init__.py]
+[file pkg/subpkg/a.py]
+undef
+import pkg.subpkg.a
+[out]
+pkg/a.py:1: error: Name 'undef' is not defined
+pkg/subpkg/a.py:1: error: Name 'undef' is not defined
+
+[case testCmdlinePackageSlash]
+# cmd: mypy pkg/
+[file pkg/__init__.py]
+[file pkg/a.py]
+undef
+[file pkg/subpkg/__init__.py]
+[file pkg/subpkg/a.py]
+undef
+import pkg.subpkg.a
+[out]
+pkg/a.py:1: error: Name 'undef' is not defined
+pkg/subpkg/a.py:1: error: Name 'undef' is not defined
+
+[case testCmdlineNonPackage]
+# cmd: mypy dir
+[file dir/a.py]
+undef
+[file dir/subdir/a.py]
+undef
+[out]
+dir/a.py:1: error: Name 'undef' is not defined
+
+[case testCmdlineNonPackageSlash]
+# cmd: mypy dir/
+[file dir/a.py]
+undef
+[file dir/subdir/a.py]
+undef
+[out]
+dir/a.py:1: error: Name 'undef' is not defined
+
+[case testCmdlinePackageContainingSubdir]
+# cmd: mypy pkg
+[file pkg/__init__.py]
+[file pkg/a.py]
+undef
+[file pkg/subdir/a.py]
+undef
+[out]
+pkg/a.py:1: error: Name 'undef' is not defined
+
+[case testCmdlineNonPackageContainingPackage]
+# cmd: mypy dir
+[file dir/a.py]
+undef
+import subpkg.a
+[file dir/subpkg/__init__.py]
+[file dir/subpkg/a.py]
+undef
+[out]
+dir/subpkg/a.py:1: error: Name 'undef' is not defined
+dir/a.py:1: error: Name 'undef' is not defined
+
+[case testCmdlineInvalidPackageName]
+# cmd: mypy dir/sub.pkg/a.py
+[file dir/sub.pkg/__init__.py]
+[file dir/sub.pkg/a.py]
+undef
+[out]
+sub.pkg is not a valid Python package name
+
+[case testBadFileEncoding]
+# cmd: mypy a.py
+[file a.py]
+# coding: uft-8
+[out]
+mypy: can't decode file 'a.py': unknown encoding: uft-8
+
+[case testCannotIgnoreDuplicateModule]
+# cmd: mypy one/mod/__init__.py two/mod/__init__.py
+[file one/mod/__init__.py]
+# type: ignore
+[file two/mod/__init__.py]
+# type: ignore
+[out]
+two/mod/__init__.py: error: Duplicate module named 'mod'
+
+[case testFlagsFile]
+# cmd: mypy @flagsfile
+[file flagsfile]
+-2
+main.py
+[file main.py]
+def f():
+    try:
+        1/0
+    except ZeroDivisionError, err:
+        print err
+
+[case testConfigFile]
+# cmd: mypy main.py
+[file mypy.ini]
+[[mypy]
+python_version = 2.7
+[file main.py]
+def f():
+    try:
+        1/0
+    except ZeroDivisionError, err:
+        print err
+
+[case testErrorContextConfig]
+# cmd: mypy main.py
+[file mypy.ini]
+[[mypy]
+show_error_context=True
+[file main.py]
+def f() -> None:
+  0 + ""
+[out]
+main.py: note: In function "f":
+main.py:2: error: Unsupported operand types for + ("int" and "str")
+
+[case testAltConfigFile]
+# cmd: mypy --config-file config.ini main.py
+[file config.ini]
+[[mypy]
+python_version = 2.7
+[file main.py]
+def f():
+    try:
+        1/0
+    except ZeroDivisionError, err:
+        print err
+
+[case testPerFileConfigSection]
+# cmd: mypy x.py y.py z.py
+[file mypy.ini]
+[[mypy]
+disallow_untyped_defs = True
+[[mypy-y*]
+disallow_untyped_defs = False
+[[mypy-z*]
+disallow_untyped_calls = True
+[file x.py]
+def f(a):
+    pass
+def g(a: int) -> int:
+    return f(a)
+[file y.py]
+def f(a):
+    pass
+def g(a: int) -> int:
+    return f(a)
+[file z.py]
+def f(a):
+    pass
+def g(a: int) -> int:
+    return f(a)
+[out]
+z.py:1: error: Function is missing a type annotation
+z.py:4: error: Call to untyped function "f" in typed context
+x.py:1: error: Function is missing a type annotation
+
+[case testPerFileConfigSectionUntypedWithDisallowUnannotated]
+# cmd: mypy w.py x.py y.py z.py
+[file mypy.ini]
+[[mypy]
+disallow_any = unannotated
+[[mypy-y*]
+disallow_any =
+[[mypy-z*]
+disallow_untyped_defs = True
+[[mypy-w*]
+disallow_untyped_defs = False
+[file x.py]
+def f(a):
+    pass
+[file y.py]
+def f(a):
+    pass
+[file z.py]
+def f(a):
+    pass
+[file w.py]
+def f(a):
+    pass
+[out]
+z.py:1: error: Function is missing a type annotation
+x.py:1: error: Function is missing a type annotation
+
+[case testPerFileConfigSectionDisallowUnannotatedWithUntyped]
+# cmd: mypy x.py y.py z.py
+[file mypy.ini]
+[[mypy]
+disallow_untyped_defs = True
+[[mypy-y*]
+disallow_any =
+[[mypy-z*]
+disallow_any = unannotated
+[file x.py]
+def f(a):
+    pass
+[file y.py]
+def f(a):
+    pass
+[file z.py]
+def f(a):
+    pass
+[out]
+z.py:1: error: Function is missing a type annotation
+x.py:1: error: Function is missing a type annotation
+
+[case testPerFileConfigSectionDisallowUnannotatedNoOverride]
+# cmd: mypy x.py y.py z.py
+[file mypy.ini]
+[[mypy]
+[[mypy-x*]
+disallow_untyped_defs = True
+[[mypy-z*]
+disallow_any = unannotated
+[file x.py]
+def f(a):
+    pass
+[file y.py]
+def f(a):
+    pass
+[file z.py]
+def f(a):
+    pass
+[out]
+z.py:1: error: Function is missing a type annotation
+x.py:1: error: Function is missing a type annotation
+
+[case testPerFileConfigSectionMultipleMatches]
+# cmd: mypy xx.py xy.py yx.py yy.py
+[file mypy.ini]
+[[mypy]
+[[mypy-*x*]
+disallow_untyped_defs = True
+[[mypy-*y*]
+disallow_untyped_calls = True
+[file xx.py]
+def f(a): pass
+def g(a: int) -> int: return f(a)
+[file xy.py]
+def f(a): pass
+def g(a: int) -> int: return f(a)
+[file yx.py]
+def f(a): pass
+def g(a: int) -> int: return f(a)
+[file yy.py]
+def f(a): pass
+def g(a: int) -> int: return f(a)
+[out]
+yy.py:2: error: Call to untyped function "f" in typed context
+yx.py:1: error: Function is missing a type annotation
+yx.py:2: error: Call to untyped function "f" in typed context
+xy.py:1: error: Function is missing a type annotation
+xy.py:2: error: Call to untyped function "f" in typed context
+xx.py:1: error: Function is missing a type annotation
+
+[case testMultipleGlobConfigSection]
+# cmd: mypy x.py y.py z.py
+[file mypy.ini]
+[[mypy]
+[[mypy-x*,z*]
+disallow_untyped_defs = True
+[file x.py]
+def f(a): pass
+[file y.py]
+def f(a): pass
+[file z.py]
+def f(a): pass
+[out]
+z.py:1: error: Function is missing a type annotation
+x.py:1: error: Function is missing a type annotation
+
+[case testConfigErrorNoSection]
+# cmd: mypy -c pass
+[file mypy.ini]
+[out]
+mypy.ini: No [mypy] section in config file
+
+[case testConfigErrorUnknownFlag]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+bad = 0
+[out]
+mypy.ini: [mypy]: Unrecognized option: bad = 0
+
+[case testConfigErrorBadBoolean]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+ignore_missing_imports = nah
+[out]
+mypy.ini: [mypy]: ignore_missing_imports: Not a boolean: nah
+
+[case testConfigErrorNotPerFile]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+[[mypy-*]
+python_version = 3.4
+[out]
+mypy.ini: [mypy-*]: Per-module sections should only specify per-module flags (python_version)
+
+[case testConfigMypyPath]
+# cmd: mypy file.py
+[file mypy.ini]
+[[mypy]
+mypy_path =
+    foo:bar
+    , baz
+[file foo/foo.pyi]
+def foo(x: int) -> str: ...
+[file bar/bar.pyi]
+def bar(x: str) -> list: ...
+[file baz/baz.pyi]
+def baz(x: list) -> dict: ...
+[file file.py]
+import no_stubs
+from foo import foo
+from bar import bar
+from baz import baz
+baz(bar(foo(42)))
+baz(bar(foo('oof')))
+[out]
+file.py:1: error: Cannot find module named 'no_stubs'
+file.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+file.py:6: error: Argument 1 to "foo" has incompatible type "str"; expected "int"
+
+[case testIgnoreErrorsConfig]
+# cmd: mypy x.py y.py
+[file mypy.ini]
+[[mypy]
+[[mypy-x]
+ignore_errors = True
+[file x.py]
+"" + 0
+[file y.py]
+"" + 0
+[out]
+y.py:1: error: Unsupported operand types for + ("str" and "int")
+
+[case testConfigFollowImportsNormal]
+# cmd: mypy main.py
+[file main.py]
+from a import x
+x + 0
+x + ''  # E
+import a
+a.x + 0
+a.x + ''  # E
+a.y  # E
+a + 0  # E
+[file mypy.ini]
+[[mypy]
+follow_imports = normal
+[file a.py]
+x = 0
+x += ''  # Error reported here
+[out]
+a.py:2: error: Unsupported operand types for + ("int" and "str")
+main.py:3: error: Unsupported operand types for + ("int" and "str")
+main.py:6: error: Unsupported operand types for + ("int" and "str")
+main.py:7: error: Module has no attribute "y"
+main.py:8: error: Unsupported operand types for + (Module and "int")
+
+[case testConfigFollowImportsSilent]
+# cmd: mypy main.py
+[file main.py]
+from a import x
+x + ''
+import a
+a.x + ''
+a.y
+a + 0
+[file mypy.ini]
+[[mypy]
+follow_imports = silent
+[file a.py]
+x = 0
+x += ''  # No error reported
+[out]
+main.py:2: error: Unsupported operand types for + ("int" and "str")
+main.py:4: error: Unsupported operand types for + ("int" and "str")
+main.py:5: error: Module has no attribute "y"
+main.py:6: error: Unsupported operand types for + (Module and "int")
+
+[case testConfigFollowImportsSkip]
+# cmd: mypy main.py
+[file main.py]
+from a import x
+reveal_type(x)  # Expect Any
+import a
+reveal_type(a.x)  # Expect Any
+[file mypy.ini]
+[[mypy]
+follow_imports = skip
+[file a.py]
+/  # No error reported
+[out]
+main.py:2: error: Revealed type is 'Any'
+main.py:4: error: Revealed type is 'Any'
+
+[case testConfigFollowImportsError]
+# cmd: mypy main.py
+[file main.py]
+from a import x
+reveal_type(x)  # Expect Any
+import a  # Error reported here
+reveal_type(a.x)  # Expect Any
+[file mypy.ini]
+[[mypy]
+follow_imports = error
+[file a.py]
+/  # No error reported
+[out]
+main.py:1: note: Import of 'a' ignored
+main.py:1: note: (Using --follow-imports=error, module not passed on command line)
+main.py:2: error: Revealed type is 'Any'
+main.py:4: error: Revealed type is 'Any'
+
+[case testConfigFollowImportsSelective]
+# cmd: mypy main.py
+[file mypy.ini]
+[[mypy]
+[[mypy-normal]
+follow_imports = normal
+[[mypy-silent]
+follow_imports = silent
+[[mypy-skip]
+follow_imports = skip
+[[mypy-error]
+follow_imports = error
+[file main.py]
+import normal
+import silent
+import skip
+import error
+reveal_type(normal.x)
+reveal_type(silent.x)
+reveal_type(skip)
+reveal_type(error)
+[file normal.py]
+x = 0
+x += ''
+[file silent.py]
+x = 0
+x += ''
+[file skip.py]
+bla bla
+[file error.py]
+bla bla
+[out]
+main.py:4: note: Import of 'error' ignored
+main.py:4: note: (Using --follow-imports=error, module not passed on command line)
+normal.py:2: error: Unsupported operand types for + ("int" and "str")
+main.py:5: error: Revealed type is 'builtins.int'
+main.py:6: error: Revealed type is 'builtins.int'
+main.py:7: error: Revealed type is 'Any'
+main.py:8: error: Revealed type is 'Any'
+
+[case testConfigSilentMissingImportsOff]
+# cmd: mypy main.py
+[file main.py]
+import missing  # Expect error here
+reveal_type(missing.x)  # Expect Any
+[file mypy.ini]
+[[mypy]
+ignore_missing_imports = False
+[out]
+main.py:1: error: Cannot find module named 'missing'
+main.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main.py:2: error: Revealed type is 'Any'
+
+[case testConfigSilentMissingImportsOn]
+# cmd: mypy main.py
+[file main.py]
+import missing  # No error here
+reveal_type(missing.x)  # Expect Any
+[file mypy.ini]
+[[mypy]
+ignore_missing_imports = True
+[out]
+main.py:2: error: Revealed type is 'Any'
+
+[case testDotInFilenameOKScript]
+# cmd: mypy a.b.py c.d.pyi
+[file a.b.py]
+undef
+[file c.d.pyi]
+whatever
+[out]
+c.d.pyi:1: error: Name 'whatever' is not defined
+a.b.py:1: error: Name 'undef' is not defined
+
+[case testDotInFilenameOKFolder]
+# cmd: mypy my.folder
+[file my.folder/tst.py]
+undef
+[out]
+my.folder/tst.py:1: error: Name 'undef' is not defined
+
+[case testDotInFilenameNoImport]
+# cmd: mypy main.py
+[file main.py]
+import a.b
+[file a.b.py]
+whatever
+[out]
+main.py:1: error: Cannot find module named 'a'
+main.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main.py:1: error: Cannot find module named 'a.b'
+
+[case testPythonVersionTooOld10]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+python_version = 1.0
+[out]
+mypy.ini: [mypy]: python_version: Python major version '1' out of range (must be 2 or 3)
+
+[case testPythonVersionTooOld26]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+python_version = 2.6
+[out]
+mypy.ini: [mypy]: python_version: Python 2.6 is not supported (must be 2.7)
+
+[case testPythonVersionTooOld32]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+python_version = 3.2
+[out]
+mypy.ini: [mypy]: python_version: Python 3.2 is not supported (must be 3.3 or higher)
+
+[case testPythonVersionTooNew28]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+python_version = 2.8
+[out]
+mypy.ini: [mypy]: python_version: Python 2.8 is not supported (must be 2.7)
+
+[case testPythonVersionTooNew40]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+python_version = 4.0
+[out]
+mypy.ini: [mypy]: python_version: Python major version '4' out of range (must be 2 or 3)
+
+[case testPythonVersionAccepted27]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+python_version = 2.7
+[out]
+
+[case testPythonVersionAccepted33]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+python_version = 3.3
+[out]
+
+[case testPythonVersionAccepted36]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+python_version = 3.6
+[out]
+
+[case testDisallowAnyUnimported]
+# cmd: mypy main.py
+[file mypy.ini]
+[[mypy]
+disallow_any = unimported
+ignore_missing_imports = True
+[file main.py]
+from unreal import F
+
+def f(x: F) -> None: pass
+[out]
+main.py:3: error: Argument 1 to "f" becomes "Any" due to an unfollowed import
+
+[case testDisallowAnyEmpty]
+# cmd: mypy main.py
+[file mypy.ini]
+[[mypy]
+disallow_any =
+ignore_missing_imports = True
+[file main.py]
+from unreal import F
+
+def f(x: F) -> None: pass
+[out]
+
+[case testDisallowAnyExplicitDefSignature]
+# cmd: mypy m.py
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from typing import Any, List
+
+def f(x: Any) -> None:
+    pass
+
+def g() -> Any:
+    pass
+
+def h() -> List[Any]:
+    pass
+
+[out]
+m.py:3: error: Explicit "Any" is not allowed
+m.py:6: error: Explicit "Any" is not allowed
+m.py:9: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyExplicitVarDeclaration]
+# cmd: mypy m.py
+
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from typing import Any, List
+v: Any = ''
+w = ''  # type: Any
+class X:
+    y = ''  # type: Any
+
+[out]
+m.py:2: error: Explicit "Any" is not allowed
+m.py:3: error: Explicit "Any" is not allowed
+m.py:5: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyExplicitGenericVarDeclaration]
+# cmd: mypy m.py
+
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from typing import Any, List
+v: List[Any] = []
+[out]
+m.py:2: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyExplicitInheritance]
+# cmd: mypy m.py
+
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from typing import Any, List
+
+class C(Any):
+    pass
+
+class D(List[Any]):
+    pass
+[out]
+m.py:3: error: Explicit "Any" is not allowed
+m.py:6: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyExplicitAlias]
+# cmd: mypy m.py
+
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from typing import Any, List
+
+X = Any
+Y = List[Any]
+
+def foo(x: X) -> Y:  # no error
+    x.nonexistent()  # no error
+    return x
+
+[out]
+m.py:3: error: Explicit "Any" is not allowed
+m.py:4: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyExplicitGenericAlias]
+# cmd: mypy m.py
+
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from typing import Any, List, TypeVar, Tuple
+
+T = TypeVar('T')
+
+TupleAny = Tuple[Any, T]  # error
+
+def foo(x: TupleAny[str]) -> None:  # no error
+    pass
+
+def goo(x: TupleAny[Any]) -> None:  # error
+    pass
+
+[out]
+m.py:5: error: Explicit "Any" is not allowed
+m.py:10: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyExplicitCast]
+# cmd: mypy m.py
+
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from typing import Any, List, cast
+
+x = 1
+y = cast(Any, x)
+z = cast(List[Any], x)
+[out]
+m.py:4: error: Explicit "Any" is not allowed
+m.py:5: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyExplicitNamedTuple]
+# cmd: mypy m.py
+
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from typing import Any, List, NamedTuple
+
+Point = NamedTuple('Point', [('x', List[Any]),
+                             ('y', Any)])
+
+[out]
+m.py:3: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyExplicitTypeVarConstraint]
+# cmd: mypy m.py
+
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from typing import Any, List, TypeVar
+
+T = TypeVar('T', Any, List[Any])
+[out]
+m.py:3: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyExplicitNewType]
+# cmd: mypy m.py
+
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from typing import Any, List, NewType
+
+Baz = NewType('Baz', Any)  # this error does not come from `--disallow-any=explicit` flag
+Bar = NewType('Bar', List[Any])
+
+[out]
+m.py:3: error: Argument 2 to NewType(...) must be subclassable (got Any)
+m.py:4: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyExplicitTypedDictSimple]
+# cmd: mypy m.py
+
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from mypy_extensions import TypedDict
+from typing import Any
+
+M = TypedDict('M', {'x': str, 'y': Any})  # error
+M(x='x', y=2)  # no error
+def f(m: M) -> None: pass  # no error
+[out]
+m.py:4: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyExplicitTypedDictGeneric]
+# cmd: mypy m.py
+
+[file mypy.ini]
+[[mypy]
+[[mypy-m*]
+disallow_any = explicit
+
+[file m.py]
+from mypy_extensions import TypedDict
+from typing import Any, List
+
+M = TypedDict('M', {'x': str, 'y': List[Any]})  # error
+N = TypedDict('N', {'x': str, 'y': List})  # no error
+[out]
+m.py:4: error: Explicit "Any" is not allowed
+
+[case testDisallowAnyGenericsTupleNoTypeParams]
+# cmd: mypy m.py
+[file mypy.ini]
+[[mypy]
+[[mypy-m]
+disallow_any = generics
+
+[file m.py]
+from typing import Tuple
+
+def f(s: Tuple) -> None: pass  # error
+def g(s) -> Tuple:  # error
+    return 'a', 'b'
+def h(s) -> Tuple[str, str]:  # no error
+    return 'a', 'b'
+x: Tuple = ()  # error
+[out]
+m.py:3: error: Missing type parameters for generic type
+m.py:4: error: Missing type parameters for generic type
+m.py:8: error: Missing type parameters for generic type
+
+[case testDisallowAnyGenericsTupleWithNoTypeParamsGeneric]
+# cmd: mypy m.py
+[file mypy.ini]
+[[mypy]
+[[mypy-m]
+disallow_any = generics
+
+[file m.py]
+from typing import Tuple, List
+
+def f(s: List[Tuple]) -> None: pass  # error
+def g(s: List[Tuple[str, str]]) -> None: pass  # no error
+[out]
+m.py:3: error: Missing type parameters for generic type
+
+[case testDisallowAnyGenericsTypeType]
+# cmd: mypy m.py
+[file mypy.ini]
+[[mypy]
+[[mypy-m]
+disallow_any = generics
+
+[file m.py]
+from typing import Type, Any
+
+def f(s: Type[Any]) -> None: pass  # no error
+def g(s) -> Type:  # error
+    return s
+def h(s) -> Type[str]:  # no error
+    return s
+x: Type = g(0)  # error
+[out]
+m.py:4: error: Missing type parameters for generic type
+m.py:8: error: Missing type parameters for generic type
+
+[case testDisallowAnyGenericsAliasGenericType]
+# cmd: mypy m.py
+[file mypy.ini]
+[[mypy]
+[[mypy-m]
+disallow_any = generics
+
+[file m.py]
+from typing import List
+
+L = List  # no error
+
+def f(l: L) -> None: pass  # error
+def g(l: L[str]) -> None: pass  # no error
+[out]
+m.py:5: error: Missing type parameters for generic type
+
+[case testDisallowAnyGenericsGenericAlias]
+# cmd: mypy m.py
+[file mypy.ini]
+[[mypy]
+[[mypy-m]
+disallow_any = generics
+
+[file m.py]
+from typing import List, TypeVar, Tuple
+
+T = TypeVar('T')
+A = Tuple[T, str, T]
+
+def f(s: A) -> None: pass  # error
+def g(s) -> A:  # error
+    return 'a', 'b', 1
+def h(s) -> A[str]:  # no error
+    return 'a', 'b', 'c'
+x: A = ('a', 'b', 1)  # error
+[out]
+m.py:6: error: Missing type parameters for generic type
+m.py:7: error: Missing type parameters for generic type
+m.py:11: error: Missing type parameters for generic type
+
+[case testDisallowAnyGenericsPlainList]
+# cmd: mypy m.py
+[file mypy.ini]
+[[mypy]
+[[mypy-m]
+disallow_any = generics
+
+[file m.py]
+from typing import List
+
+def f(l: List) -> None: pass  # error
+def g(l: List[str]) -> None: pass  # no error
+def h(l: List[List]) -> None: pass  # error
+def i(l: List[List[List[List]]]) -> None: pass  # error
+
+x = []  # error: need type annotation
+y: List = []  # error
+[out]
+m.py:3: error: Missing type parameters for generic type
+m.py:5: error: Missing type parameters for generic type
+m.py:6: error: Missing type parameters for generic type
+m.py:8: error: Need type annotation for variable
+m.py:9: error: Missing type parameters for generic type
+
+[case testDisallowAnyGenericsCustomGenericClass]
+# cmd: mypy m.py
+[file mypy.ini]
+[[mypy]
+[[mypy-m]
+disallow_any = generics
+
+[file m.py]
+from typing import Generic, TypeVar, Any
+
+T = TypeVar('T')
+class G(Generic[T]): pass
+
+def f() -> G:  # error
+    return G()
+
+x: G[Any] = G()  # no error
+y: G = x  # error
+
+[out]
+m.py:6: error: Missing type parameters for generic type
+m.py:10: error: Missing type parameters for generic type
+
+[case testDisallowAnyGenericsBuiltinCollections]
+# cmd: mypy m.py
+[file mypy.ini]
+[[mypy]
+[[mypy-m]
+disallow_any = generics
+
+[file m.py]
+s = tuple([1, 2, 3])  # no error
+
+def f(t: tuple) -> None: pass
+def g() -> list: pass
+def h(s: dict) -> None: pass
+def i(s: set) -> None: pass
+def j(s: frozenset) -> None: pass
+[out]
+m.py:3: error: Implicit generic "Any". Use 'typing.Tuple' and specify generic parameters
+m.py:4: error: Implicit generic "Any". Use 'typing.List' and specify generic parameters
+m.py:5: error: Implicit generic "Any". Use 'typing.Dict' and specify generic parameters
+m.py:6: error: Implicit generic "Any". Use 'typing.Set' and specify generic parameters
+m.py:7: error: Implicit generic "Any". Use 'typing.FrozenSet' and specify generic parameters
+
+[case testDisallowAnyGenericsTypingCollections]
+# cmd: mypy m.py
+[file mypy.ini]
+[[mypy]
+[[mypy-m]
+disallow_any = generics
+
+[file m.py]
+from typing import Tuple, List, Dict, Set, FrozenSet
+
+def f(t: Tuple) -> None: pass
+def g() -> List: pass
+def h(s: Dict) -> None: pass
+def i(s: Set) -> None: pass
+def j(s: FrozenSet) -> None: pass
+[out]
+m.py:3: error: Missing type parameters for generic type
+m.py:4: error: Missing type parameters for generic type
+m.py:5: error: Missing type parameters for generic type
+m.py:6: error: Missing type parameters for generic type
+m.py:7: error: Missing type parameters for generic type
diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test
new file mode 100644
index 0000000..a3d6533
--- /dev/null
+++ b/test-data/unit/deps.test
@@ -0,0 +1,281 @@
+-- Test cases for generating dependencies between AST nodes.
+--
+-- The dependencies are used for fine-grained incremental checking.
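+-- Each output line has the form "<trigger> -> target, ...": roughly, when the
+-- definition named by the trigger on the left changes, the items on the right
+-- (targets, or further triggers) are reprocessed or fired in turn. See the
+-- first cases below for concrete examples of the notation.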
+
+
+[case testCallFunction]
+def f() -> None:
+    g()
+def g() -> None:
+    pass
+[out]
+<m.g> -> m.f
+
+[case testCallMethod]
+def f(a: A) -> None:
+    a.g()
+class A:
+    def g(self) -> None: pass
+[out]
+<m.A.g> -> m.f
+<m.A> -> <m.f>, m.A, m.f
+
+[case testAccessAttribute]
+def f(a: A) -> None:
+    a.x
+class A:
+    def g(self) -> None:
+        self.x = 1
+[out]
+<m.A.x> -> m.A.g, m.f
+<m.A> -> <m.f>, m.A, m.f
+
+[case testConstructInstance]
+def f() -> None:
+    A()
+class A: pass
+[out]
+<m.A.__init__> -> m.f
+<m.A> -> m.A, m.f
+
+[case testAccessModuleAttribute]
+x = 1
+def f() -> None:
+    x
+[out]
+<m.x> -> m, m.f
+<builtins.int> -> m
+
+[case testAccessModuleAttribute2]
+import n
+def f() -> None:
+    n.x
+[file n.py]
+x = 1
+[out]
+<n.x> -> m.f
+<n> -> m, m.f
+
+[case testImport]
+import n
+[file n.py]
+x = 1
+[out]
+<n> -> m
+
+[case testCallImportedFunction]
+import n
+n.f()
+[file n.py]
+def f() -> None: pass
+[out]
+<n.f> -> m
+<n> -> m
+
+[case testCallImportedFunctionInFunction]
+import n
+def g() -> None:
+    n.f()
+[file n.py]
+def f() -> None: pass
+[out]
+<n.f> -> m.g
+<n> -> m, m.g
+
+[case testInheritanceSimple]
+class A:
+    pass
+class B(A):
+    pass
+[out]
+<m.A.__init__> -> <m.B.__init__>
+<m.A> -> m.A, m.B
+<m.B> -> m.B
+
+[case testInheritanceWithMethodAndAttribute]
+class A:
+    pass
+class B(A):
+    def f(self) -> None:
+        self.x = 1
+[out]
+<m.A.__init__> -> <m.B.__init__>
+<m.A.f> -> m.B.f
+<m.A.x> -> <m.B.x>
+<m.A> -> m.A, m.B
+<m.B.x> -> m.B.f
+<m.B> -> m.B
+
+[case testInheritanceWithMethodAndAttributeAndDeepHierarchy]
+class A:
+    pass
+class B(A):
+    pass
+class C(B):
+    def f(self) -> None:
+        self.x = 1
+[out]
+<m.A.__init__> -> <m.B.__init__>, <m.C.__init__>
+<m.A.f> -> m.C.f
+<m.A.x> -> <m.C.x>
+<m.A> -> m.A, m.B
+<m.B.__init__> -> <m.C.__init__>
+<m.B.f> -> m.C.f
+<m.B.x> -> <m.C.x>
+<m.B> -> m.B, m.C
+<m.C.x> -> m.C.f
+<m.C> -> m.C
+
+[case testInheritAttribute]
+import n
+class B(n.A):
+    def f(self) -> None:
+        a = 1
+        a = self.x
+[file n.py]
+class A:
+    def g(self) -> None:
+        self.x = 1
+[out]
+<m.B.x> -> m.B.f
+<m.B> -> m.B
+<n.A.__init__> -> <m.B.__init__>
+<n.A.f> -> m.B.f
+<n.A.g> -> <m.B.g>
+<n.A.x> -> <m.B.x>
+<n.A> -> m.B
+<n> -> m, m.B
+
+[case testInheritMethod]
+class A:
+    def g(self) -> None: pass
+class B(A):
+    def f(self) -> None:
+        self.g()
+[out]
+<m.A.__init__> -> <m.B.__init__>
+<m.A.f> -> m.B.f
+<m.A.g> -> <m.B.g>
+<m.A> -> m.A, m.B
+<m.B.g> -> m.B.f
+<m.B> -> m.B
+
+[case testPackage]
+import a.b
+def f() -> None:
+    a.b.g()
+[file a/__init__.py]
+[file a/b.py]
+def g() -> None: pass
+[out]
+<a.b.g> -> m.f
+<a.b> -> m, m.f
+<a> -> m.f
+
+[case testClassInPackage]
+import a.b
+def f(x: a.b.A) -> None:
+    x.g()
+    x.y
+[file a/__init__.py]
+[file a/b.py]
+class A:
+    def g(self) -> None:
+        self.y = 1
+[out]
+<a.b.A.g> -> m.f
+<a.b.A.y> -> m.f
+<a.b.A> -> <m.f>, m.f
+<a.b> -> m
+
+[case testPackage__init__]
+import a
+def f() -> None:
+    a.g()
+[file a/__init__.py]
+def g() -> None: pass
+[out]
+<a.g> -> m.f
+<a> -> m, m.f
+
+[case testClassInPackage__init__]
+import a
+def f(x: a.A) -> None:
+    x.g()
+    x.y
+[file a/__init__.py]
+class A:
+    def g(self) -> None:
+        self.y = 1
+[out]
+<a.A.g> -> m.f
+<a.A.y> -> m.f
+<a.A> -> <m.f>, m.f
+<a> -> m
+
+[case testConstructor]
+class A:
+    def __init__(self, x: int) -> None: pass
+def f() -> None:
+    A(1)
+[out]
+<m.A.__init__> -> m.f
+<m.A> -> m.A, m.f
+<builtins.int> -> <m.A.__init__>, m.A.__init__
+
+[case testImportFrom]
+from n import f
+
+def g() -> None:
+    f()
+[file n.py]
+def f() -> None: pass
+[out]
+<n.f> -> m, m.g
+
+[case testNestedClass]
+def f() -> None:
+    b = A.B()
+    b.f()
+class A:
+    class B:
+        def f(self) -> None: pass
+[out]
+<m.A.B.__init__> -> m.f
+<m.A.B.f> -> m.f
+<m.A.B> -> m.A.B, m.f
+<m.A> -> m.A, m.f
+
+[case testNestedClassAttribute]
+def f() -> None:
+    b = A.B()
+    b.x
+class A:
+    class B:
+        def f(self) -> None:
+            self.x = 1
+[out]
+<m.A.B.__init__> -> m.f
+<m.A.B.x> -> m.A.B.f, m.f
+<m.A.B> -> m.A.B, m.f
+<m.A> -> m.A, m.f
+
+[case testNestedClassInAnnotation]
+def f(x: A.B) -> None:
+    pass
+class A:
+    class B: pass
+[out]
+<m.A.B> -> <m.f>, m.A.B, m.f
+<m.A> -> m.A
+
+[case testNestedClassInAnnotation2]
+def f(x: A.B) -> None:
+    x.f()
+class A:
+    class B:
+        def f(self) -> None: pass
+[out]
+<m.A.B.f> -> m.f
+<m.A.B> -> <m.f>, m.A.B, m.f
+<m.A> -> m.A
diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test
new file mode 100644
index 0000000..638948c
--- /dev/null
+++ b/test-data/unit/diff.test
@@ -0,0 +1,267 @@
+-- Test cases for taking a diff of two module ASTs/symbol tables.
+-- The diffs are used for fine-grained incremental checking.
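+-- In each case the unlabeled code block gives the original module and
+-- [file next.py] gives its new version; [out] is expected to list, roughly,
+-- the fully qualified names whose definitions differ between the two versions.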
+
+--
+-- Module top-levels
+--
+
+[case testChangeTypeOfModuleAttribute]
+x = 1
+y = 1
+[file next.py]
+x = ''
+y = 1
+[out]
+__main__.x
+
+[case testChangeSignatureOfModuleFunction]
+def f(x: int) -> None:
+    pass
+def g(y: str) -> None:
+    pass
+[file next.py]
+def f(x: str) -> None:
+    x = ''
+def g(y: str) -> None:
+    y = ''
+[out]
+__main__.f
+
+[case testAddModuleAttribute]
+x = 1
+[file next.py]
+x = 1
+y = 1
+[out]
+__main__.y
+
+[case testRemoveModuleAttribute]
+x = 1
+y = 1
+[file next.py]
+x = 1
+[out]
+__main__.y
+
+--
+-- Classes
+--
+
+[case testChangeMethodSignature]
+class A:
+    def f(self) -> None: pass
+    def g(self) -> None: pass
+[file next.py]
+class A:
+    def f(self, x: int) -> None: pass
+    def g(self) -> None: pass
+[out]
+__main__.A.f
+
+[case testChangeAttributeType]
+class A:
+    def f(self) -> None:
+        self.x = 1
+        self.y = 1
+[file next.py]
+class A:
+    def f(self) -> None:
+        self.x = 1
+        self.y = ''
+[out]
+__main__.A.y
+
+[case testAddAttribute]
+class A: pass
+[file next.py]
+class A:
+    def f(self) -> None:
+        self.x = 1
+[out]
+__main__.A.f
+__main__.A.x
+
+[case testAddAttribute2]
+class A:
+    def f(self) -> None: pass
+[file next.py]
+class A:
+    def f(self) -> None:
+        self.x = 1
+[out]
+__main__.A.x
+
+[case testRemoveAttribute]
+class A:
+    def f(self) -> None:
+        self.x = 1
+[file next.py]
+class A: pass
+[out]
+__main__.A.f
+__main__.A.x
+
+[case testAddMethod]
+class A:
+    def f(self) -> None: pass
+[file next.py]
+class A:
+    def f(self) -> None: pass
+    def g(self) -> None: pass
+[out]
+__main__.A.g
+
+[case testRemoveMethod]
+class A:
+    def f(self) -> None: pass
+    def g(self) -> None: pass
+[file next.py]
+class A:
+    def f(self) -> None: pass
+[out]
+__main__.A.g
+
+[case testAddImport]
+import nn
+[file next.py]
+import n
+import nn
+[file n.py]
+x = 1
+[file nn.py]
+y = 1
+[out]
+__main__.n
+
+[case testRemoveImport]
+import n
+[file next.py]
+[file n.py]
+x = 1
+[out]
+__main__.n
+
+[case testChangeClassIntoFunction]
+class A: pass
+[file next.py]
+def A() -> None: pass
+[out]
+__main__.A
+
+[case testDeleteClass]
+class A: pass
+[file next.py]
+[out]
+__main__.A
+
+[case testAddBaseClass]
+class A: pass
+[file next.py]
+class B: pass
+class A(B): pass
+[out]
+__main__.A
+__main__.B
+
+[case testChangeBaseClass]
+class A: pass
+class B: pass
+class C(A): pass
+[file next.py]
+class A: pass
+class B: pass
+class C(B): pass
+[out]
+__main__.C
+
+[case testRemoveBaseClass]
+class A: pass
+class B(A): pass
+[file next.py]
+class A: pass
+class B: pass
+[out]
+__main__.B
+
+[case testRemoveClassFromMiddleOfMro]
+class A: pass
+class B(A): pass
+class C(B): pass
+[file next.py]
+class A: pass
+class B: pass
+class C(B): pass
+[out]
+__main__.B
+__main__.C
+
+[case testDifferenceInConstructor]
+class A:
+    def __init__(self) -> None: pass
+[file next.py]
+class A:
+    def __init__(self, x: int) -> None: pass
+[out]
+__main__.A.__init__
+
+[case testChangeSignatureOfMethodInNestedClass]
+class A:
+    class B:
+        def f(self) -> int: pass
+[file next.py]
+class A:
+    class B:
+        def f(self) -> str: pass
+[out]
+__main__.A.B.f
+
+[case testChangeTypeOfAttributeInNestedClass]
+class A:
+    class B:
+        def f(self) -> None:
+            self.x = 1
+[file next.py]
+class A:
+    class B:
+        def f(self) -> None:
+            self.x = ''
+[out]
+__main__.A.B.x
+
+[case testAddMethodToNestedClass]
+class A:
+    class B: pass
+[file next.py]
+class A:
+    class B:
+        def f(self) -> str: pass
+[out]
+__main__.A.B.f
+
+[case testAddNestedClass]
+class A: pass
+[file next.py]
+class A:
+    class B:
+        def f(self) -> None: pass
+[out]
+__main__.A.B
+
+[case testRemoveNestedClass]
+class A:
+    class B:
+        def f(self) -> None: pass
+[file next.py]
+class A: pass
+[out]
+__main__.A.B
+
+[case testChangeNestedClassToMethod]
+class A:
+    class B: pass
+[file next.py]
+class A:
+    def B(self) -> None: pass
+
+[out]
+__main__.A.B
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test
new file mode 100644
index 0000000..17d126f
--- /dev/null
+++ b/test-data/unit/fine-grained.test
@@ -0,0 +1,872 @@
+-- Test cases for fine-grained incremental checking
+--
+-- Test cases may define multiple versions of a file
+-- (e.g. m.py, m.py.2). There is always an initial batch
+-- pass that processes all files present initially, followed
+-- by one or more fine-grained incremental passes that use
+-- alternative versions of files, if available. If a file
+-- just has a single .py version, it is used for all passes.
+
+-- TODO: what happens if an alternative version is provided for some passes but not all?
+
+-- Output is laid out like this:
+--
+--   [out]
+--   <optional output from batch pass>
+--   ==
+--   <optional output from first incremental pass>
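+--
+-- A pass that produces no errors contributes an empty section, so the expected
+-- output may contain consecutive '==' separator lines.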
+
+[case testReprocessFunction]
+import m
+def g() -> int:
+    return m.f()
+[file m.py]
+def f() -> int:
+    pass
+[file m.py.2]
+def f() -> str:
+    pass
+[out]
+==
+main:3: error: Incompatible return value type (got "str", expected "int")
+
+[case testReprocessTopLevel]
+import m
+m.f(1)
+def g() -> None: pass
+[file m.py]
+def f(x: int) -> None: pass
+[file m.py.2]
+def f(x: str) -> None: pass
+[out]
+==
+main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"
+
+[case testReprocessMethod]
+import m
+class B:
+    def f(self, a: m.A) -> None:
+        a.g() # E
+[file m.py]
+class A:
+    def g(self) -> None: pass
+[file m.py.2]
+class A:
+    def g(self, a: A) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "g" of "A"
+
+[case testFunctionMissingModuleAttribute]
+import m
+def h() -> None:
+    m.f(1)
+[file m.py]
+def f(x: int) -> None: pass
+[file m.py.2]
+def g(x: str) -> None: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+==
+main:3: error: Module has no attribute "f"
+
+[case testTopLevelMissingModuleAttribute]
+import m
+m.f(1)
+def g() -> None: pass
+[file m.py]
+def f(x: int) -> None: pass
+[file m.py.2]
+def g(x: int) -> None: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+==
+main:2: error: Module has no attribute "f"
+
+[case testClassChangedIntoFunction]
+import m
+def f(a: m.A) -> None:
+    pass
+[file m.py]
+class A: pass
+[file m.py.2]
+def A() -> None: pass
+[out]
+==
+main:2: error: Invalid type "m.A"
+
+[case testClassChangedIntoFunction2]
+import m
+class B:
+    def f(self, a: m.A) -> None: pass
+[file m.py]
+class A: pass
+[file m.py.2]
+def A() -> None: pass
+[out]
+==
+main:3: error: Invalid type "m.A"
+
+[case testAttributeTypeChanged]
+import m
+def f(a: m.A) -> int:
+    return a.x
+[file m.py]
+class A:
+    def f(self) -> None:
+        self.x = 1
+[file m.py.2]
+class A:
+    def f(self) -> None:
+        self.x = 'x'
+[out]
+==
+main:3: error: Incompatible return value type (got "str", expected "int")
+
+[case testAttributeRemoved]
+import m
+def f(a: m.A) -> int:
+    return a.x
+[file m.py]
+class A:
+    def f(self) -> None:
+        self.x = 1
+[file m.py.2]
+class A:
+    def f(self) -> None: pass
+[out]
+==
+main:3: error: "A" has no attribute "x"
+
+[case testVariableTypeBecomesInvalid]
+import m
+def f() -> None:
+    a = None # type: m.A
+[file m.py]
+class A: pass
+[file m.py.2]
+[out]
+==
+main:3: error: Name 'm.A' is not defined
+
+[case testTwoIncrementalSteps]
+import m
+import n
+[file m.py]
+def f() -> None: pass
+[file n.py]
+import m
+def g() -> None:
+    m.f() # E
+[file m.py.2]
+import n
+def f(x: int) -> None:
+    n.g() # E
+[file n.py.3]
+import m
+def g(a: str) -> None:
+    m.f('') # E
+[out]
+==
+n.py:3: error: Too few arguments for "f"
+==
+n.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+m.py:3: error: Too few arguments for "g"
+
+[case testTwoRounds]
+import m
+def h(a: m.A) -> int:
+    return a.x
+[file m.py]
+import n
+class A:
+    def g(self, b: n.B) -> None:
+        self.x = b.f()
+[file n.py]
+class B:
+    def f(self) -> int: pass
+[file n.py.2]
+class B:
+    def f(self) -> str: pass
+[out]
+==
+main:3: error: Incompatible return value type (got "str", expected "int")
+
+[case testFixTypeError]
+import m
+def f(a: m.A) -> None:
+    a.f(a)
+[file m.py]
+class A:
+    def f(self, a: 'A') -> None: pass
+[file m.py.2]
+class A:
+    def f(self) -> None: pass
+[file m.py.3]
+class A:
+    def f(self, a: 'A') -> None: pass
+[out]
+==
+main:3: error: Too many arguments for "f" of "A"
+==
+
+[case testFixTypeError2]
+import m
+def f(a: m.A) -> None:
+    a.f()
+[file m.py]
+class A:
+    def f(self) -> None: pass
+[file m.py.2]
+class A:
+    def g(self) -> None: pass
+[file m.py.3]
+class A:
+    def f(self) -> None: pass
+[out]
+==
+main:3: error: "A" has no attribute "f"
+==
+
+[case testFixSemanticAnalysisError]
+import m
+def f() -> None:
+    m.A()
+[file m.py]
+class A: pass
+[file m.py.2]
+class B: pass
+[file m.py.3]
+class A: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+==
+main:3: error: Module has no attribute "A"
+==
+
+[case testContinueToReportTypeCheckError]
+import m
+def f(a: m.A) -> None:
+    a.f()
+def g(a: m.A) -> None:
+    a.g()
+[file m.py]
+class A:
+    def f(self) -> None: pass
+    def g(self) -> None: pass
+[file m.py.2]
+class A: pass
+[file m.py.3]
+class A:
+    def f(self) -> None: pass
+[out]
+==
+main:3: error: "A" has no attribute "f"
+main:5: error: "A" has no attribute "g"
+==
+main:5: error: "A" has no attribute "g"
+
+[case testContinueToReportSemanticAnalysisError]
+import m
+def f() -> None:
+    m.A()
+def g() -> None:
+    m.B()
+[file m.py]
+class A: pass
+class B: pass
+[file m.py.2]
+[file m.py.3]
+class A: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+==
+main:3: error: Module has no attribute "A"
+main:5: error: Module has no attribute "B"
+==
+main:5: error: Module has no attribute "B"
+
+[case testContinueToReportErrorAtTopLevel]
+import n
+import m
+m.A().f()
+[file n.py]
+import m
+m.A().g()
+[file m.py]
+class A:
+    def f(self) -> None: pass
+    def g(self) -> None: pass
+[file m.py.2]
+class A: pass
+[file m.py.3]
+class A:
+    def f(self) -> None: pass
+[out]
+==
+main:3: error: "A" has no attribute "f"
+n.py:2: error: "A" has no attribute "g"
+==
+n.py:2: error: "A" has no attribute "g"
+
+[case testContinueToReportErrorInMethod]
+import m
+class C:
+    def f(self, a: m.A) -> None:
+        a.f()
+    def g(self, a: m.A) -> None:
+        a.g()
+[file m.py]
+class A:
+    def f(self) -> None: pass
+    def g(self) -> None: pass
+[file m.py.2]
+class A: pass
+[file m.py.3]
+class A:
+    def f(self) -> None: pass
+[out]
+==
+main:4: error: "A" has no attribute "f"
+main:6: error: "A" has no attribute "g"
+==
+main:6: error: "A" has no attribute "g"
+
+[case testInitialBatchGeneratedError]
+import m
+def g() -> None:
+    m.f()
+def h() -> None:
+    m.g()
+[file m.py]
+def f(x: object) -> None: pass
+[file m.py.2]
+def f() -> None: pass
+[file m.py.3]
+def f() -> None: pass
+def g() -> None: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+main:3: error: Too few arguments for "f"
+main:5: error: Module has no attribute "g"
+==
+main:5: error: Module has no attribute "g"
+==
+
+[case testKeepReportingErrorIfNoChanges]
+import m
+def h() -> None:
+    m.g()
+[file m.py]
+[file m.py.2]
+[builtins fixtures/fine_grained.pyi]
+[out]
+main:3: error: Module has no attribute "g"
+==
+main:3: error: Module has no attribute "g"
+
+[case testFixErrorAndReintroduce]
+import m
+def h() -> None:
+    m.g()
+[file m.py]
+[file m.py.2]
+def g() -> None: pass
+[file m.py.3]
+[builtins fixtures/fine_grained.pyi]
+[out]
+main:3: error: Module has no attribute "g"
+==
+==
+main:3: error: Module has no attribute "g"
+
+[case testAddBaseClassMethodCausingInvalidOverride]
+import m
+class B(m.A):
+    def f(self) -> str: pass
+[file m.py]
+class A: pass
+[file m.py.2]
+class A:
+    def f(self) -> int: pass
+[out]
+==
+main:3: error: Return type of "f" incompatible with supertype "A"
+
+[case testModifyBaseClassMethodCausingInvalidOverride]
+import m
+class B(m.A):
+    def f(self) -> str: pass
+[file m.py]
+class A:
+    def f(self) -> str: pass
+[file m.py.2]
+class A:
+    def f(self) -> int: pass
+[out]
+==
+main:3: error: Return type of "f" incompatible with supertype "A"
+
+[case testAddBaseClassAttributeCausingErrorInSubclass]
+import m
+class B(m.A):
+    def a(self) -> None:
+        x = 1
+        x = self.x
+
+    def f(self) -> None:
+        self.x = 1
+
+    def z(self) -> None:
+        x = 1
+        x = self.x
+[file m.py]
+class A: pass
+[file m.py.2]
+class A:
+    def g(self) -> None:
+        self.x = 'a'
+[out]
+==
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:8: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+main:12: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testChangeBaseClassAttributeType]
+import m
+class B(m.A):
+    def f(sel) -> None:
+        sel.x = 1
+[file m.py]
+class A:
+    def g(self) -> None:
+        self.x = 1
+[file m.py.2]
+class A:
+    def g(self) -> None:
+        self.x = 'a'
+[out]
+==
+main:4: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testRemoveAttributeInBaseClass]
+import m
+class B(m.A):
+    def f(self) -> None:
+        a = 1
+        a = self.x
+[file m.py]
+class A:
+    def g(self) -> None:
+        self.x = 1
+[file m.py.2]
+class A: pass
+[out]
+==
+main:5: error: "B" has no attribute "x"
+
+[case testTestSignatureOfInheritedMethod]
+import m
+class B(m.A):
+    def f(self) -> None:
+        self.g()
+[file m.py]
+class A:
+    def g(self) -> None: pass
+[file m.py.2]
+class A:
+    def g(self, a: 'A') -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "g" of "A"
+
+[case testRemoveBaseClass]
+import m
+class A(m.B):
+    def f(self) -> None:
+        self.g()
+        self.x
+        self.y = 1
+[file m.py]
+class C:
+    def g(self) -> None:
+        self.x = 1
+class B(C): pass
+[file m.py.2]
+class C: pass
+class B: pass
+[out]
+==
+main:4: error: "A" has no attribute "g"
+main:5: error: "A" has no attribute "x"
+
+[case testRemoveBaseClass2]
+import m
+class A(m.B):
+    def f(self) -> None:
+        self.g()
+        self.x
+        self.y = 1
+[file m.py]
+class C:
+    def g(self) -> None:
+        self.x = 1
+class B(C): pass
+[file m.py.2]
+class C:
+    def g(self) -> None:
+        self.x = 1
+class B: pass
+[out]
+==
+main:4: error: "A" has no attribute "g"
+main:5: error: "A" has no attribute "x"
+
+[case testChangeInPackage]
+import m.n
+def f() -> None:
+    m.n.g()
+[file m/__init__.py]
+[file m/n.py]
+def g() -> None: pass
+[file m/n.py.2]
+def g(x: int) -> None: pass
+[out]
+==
+main:3: error: Too few arguments for "g"
+
+[case testTriggerTargetInPackage]
+import m.n
+[file m/__init__.py]
+[file m/n.py]
+import a
+def f() -> None:
+    a.g()
+[file a.py]
+def g() -> None: pass
+[file a.py.2]
+def g(x: int) -> None: pass
+[out]
+==
+m/n.py:3: error: Too few arguments for "g"
+
+[case testChangeInPackage__init__]
+import m
+import m.n
+def f() -> None:
+    m.g()
+[file m/__init__.py]
+def g() -> None: pass
+[file m/__init__.py.2]
+def g(x: int) -> None: pass
+[file m/n.py]
+[out]
+==
+main:4: error: Too few arguments for "g"
+
+[case testTriggerTargetInPackage__init__]
+import m
+import m.n
+[file m/__init__.py]
+import a
+def f() -> None:
+    a.g()
+[file a.py]
+def g() -> None: pass
+[file a.py.2]
+def g(x: int) -> None: pass
+[file m/n.py]
+[out]
+==
+m/__init__.py:3: error: Too few arguments for "g"
+
+[case testModuleAttributeTypeChanges]
+import m
+def f() -> None:
+    x = 1
+    x = m.x
+[file m.py]
+x = 1
+[file m.py.2]
+x = ''
+[out]
+==
+main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testTwoStepsDueToModuleAttribute]
+import m
+x = m.f()
+
+def g() -> None:
+    y = 1
+    y = x # E
+[file m.py]
+def f() -> int: pass
+[file m.py.2]
+def f() -> str: pass
+[out]
+==
+main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testTwoStepsDueToMultipleNamespaces]
+import m
+
+x = m.f()
+
+def g() -> None:
+    xx = 1
+    xx = x
+
+class A:
+    def a(self) -> None:
+        self.y = m.f()
+    def b(self) -> None:
+        yy = 1
+        yy = self.y
+
+class B:
+    def c(self) -> None:
+        self.z = m.f()
+    def b(self) -> None:
+        zz = 1
+        zz = self.z
+[file m.py]
+def f() -> int: pass
+[file m.py.2]
+def f() -> str: pass
+[out]
+==
+main:7: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:14: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:21: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testConstructorSignatureChanged]
+import m
+
+def f() -> None:
+    m.A()
+[file m.py]
+class A:
+    def __init__(self) -> None: pass
+[file m.py.2]
+class A:
+    def __init__(self, x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "A"
+
+[case testConstructorAdded]
+import m
+
+def f() -> None:
+    m.A()
+[file m.py]
+class A: pass
+[file m.py.2]
+class A:
+    def __init__(self, x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "A"
+
+[case testConstructorDeleted]
+import m
+
+def f() -> None:
+    m.A(1)
+[file m.py]
+class A:
+    def __init__(self, x: int) -> None: pass
+[file m.py.2]
+class A: pass
+[out]
+==
+main:4: error: Too many arguments for "A"
+
+[case testBaseClassConstructorChanged]
+import m
+
+def f() -> None:
+    m.B()
+[file m.py]
+class A:
+    def __init__(self) -> None: pass
+class B(A): pass
+[file m.py.2]
+class A:
+    def __init__(self, x: int) -> None: pass
+class B(A): pass
+[out]
+==
+main:4: error: Too few arguments for "B"
+
+[case testImportFrom]
+from m import f
+
+def g() -> None:
+    f()
+[file m.py]
+def f() -> None: pass
+[file m.py.2]
+def f(x: int) -> None: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+==
+main:4: error: Too few arguments for "f"
+
+[case testImportFrom2]
+from m import f
+f()
+[file m.py]
+def f() -> None: pass
+[file m.py.2]
+def f(x: int) -> None: pass
+[out]
+==
+main:2: error: Too few arguments for "f"
+
+[case testImportFromTargetsClass]
+from m import C
+
+def f(c: C) -> None:
+    c.g()
+[file m.py]
+class C:
+    def g(self) -> None: pass
+[file m.py.2]
+class C:
+    def g(self, x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "g" of "C"
+
+[case testImportFromTargetsVariable]
+from m import x
+
+def f() -> None:
+    y = 1
+    y = x
+[file m.py]
+x = 1
+[file m.py.2]
+x = ''
+[out]
+==
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testImportFromSubmoduleOfPackage]
+from m import n
+
+def f() -> None:
+    n.g()
+[file m/__init__.py]
+[file m/n.py]
+def g() -> None: pass
+[file m/n.py.2]
+def g(x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "g"
+
+[case testImportedFunctionGetsImported]
+from m import f
+
+def g() -> None:
+    f()
+[file m.py]
+from n import f
+[file n.py]
+def f() -> None: pass
+[file n.py.2]
+def f(x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "f"
+
+[case testNestedClassMethodSignatureChanges]
+from m import A
+
+def f(x: A.B) -> None:
+    x.g()
+[file m.py]
+class A:
+    class B:
+        def g(self) -> None: pass
+[file m.py.2]
+class A:
+    class B:
+        def g(self, x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "g" of "B"
+
+[case testNestedClassAttributeTypeChanges]
+from m import A
+
+def f(x: A.B) -> None:
+    z = 1
+    z = x.y
+[file m.py]
+class A:
+    class B:
+        def g(self) -> None:
+            self.y = 1
+[file m.py.2]
+class A:
+    class B:
+        def g(self) -> None:
+            self.y = ''
+[out]
+==
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testReprocessMethodInNestedClass]
+from m import f
+
+class A:
+    class B:
+        def g(self) -> None:
+            x = 1
+            x = f()
+[file m.py]
+def f() -> int: pass
+[file m.py.2]
+def f() -> str: pass
+[out]
+==
+main:7: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testBaseClassDeleted]
+import m
+
+class A(m.C):
+    def f(self) -> None:
+        self.g()  # No error here because m.C becomes an Any base class
+    def g(self) -> None:
+        self.x
+[file m.py]
+class C:
+    def g(self) -> None: pass
+[file m.py.2]
+[out]
+main:7: error: "A" has no attribute "x"
+==
+main:3: error: Name 'm.C' is not defined
+
+[case testBaseClassOfNestedClassDeleted]
+import m
+
+class A:
+    class B(m.C):
+        def f(self) -> None:
+            self.g()  # No error here because m.C becomes an Any base class
+        def g(self) -> None:
+            self.x
+[file m.py]
+class C:
+    def g(self) -> None: pass
+[file m.py.2]
+[out]
+main:8: error: "B" has no attribute "x"
+==
+main:4: error: Name 'm.C' is not defined
diff --git a/test-data/unit/fixtures/__new__.pyi b/test-data/unit/fixtures/__new__.pyi
new file mode 100644
index 0000000..4e2cc57
--- /dev/null
+++ b/test-data/unit/fixtures/__new__.pyi
@@ -0,0 +1,14 @@
+# builtins stub with object.__new__
+
+class object:
+    def __init__(self) -> None: pass
+
+    def __new__(cls): pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class int: pass
+class bool: pass
+class str: pass
+class function: pass
diff --git a/test-data/unit/fixtures/alias.pyi b/test-data/unit/fixtures/alias.pyi
new file mode 100644
index 0000000..5909cb6
--- /dev/null
+++ b/test-data/unit/fixtures/alias.pyi
@@ -0,0 +1,12 @@
+# Builtins test fixture with a type alias 'bytes'
+
+class object:
+    def __init__(self) -> None: pass
+class type:
+    def __init__(self, x) -> None: pass
+
+class int: pass
+class str: pass
+class function: pass
+
+bytes = str
diff --git a/test-data/unit/fixtures/args.pyi b/test-data/unit/fixtures/args.pyi
new file mode 100644
index 0000000..b3d05b5
--- /dev/null
+++ b/test-data/unit/fixtures/args.pyi
@@ -0,0 +1,29 @@
+# Builtins stub used to support *args, **kwargs.
+
+from typing import TypeVar, Generic, Iterable, Tuple, Dict, Any, overload, Mapping
+
+Tco = TypeVar('Tco', covariant=True)
+T = TypeVar('T')
+S = TypeVar('S')
+
+class object:
+    def __init__(self) -> None: pass
+    def __eq__(self, o: object) -> bool: pass
+    def __ne__(self, o: object) -> bool: pass
+
+class type:
+    @overload
+    def __init__(self, o: object) -> None: pass
+    @overload
+    def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: pass
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: pass
+
+class tuple(Iterable[Tco], Generic[Tco]): pass
+
+class dict(Iterable[T], Mapping[T, S], Generic[T, S]): pass
+
+class int:
+    def __eq__(self, o: object) -> bool: pass
+class str: pass
+class bool: pass
+class function: pass
diff --git a/test-data/unit/fixtures/async_await.pyi b/test-data/unit/fixtures/async_await.pyi
new file mode 100644
index 0000000..42c1b53
--- /dev/null
+++ b/test-data/unit/fixtures/async_await.pyi
@@ -0,0 +1,20 @@
+import typing
+
+T = typing.TypeVar('T')
+class list(typing.Generic[T], typing.Sequence[T]): pass
+
+class object:
+    def __init__(self): pass
+class type: pass
+class function: pass
+class int: pass
+class str: pass
+class dict: pass
+class set: pass
+class tuple: pass
+class BaseException: pass
+class StopIteration(BaseException): pass
+class StopAsyncIteration(BaseException): pass
+def iter(obj: typing.Any) -> typing.Any: pass
+def next(obj: typing.Any) -> typing.Any: pass
+class ellipsis: ...
diff --git a/test-data/unit/fixtures/bool.pyi b/test-data/unit/fixtures/bool.pyi
new file mode 100644
index 0000000..c4b4f30
--- /dev/null
+++ b/test-data/unit/fixtures/bool.pyi
@@ -0,0 +1,12 @@
+# builtins stub used in boolean-related test cases.
+
+class object:
+    def __init__(self) -> None: pass
+
+class type: pass
+class tuple: pass
+class function: pass
+class bool: pass
+class int: pass
+class str: pass
+class unicode: pass
diff --git a/test-data/unit/fixtures/callable.pyi b/test-data/unit/fixtures/callable.pyi
new file mode 100644
index 0000000..ae58648
--- /dev/null
+++ b/test-data/unit/fixtures/callable.pyi
@@ -0,0 +1,26 @@
+from typing import Generic, Tuple, TypeVar, Union
+
+T = TypeVar('T')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class tuple(Generic[T]): pass
+
+class function: pass
+
+def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass
+
+def callable(x: object) -> bool: pass
+
+class int:
+    def __add__(self, other: 'int') -> 'int': pass
+    def __eq__(self, other: 'int') -> 'bool': pass
+class float: pass
+class bool(int): pass
+class str:
+    def __add__(self, other: 'str') -> 'str': pass
+    def __eq__(self, other: 'str') -> bool: pass
diff --git a/test-data/unit/fixtures/classmethod.pyi b/test-data/unit/fixtures/classmethod.pyi
new file mode 100644
index 0000000..6d7f71b
--- /dev/null
+++ b/test-data/unit/fixtures/classmethod.pyi
@@ -0,0 +1,26 @@
+import typing
+
+_T = typing.TypeVar('_T')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+    def mro(self) -> typing.Any: pass
+
+class function: pass
+
+# Dummy definitions.
+classmethod = object()
+staticmethod = object()
+
+class int:
+    @classmethod
+    def from_bytes(cls, bytes: bytes, byteorder: str) -> int: pass
+
+class str: pass
+class bytes: pass
+class bool: pass
+
+class tuple(typing.Generic[_T]): pass
diff --git a/test-data/unit/fixtures/complex.pyi b/test-data/unit/fixtures/complex.pyi
new file mode 100644
index 0000000..d4135be
--- /dev/null
+++ b/test-data/unit/fixtures/complex.pyi
@@ -0,0 +1,11 @@
+# Builtins stub used for some float/complex test cases.
+
+class object:
+    def __init__(self): pass
+
+class type: pass
+class function: pass
+class int: pass
+class float: pass
+class complex: pass
+class str: pass
diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi
new file mode 100644
index 0000000..4182afb
--- /dev/null
+++ b/test-data/unit/fixtures/dict.pyi
@@ -0,0 +1,44 @@
+# Builtins stub used in dictionary-related test cases.
+
+from typing import TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union
+
+T = TypeVar('T')
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type: pass
+
+class dict(Mapping[KT, VT], Iterable[KT], Generic[KT, VT]):
+    @overload
+    def __init__(self, **kwargs: VT) -> None: pass
+    @overload
+    def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass
+    def __setitem__(self, k: KT, v: VT) -> None: pass
+    def __iter__(self) -> Iterator[KT]: pass
+    def update(self, a: Mapping[KT, VT]) -> None: pass
+    @overload
+    def get(self, k: KT) -> Optional[VT]: pass
+    @overload
+    def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass
+
+class int: # for convenience
+    def __add__(self, x: int) -> int: pass
+
+class str: pass # for keyword argument key type
+class unicode: pass # needed for py2 docstrings
+
+class list(Iterable[T], Generic[T]): # needed by some test cases
+    def __getitem__(self, x: int) -> T: pass
+    def __iter__(self) -> Iterator[T]: pass
+    def __mul__(self, x: int) -> list[T]: pass
+
+class tuple: pass
+class function: pass
+class float: pass
+class bool: pass
+
+class ellipsis: pass
+class BaseException: pass
diff --git a/test-data/unit/fixtures/exception.pyi b/test-data/unit/fixtures/exception.pyi
new file mode 100644
index 0000000..5a2482d
--- /dev/null
+++ b/test-data/unit/fixtures/exception.pyi
@@ -0,0 +1,13 @@
+
+class object:
+    def __init__(self): pass
+
+class type: pass
+class tuple: pass
+class function: pass
+class int: pass
+class str: pass
+class unicode: pass
+class bool: pass
+
+class BaseException: pass
diff --git a/test-data/unit/fixtures/f_string.pyi b/test-data/unit/fixtures/f_string.pyi
new file mode 100644
index 0000000..78d39ae
--- /dev/null
+++ b/test-data/unit/fixtures/f_string.pyi
@@ -0,0 +1,36 @@
+# Builtins stub used for format-string-related test cases.
+# We need str and list, and str needs join and format methods.
+
+from typing import TypeVar, Generic, Iterable, Iterator, List, overload
+
+T = TypeVar('T')
+
+class object:
+    def __init__(self): pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class ellipsis: pass
+
+class list(Iterable[T], Generic[T]):
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, x: Iterable[T]) -> None: pass
+    def append(self, x: T) -> None: pass
+
+class tuple(Generic[T]): pass
+
+class function: pass
+class int:
+    def __add__(self, i: int) -> int: pass
+
+class float: pass
+class bool(int): pass
+
+class str:
+    def __add__(self, s: str) -> str: pass
+    def format(self, *args) -> str: pass
+    def join(self, l: List[str]) -> str: pass
+
diff --git a/test-data/unit/fixtures/fine_grained.pyi b/test-data/unit/fixtures/fine_grained.pyi
new file mode 100644
index 0000000..83429cd
--- /dev/null
+++ b/test-data/unit/fixtures/fine_grained.pyi
@@ -0,0 +1,26 @@
+# Small stub for fine-grained incremental checking test cases
+#
+# TODO: Migrate to regular stubs once fine-grained incremental is robust
+#       enough to handle them.
+
+import types
+
+class Any: pass
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x: Any) -> None: pass
+
+class int:
+    def __add__(self, other: 'int') -> 'int': pass
+class str:
+    def __add__(self, other: 'str') -> 'str': pass
+
+class float: pass
+class bytes: pass
+class tuple: pass
+class function: pass
+class ellipsis: pass
+class list: pass
diff --git a/test-data/unit/fixtures/float.pyi b/test-data/unit/fixtures/float.pyi
new file mode 100644
index 0000000..38bdc08
--- /dev/null
+++ b/test-data/unit/fixtures/float.pyi
@@ -0,0 +1,31 @@
+Any = 0
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x: Any) -> None: pass
+
+class str:
+    def __add__(self, other: 'str') -> 'str': pass
+    def __rmul__(self, n: int) -> str: ...
+
+class bytes: pass
+
+class tuple: pass
+class function: pass
+
+class ellipsis: pass
+
+
+class int:
+    def __float__(self) -> float: ...
+    def __int__(self) -> int: ...
+    def __mul__(self, x: int) -> int: ...
+    def __rmul__(self, x: int) -> int: ...
+
+class float:
+    def __float__(self) -> float: ...
+    def __int__(self) -> int: ...
+    def __mul__(self, x: float) -> float: ...
+    def __rmul__(self, x: float) -> float: ...
diff --git a/test-data/unit/fixtures/floatdict.pyi b/test-data/unit/fixtures/floatdict.pyi
new file mode 100644
index 0000000..9a34f8d
--- /dev/null
+++ b/test-data/unit/fixtures/floatdict.pyi
@@ -0,0 +1,63 @@
+from typing import TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union
+
+T = TypeVar('T')
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+
+Any = 0
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x: Any) -> None: pass
+
+class str:
+    def __add__(self, other: 'str') -> 'str': pass
+    def __rmul__(self, n: int) -> str: ...
+
+class bytes: pass
+
+class tuple: pass
+class function: pass
+
+class ellipsis: pass
+
+class list(Iterable[T], Generic[T]):
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, x: Iterable[T]) -> None: pass
+    def __iter__(self) -> Iterator[T]: pass
+    def __add__(self, x: list[T]) -> list[T]: pass
+    def __mul__(self, x: int) -> list[T]: pass
+    def __getitem__(self, x: int) -> T: pass
+    def append(self, x: T) -> None: pass
+    def extend(self, x: Iterable[T]) -> None: pass
+
+class dict(Iterable[KT], Mapping[KT, VT], Generic[KT, VT]):
+    @overload
+    def __init__(self, **kwargs: VT) -> None: pass
+    @overload
+    def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass
+    def __setitem__(self, k: KT, v: VT) -> None: pass
+    def __getitem__(self, k: KT) -> VT: pass
+    def __iter__(self) -> Iterator[KT]: pass
+    def update(self, a: Mapping[KT, VT]) -> None: pass
+    @overload
+    def get(self, k: KT) -> Optional[VT]: pass
+    @overload
+    def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass
+
+
+class int:
+    def __float__(self) -> float: ...
+    def __int__(self) -> int: ...
+    def __mul__(self, x: int) -> int: ...
+    def __rmul__(self, x: int) -> int: ...
+
+class float:
+    def __float__(self) -> float: ...
+    def __int__(self) -> int: ...
+    def __mul__(self, x: float) -> float: ...
+    def __rmul__(self, x: float) -> float: ...
diff --git a/test-data/unit/fixtures/for.pyi b/test-data/unit/fixtures/for.pyi
new file mode 100644
index 0000000..4762806
--- /dev/null
+++ b/test-data/unit/fixtures/for.pyi
@@ -0,0 +1,19 @@
+# builtins stub used in for statement test cases
+
+from typing import TypeVar, Generic, Iterable, Iterator, Generator
+from abc import abstractmethod, ABCMeta
+
+t = TypeVar('t')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type: pass
+class tuple: pass
+class function: pass
+class bool: pass
+class int: pass # for convenience
+class str: pass # for convenience
+
+class list(Iterable[t], Generic[t]):
+    def __iter__(self) -> Iterator[t]: pass
diff --git a/test-data/unit/fixtures/function.pyi b/test-data/unit/fixtures/function.pyi
new file mode 100644
index 0000000..c00a784
--- /dev/null
+++ b/test-data/unit/fixtures/function.pyi
@@ -0,0 +1,7 @@
+class object:
+    def __init__(self): pass
+
+class type: pass
+class function: pass
+class int: pass
+class str: pass
diff --git a/test-data/unit/fixtures/isinstance.pyi b/test-data/unit/fixtures/isinstance.pyi
new file mode 100644
index 0000000..ded946c
--- /dev/null
+++ b/test-data/unit/fixtures/isinstance.pyi
@@ -0,0 +1,24 @@
+from typing import Tuple, TypeVar, Generic, Union
+
+T = TypeVar('T')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class tuple(Generic[T]): pass
+
+class function: pass
+
+def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass
+def issubclass(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass
+
+class int:
+    def __add__(self, other: 'int') -> 'int': pass
+class float: pass
+class bool(int): pass
+class str:
+    def __add__(self, other: 'str') -> 'str': pass
+class ellipsis: pass
diff --git a/test-data/unit/fixtures/isinstancelist.pyi b/test-data/unit/fixtures/isinstancelist.pyi
new file mode 100644
index 0000000..5ee49b8
--- /dev/null
+++ b/test-data/unit/fixtures/isinstancelist.pyi
@@ -0,0 +1,47 @@
+from typing import Iterable, Iterator, TypeVar, List, Mapping, overload, Tuple, Set, Union
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class tuple: pass
+class function: pass
+class ellipsis: pass
+
+def isinstance(x: object, t: Union[type, Tuple]) -> bool: pass
+def issubclass(x: object, t: Union[type, Tuple]) -> bool: pass
+
+class int:
+    def __add__(self, x: int) -> int: pass
+class bool(int): pass
+class str:
+    def __add__(self, x: str) -> str: pass
+    def __getitem__(self, x: int) -> str: pass
+
+T = TypeVar('T')
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+
+class list(Iterable[T]):
+    def __iter__(self) -> Iterator[T]: pass
+    def __mul__(self, x: int) -> list[T]: pass
+    def __setitem__(self, x: int, v: T) -> None: pass
+    def __getitem__(self, x: int) -> T: pass
+    def __add__(self, x: List[T]) -> T: pass
+
+class dict(Iterable[KT], Mapping[KT, VT]):
+    @overload
+    def __init__(self, **kwargs: VT) -> None: pass
+    @overload
+    def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass
+    def __setitem__(self, k: KT, v: VT) -> None: pass
+    def __iter__(self) -> Iterator[KT]: pass
+    def update(self, a: Mapping[KT, VT]) -> None: pass
+
+class set(Iterable[T]):
+    def __iter__(self) -> Iterator[T]: pass
+    def add(self, x: T) -> None: pass
+    def discard(self, x: T) -> None: pass
+    def update(self, x: Set[T]) -> None: pass
diff --git a/test-data/unit/fixtures/list.pyi b/test-data/unit/fixtures/list.pyi
new file mode 100644
index 0000000..d5d1000
--- /dev/null
+++ b/test-data/unit/fixtures/list.pyi
@@ -0,0 +1,32 @@
+# Builtins stub used in list-related test cases.
+
+from typing import TypeVar, Generic, Iterable, Iterator, overload
+
+T = TypeVar('T')
+
+class object:
+    def __init__(self): pass
+
+class type: pass
+class ellipsis: pass
+
+class list(Iterable[T], Generic[T]):
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, x: Iterable[T]) -> None: pass
+    def __iter__(self) -> Iterator[T]: pass
+    def __add__(self, x: list[T]) -> list[T]: pass
+    def __mul__(self, x: int) -> list[T]: pass
+    def __getitem__(self, x: int) -> T: pass
+    def append(self, x: T) -> None: pass
+    def extend(self, x: Iterable[T]) -> None: pass
+
+class tuple(Generic[T]): pass
+class function: pass
+class int: pass
+class float: pass
+class str: pass
+class bool(int): pass
+
+property = object() # Dummy definition.
diff --git a/test-data/unit/fixtures/module.pyi b/test-data/unit/fixtures/module.pyi
new file mode 100644
index 0000000..44a4dfe
--- /dev/null
+++ b/test-data/unit/fixtures/module.pyi
@@ -0,0 +1,21 @@
+from typing import Any, Dict, Generic, TypeVar, Sequence
+from types import ModuleType
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class list(Generic[T], Sequence[T]): pass
+
+class object:
+    def __init__(self) -> None: pass
+class type: pass
+class function: pass
+class int: pass
+class str: pass
+class bool: pass
+class tuple: pass
+class dict(Generic[T, S]): pass
+class ellipsis: pass
+
+classmethod = object()
+staticmethod = object()
diff --git a/test-data/unit/fixtures/module_all.pyi b/test-data/unit/fixtures/module_all.pyi
new file mode 100644
index 0000000..2ab6bc6
--- /dev/null
+++ b/test-data/unit/fixtures/module_all.pyi
@@ -0,0 +1,18 @@
+from typing import Generic, Sequence, TypeVar
+from types import ModuleType
+
+_T = TypeVar('_T')
+
+class object:
+    def __init__(self) -> None: pass
+class type: pass
+class function: pass
+class int: pass
+class str: pass
+class bool: pass
+class list(Generic[_T], Sequence[_T]):
+    def append(self, x: _T): pass
+    def extend(self, x: Sequence[_T]): pass
+    def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass
+class tuple: pass
+class ellipsis: pass
diff --git a/test-data/unit/fixtures/module_all_python2.pyi b/test-data/unit/fixtures/module_all_python2.pyi
new file mode 100644
index 0000000..5a48e60
--- /dev/null
+++ b/test-data/unit/fixtures/module_all_python2.pyi
@@ -0,0 +1,15 @@
+from typing import Generic, Sequence, TypeVar
+_T = TypeVar('_T')
+
+class object:
+    def __init__(self) -> None: pass
+class type: pass
+class function: pass
+class int: pass
+class str: pass
+class unicode: pass
+class list(Generic[_T], Sequence[_T]):
+    def append(self, x: _T): pass
+    def extend(self, x: Sequence[_T]): pass
+    def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass
+class tuple: pass
diff --git a/test-data/unit/fixtures/ops.pyi b/test-data/unit/fixtures/ops.pyi
new file mode 100644
index 0000000..ae48a1f
--- /dev/null
+++ b/test-data/unit/fixtures/ops.pyi
@@ -0,0 +1,57 @@
+from typing import overload, Any, Generic, Sequence, Tuple, TypeVar
+
+Tco = TypeVar('Tco', covariant=True)
+
+# This is an extension of the transform builtins stub with additional operations.
+
+class object:
+    def __init__(self) -> None: pass
+    def __eq__(self, o: 'object') -> 'bool': pass
+    def __ne__(self, o: 'object') -> 'bool': pass
+
+class type: pass
+
+class slice: pass
+
+class tuple(Sequence[Tco], Generic[Tco]):
+    def __getitem__(self, x: int) -> Tco: pass
+    def __eq__(self, x: object) -> bool: pass
+    def __ne__(self, x: object) -> bool: pass
+    def __lt__(self, x: 'tuple') -> bool: pass
+    def __le__(self, x: 'tuple') -> bool: pass
+    def __gt__(self, x: 'tuple') -> bool: pass
+    def __ge__(self, x: 'tuple') -> bool: pass
+
+class function: pass
+
+class bool: pass
+
+class str:
+    def __init__(self, x: 'int') -> None: pass
+    def __add__(self, x: 'str') -> 'str': pass
+    def __eq__(self, x: object) -> bool: pass
+    def startswith(self, x: 'str') -> bool: pass
+
+class unicode: pass
+
+class int:
+    def __add__(self, x: 'int') -> 'int': pass
+    def __sub__(self, x: 'int') -> 'int': pass
+    def __mul__(self, x: 'int') -> 'int': pass
+    def __mod__(self, x: 'int') -> 'int': pass
+    def __floordiv__(self, x: 'int') -> 'int': pass
+    def __pow__(self, x: 'int') -> Any: pass
+    def __pos__(self) -> 'int': pass
+    def __neg__(self) -> 'int': pass
+    def __eq__(self, x: object) -> bool: pass
+    def __ne__(self, x: object) -> bool: pass
+    def __lt__(self, x: 'int') -> bool: pass
+    def __le__(self, x: 'int') -> bool: pass
+    def __gt__(self, x: 'int') -> bool: pass
+    def __ge__(self, x: 'int') -> bool: pass
+
+class float: pass
+
+class BaseException: pass
+
+def __print(a1=None, a2=None, a3=None, a4=None): pass
diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi
new file mode 100644
index 0000000..4b5611b
--- /dev/null
+++ b/test-data/unit/fixtures/primitives.pyi
@@ -0,0 +1,21 @@
+# builtins stub with non-generic primitive types
+
+class object:
+    def __init__(self) -> None: pass
+    def __str__(self) -> str: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class int:
+    def __add__(self, i: int) -> int: pass
+class float: pass
+class complex: pass
+class bool(int): pass
+class str:
+    def __add__(self, s: str) -> str: pass
+    def format(self, *args) -> str: pass
+class bytes: pass
+class bytearray: pass
+class tuple: pass
+class function: pass
diff --git a/test-data/unit/fixtures/property.pyi b/test-data/unit/fixtures/property.pyi
new file mode 100644
index 0000000..929317e
--- /dev/null
+++ b/test-data/unit/fixtures/property.pyi
@@ -0,0 +1,20 @@
+import typing
+
+_T = typing.TypeVar('_T')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x: typing.Any) -> None: pass
+
+class function: pass
+
+property = object() # Dummy definition.
+
+class int: pass
+class str: pass
+class bytes: pass
+class bool: pass
+
+class tuple(typing.Generic[_T]): pass
diff --git a/test-data/unit/fixtures/python2.pyi b/test-data/unit/fixtures/python2.pyi
new file mode 100644
index 0000000..61e48be
--- /dev/null
+++ b/test-data/unit/fixtures/python2.pyi
@@ -0,0 +1,18 @@
+from typing import Generic, Iterable, TypeVar
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class function: pass
+
+class int: pass
+class str: pass
+class unicode: pass
+
+T = TypeVar('T')
+class list(Iterable[T], Generic[T]): pass
+
+# Definition of None is implicit
diff --git a/test-data/unit/fixtures/set.pyi b/test-data/unit/fixtures/set.pyi
new file mode 100644
index 0000000..cb8bbcf
--- /dev/null
+++ b/test-data/unit/fixtures/set.pyi
@@ -0,0 +1,21 @@
+# Builtins stub used in set-related test cases.
+
+from typing import TypeVar, Generic, Iterator, Iterable, Set
+
+T = TypeVar('T')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type: pass
+class tuple: pass
+class function: pass
+
+class int: pass
+class str: pass
+
+class set(Iterable[T], Generic[T]):
+    def __iter__(self) -> Iterator[T]: pass
+    def add(self, x: T) -> None: pass
+    def discard(self, x: T) -> None: pass
+    def update(self, x: Set[T]) -> None: pass
diff --git a/test-data/unit/fixtures/slice.pyi b/test-data/unit/fixtures/slice.pyi
new file mode 100644
index 0000000..c01ffbb
--- /dev/null
+++ b/test-data/unit/fixtures/slice.pyi
@@ -0,0 +1,13 @@
+# Builtins stub used in slicing test cases.
+
+class object:
+    def __init__(self): pass
+
+class type: pass
+class tuple: pass
+class function: pass
+
+class int: pass
+class str: pass
+
+class slice: pass
diff --git a/test-data/unit/fixtures/staticmethod.pyi b/test-data/unit/fixtures/staticmethod.pyi
new file mode 100644
index 0000000..5f1013f
--- /dev/null
+++ b/test-data/unit/fixtures/staticmethod.pyi
@@ -0,0 +1,19 @@
+import typing
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class function: pass
+
+staticmethod = object() # Dummy definition.
+
+class int:
+    @staticmethod
+    def from_bytes(bytes: bytes, byteorder: str) -> int: pass
+
+class str: pass
+class unicode: pass
+class bytes: pass
diff --git a/test-data/unit/fixtures/transform.pyi b/test-data/unit/fixtures/transform.pyi
new file mode 100644
index 0000000..afdc2bf
--- /dev/null
+++ b/test-data/unit/fixtures/transform.pyi
@@ -0,0 +1,30 @@
+# Builtins stubs used implicitly in program transformation test cases.
+
+class object:
+    def __init__(self) -> None: pass
+
+class type: pass
+
+# str is handy for debugging; allows outputting messages.
+class str: pass
+
+# Primitive types int/float have special coercion behaviour (they may have
+# a different representation from ordinary values).
+
+class int: pass
+
+class float: pass
+
+
+# The functions below are special functions used in test cases; their
+# implementations are actually in the __dynchk module, but they are defined
+# here so that the semantic analyzer and the type checker are happy without
+# having to analyze the entire __dynchk module all the time.
+#
+# The transformation implementation has special case handling for these
+# functions; it's a bit ugly but it works for now.
+
+def __print(a1=None, a2=None, a3=None, a4=None):
+    # Do not use *args since this would require list and break many test
+    # cases.
+    pass
diff --git a/test-data/unit/fixtures/tuple-simple.pyi b/test-data/unit/fixtures/tuple-simple.pyi
new file mode 100644
index 0000000..b195dfa
--- /dev/null
+++ b/test-data/unit/fixtures/tuple-simple.pyi
@@ -0,0 +1,20 @@
+# Builtins stub used in some tuple-related test cases.
+#
+# This is a simpler version of tuple.pyi which is useful
+# and makes some test cases easier to write/debug.
+
+from typing import Iterable, TypeVar, Generic
+
+T = TypeVar('T')
+
+class object:
+    def __init__(self): pass
+
+class type: pass
+class tuple(Generic[T]):
+    def __getitem__(self, x: int) -> T: pass
+class function: pass
+
+# We need int for indexing tuples.
+class int: pass
+class str: pass # For convenience
diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi
new file mode 100644
index 0000000..4e53d12
--- /dev/null
+++ b/test-data/unit/fixtures/tuple.pyi
@@ -0,0 +1,33 @@
+# Builtins stub used in tuple-related test cases.
+
+from typing import Iterable, Iterator, TypeVar, Generic, Sequence, Any
+
+Tco = TypeVar('Tco', covariant=True)
+
+class object:
+    def __init__(self): pass
+
+class type:
+    def __init__(self, *a) -> None: pass
+    def __call__(self, *a) -> object: pass
+class tuple(Sequence[Tco], Generic[Tco]):
+    def __iter__(self) -> Iterator[Tco]: pass
+    def __getitem__(self, x: int) -> Tco: pass
+    def count(self, obj: Any) -> int: pass
+class function: pass
+
+# We need int and slice for indexing tuples.
+class int: pass
+class slice: pass
+class bool: pass
+class str: pass # For convenience
+class unicode: pass
+
+T = TypeVar('T')
+
+class list(Sequence[T], Generic[T]): pass
+def isinstance(x: object, t: type) -> bool: pass
+
+def sum(iterable: Iterable[T], start: T = None) -> T: pass
+
+class BaseException: pass
diff --git a/test-data/unit/fixtures/type.pyi b/test-data/unit/fixtures/type.pyi
new file mode 100644
index 0000000..7600021
--- /dev/null
+++ b/test-data/unit/fixtures/type.pyi
@@ -0,0 +1,21 @@
+# builtins stub used in type-related test cases.
+
+from typing import Generic, TypeVar, List
+
+T = TypeVar('T')
+
+class object:
+    def __init__(self) -> None: pass
+    def __str__(self) -> 'str': pass
+
+class list(Generic[T]): pass
+
+class type:
+    def mro(self) -> List['type']: pass
+
+class tuple: pass
+class function: pass
+class bool: pass
+class int: pass
+class str: pass
+class unicode: pass
\ No newline at end of file
diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi
new file mode 100644
index 0000000..d43e340
--- /dev/null
+++ b/test-data/unit/fixtures/typing-full.pyi
@@ -0,0 +1,120 @@
+# More complete stub for typing module.
+#
+# Add [typing fixtures/typing-full.pyi] to a particular test case to use this
+# instead of lib-stub/typing.pyi.
+#
+# Many of the definitions have special handling in the type checker, so they
+# can just be initialized to anything.
+
+from abc import abstractmethod
+
+class GenericMeta(type): pass
+
+cast = 0
+overload = 0
+Any = 0
+Union = 0
+Optional = 0
+TypeVar = 0
+Generic = 0
+Tuple = 0
+Callable = 0
+_promote = 0
+NamedTuple = 0
+Type = 0
+no_type_check = 0
+ClassVar = 0
+NoReturn = 0
+NewType = 0
+
+# Type aliases.
+List = 0
+Dict = 0
+Set = 0
+
+T = TypeVar('T')
+U = TypeVar('U')
+V = TypeVar('V')
+S = TypeVar('S')
+
+class Container(Generic[T]):
+    @abstractmethod
+    # Use int because bool isn't in the default test builtins
+    def __contains__(self, arg: T) -> int: pass
+
+class Sized:
+    @abstractmethod
+    def __len__(self) -> int: pass
+
+class Iterable(Generic[T]):
+    @abstractmethod
+    def __iter__(self) -> 'Iterator[T]': pass
+
+class Iterator(Iterable[T], Generic[T]):
+    @abstractmethod
+    def __next__(self) -> T: pass
+
+class Generator(Iterator[T], Generic[T, U, V]):
+    @abstractmethod
+    def send(self, value: U) -> T: pass
+
+    @abstractmethod
+    def throw(self, typ: Any, val: Any=None, tb=None) -> None: pass
+
+    @abstractmethod
+    def close(self) -> None: pass
+
+    @abstractmethod
+    def __iter__(self) -> 'Generator[T, U, V]': pass
+
+class AsyncGenerator(AsyncIterator[T], Generic[T, U]):
+    @abstractmethod
+    def __anext__(self) -> Awaitable[T]: pass
+
+    @abstractmethod
+    def asend(self, value: U) -> Awaitable[T]: pass
+
+    @abstractmethod
+    def athrow(self, typ: Any, val: Any=None, tb: Any=None) -> Awaitable[T]: pass
+
+    @abstractmethod
+    def aclose(self) -> Awaitable[T]: pass
+
+    @abstractmethod
+    def __aiter__(self) -> 'AsyncGenerator[T, U]': pass
+
+class Awaitable(Generic[T]):
+    @abstractmethod
+    def __await__(self) -> Generator[Any, Any, T]: pass
+
+class AwaitableGenerator(Generator[T, U, V], Awaitable[V], Generic[T, U, V, S]):
+    pass
+
+class AsyncIterable(Generic[T]):
+    @abstractmethod
+    def __aiter__(self) -> 'AsyncIterator[T]': pass
+
+class AsyncIterator(AsyncIterable[T], Generic[T]):
+    def __aiter__(self) -> 'AsyncIterator[T]': return self
+    @abstractmethod
+    def __anext__(self) -> Awaitable[T]: pass
+
+class Sequence(Iterable[T], Generic[T]):
+    @abstractmethod
+    def __getitem__(self, n: Any) -> T: pass
+
+class Mapping(Iterable[T], Sized, Generic[T, U]):
+    @overload
+    def get(self, k: T) -> Optional[U]: ...
+    @overload
+    def get(self, k: T, default: Union[U, V]) -> Union[U, V]: ...
+    def values(self) -> Iterable[U]: pass  # Approximate return type
+    def __len__(self) -> int: ...
+
+class MutableMapping(Mapping[T, U]): pass
+
+class ContextManager(Generic[T]):
+    def __enter__(self) -> T: ...
+    def __exit__(self, exc_type, exc_value, traceback): ...
+
+TYPE_CHECKING = 1
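For reference, the [typing fixtures/typing-full.pyi] directive described at the top of this stub is written inside an individual test case, alongside the code being checked. A minimal sketch of such a case follows; the case name and the checked code are illustrative assumptions rather than part of this patch, and the empty [out] section is assumed to mean that no errors are expected:

[case testUsesFullTypingStub]
from typing import ContextManager
def f(cm: ContextManager[int]) -> int:
    return cm.__enter__()
[typing fixtures/typing-full.pyi]
[out]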
diff --git a/test-data/unit/fixtures/union.pyi b/test-data/unit/fixtures/union.pyi
new file mode 100644
index 0000000..78a41f9
--- /dev/null
+++ b/test-data/unit/fixtures/union.pyi
@@ -0,0 +1,18 @@
+# Builtins stub used in union-related test cases.
+
+from isinstance import isinstance
+from typing import Iterable, TypeVar
+
+class object:
+    def __init__(self): pass
+
+class type: pass
+class function: pass
+
+# Current tuple types get special treatment in the type checker, so there
+# is no need for type arguments here.
+class tuple: pass
+
+# We need int for indexing tuples.
+class int: pass
+class str: pass # For convenience
diff --git a/test-data/unit/lib-stub/__builtin__.pyi b/test-data/unit/lib-stub/__builtin__.pyi
new file mode 100644
index 0000000..9a18087
--- /dev/null
+++ b/test-data/unit/lib-stub/__builtin__.pyi
@@ -0,0 +1,27 @@
+Any = 0
+
+class object:
+    def __init__(self):
+        # type: () -> None
+        pass
+
+class type:
+    def __init__(self, x):
+        # type: (Any) -> None
+        pass
+
+# These are provided here for convenience.
+class int: pass
+class float: pass
+
+class str: pass
+class unicode: pass
+
+class tuple: pass
+class function: pass
+
+class ellipsis: pass
+
+def print(*args, end=''): pass
+
+# Definition of None is implicit
diff --git a/test-data/unit/lib-stub/abc.pyi b/test-data/unit/lib-stub/abc.pyi
new file mode 100644
index 0000000..9208f42
--- /dev/null
+++ b/test-data/unit/lib-stub/abc.pyi
@@ -0,0 +1,3 @@
+class ABCMeta(type): pass
+abstractmethod = object()
+abstractproperty = object()
diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi
new file mode 100644
index 0000000..457bea0
--- /dev/null
+++ b/test-data/unit/lib-stub/builtins.pyi
@@ -0,0 +1,21 @@
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x: object) -> None: pass
+
+# These are provided here for convenience.
+class int:
+    def __add__(self, other: 'int') -> 'int': pass
+class float: pass
+
+class str:
+    def __add__(self, other: 'str') -> 'str': pass
+class bytes: pass
+
+class tuple: pass
+class function: pass
+
+class ellipsis: pass
+
+# Definition of None is implicit
diff --git a/test-data/unit/lib-stub/collections.pyi b/test-data/unit/lib-stub/collections.pyi
new file mode 100644
index 0000000..00b7cea
--- /dev/null
+++ b/test-data/unit/lib-stub/collections.pyi
@@ -0,0 +1,3 @@
+import typing
+
+namedtuple = object()
diff --git a/test-data/unit/lib-stub/enum.pyi b/test-data/unit/lib-stub/enum.pyi
new file mode 100644
index 0000000..facf519
--- /dev/null
+++ b/test-data/unit/lib-stub/enum.pyi
@@ -0,0 +1,28 @@
+from typing import Any, TypeVar, Union
+
+class Enum:
+    def __new__(cls, value: Any) -> None: pass
+    def __repr__(self) -> str: pass
+    def __str__(self) -> str: pass
+    def __format__(self, format_spec: str) -> str: pass
+    def __hash__(self) -> Any: pass
+    def __reduce_ex__(self, proto: Any) -> Any: pass
+
+    name = ''  # type: str
+    value = None  # type: Any
+
+class IntEnum(int, Enum):
+    value = 0  # type: int
+
+_T = TypeVar('_T')
+
+def unique(enumeration: _T) -> _T: pass
+
+# In reality, Flag and IntFlag are Python 3.6+ only.
+
+class Flag(Enum):
+    def __or__(self: _T, other: Union[int, _T]) -> _T: pass
+
+
+class IntFlag(int, Flag):
+    def __and__(self: _T, other: Union[int, _T]) -> _T: pass
diff --git a/test-data/unit/lib-stub/mypy_extensions.pyi b/test-data/unit/lib-stub/mypy_extensions.pyi
new file mode 100644
index 0000000..a604c96
--- /dev/null
+++ b/test-data/unit/lib-stub/mypy_extensions.pyi
@@ -0,0 +1,21 @@
+from typing import Dict, Type, TypeVar, Optional, Any
+
+_T = TypeVar('_T')
+
+
+def Arg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
+
+def DefaultArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
+
+def NamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
+
+def DefaultNamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
+
+def VarArg(type: _T = ...) -> _T: ...
+
+def KwArg(type: _T = ...) -> _T: ...
+
+
+def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) -> Type[dict]: ...
+
+class NoReturn: pass
diff --git a/test-data/unit/lib-stub/six.pyi b/test-data/unit/lib-stub/six.pyi
new file mode 100644
index 0000000..a6faa32
--- /dev/null
+++ b/test-data/unit/lib-stub/six.pyi
@@ -0,0 +1,2 @@
+from typing import Type
+def with_metaclass(mcls: Type[type], *args: type) -> type: pass
diff --git a/test-data/unit/lib-stub/sys.pyi b/test-data/unit/lib-stub/sys.pyi
new file mode 100644
index 0000000..3959cb0
--- /dev/null
+++ b/test-data/unit/lib-stub/sys.pyi
@@ -0,0 +1,2 @@
+version_info = (0, 0, 0, '', 0)
+platform = ''
diff --git a/test-data/unit/lib-stub/types.pyi b/test-data/unit/lib-stub/types.pyi
new file mode 100644
index 0000000..02113ae
--- /dev/null
+++ b/test-data/unit/lib-stub/types.pyi
@@ -0,0 +1,10 @@
+from typing import TypeVar
+
+_T = TypeVar('_T')
+
+def coroutine(func: _T) -> _T: pass
+
+class bool: ...
+
+class ModuleType:
+    __file__ = ... # type: str
diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi
new file mode 100644
index 0000000..02412c7
--- /dev/null
+++ b/test-data/unit/lib-stub/typing.pyi
@@ -0,0 +1,64 @@
+# Stub for typing module. Many of the definitions have special handling in
+# the type checker, so they can just be initialized to anything.
+
+from abc import abstractmethod
+
+class GenericMeta(type): pass
+
+cast = 0
+overload = 0
+Any = 0
+Union = 0
+Optional = 0
+TypeVar = 0
+Generic = 0
+Tuple = 0
+Callable = 0
+_promote = 0
+NamedTuple = 0
+Type = 0
+no_type_check = 0
+ClassVar = 0
+NoReturn = 0
+NewType = 0
+
+# Type aliases.
+List = 0
+Dict = 0
+Set = 0
+
+T = TypeVar('T')
+U = TypeVar('U')
+V = TypeVar('V')
+S = TypeVar('S')
+
+class Container(Generic[T]):
+    @abstractmethod
+    # Use int because bool isn't in the default test builtins
+    def __contains__(self, arg: T) -> int: pass
+
+class Sized:
+    @abstractmethod
+    def __len__(self) -> int: pass
+
+class Iterable(Generic[T]):
+    @abstractmethod
+    def __iter__(self) -> 'Iterator[T]': pass
+
+class Iterator(Iterable[T], Generic[T]):
+    @abstractmethod
+    def __next__(self) -> T: pass
+
+class Generator(Iterator[T], Generic[T, U, V]):
+    @abstractmethod
+    def __iter__(self) -> 'Generator[T, U, V]': pass
+
+class Sequence(Iterable[T], Generic[T]):
+    @abstractmethod
+    def __getitem__(self, n: Any) -> T: pass
+
+class Mapping(Generic[T, U]): pass
+
+class MutableMapping(Generic[T, U]): pass
+
+TYPE_CHECKING = 1
diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test
new file mode 100644
index 0000000..a6d2a42
--- /dev/null
+++ b/test-data/unit/merge.test
@@ -0,0 +1,608 @@
+-- Test cases for AST merge (used for fine-grained incremental checking)
+
+[case testFunction]
+import target
+[file target.py]
+def f() -> int:
+    pass
+[file target.py.next]
+def f() -> int:
+    pass
+[out]
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  FuncDef:1<2>(
+    f
+    def () -> builtins.int<3>
+    Block:1<4>(
+      PassStmt:2<5>())))
+==>
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  FuncDef:1<2>(
+    f
+    def () -> builtins.int<3>
+    Block:1<6>(
+      PassStmt:2<7>())))
+
+[case testClass]
+import target
+[file target.py]
+class A:
+    def f(self, x: str) -> int:
+        pass
+[file target.py.next]
+class A:
+    def f(self, x: int) -> str:
+        pass
+[out]
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  ClassDef:1<2>(
+    A
+    FuncDef:2<3>(
+      f
+      Args(
+        Var(self)
+        Var(x))
+      def (self: target.A<4>, x: builtins.str<5>) -> builtins.int<6>
+      Block:2<7>(
+        PassStmt:3<8>()))))
+==>
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  ClassDef:1<9>(
+    A
+    FuncDef:2<3>(
+      f
+      Args(
+        Var(self)
+        Var(x))
+      def (self: target.A<4>, x: builtins.int<6>) -> builtins.str<5>
+      Block:2<10>(
+        PassStmt:3<11>()))))
+
+[case testClass_typeinfo]
+import target
+[file target.py]
+class A:
+    def f(self, x: str) -> int: pass
+    def g(self, x: str) -> int: pass
+[file target.py.next]
+class A:
+    def f(self, x: int) -> str: pass
+    def h(self, x: int) -> str: pass
+[out]
+TypeInfo<0>(
+  Name(target.A)
+  Bases(builtins.object<1>)
+  Mro(target.A<0>, builtins.object<1>)
+  Names(
+    f<2>
+    g<3>))
+==>
+TypeInfo<0>(
+  Name(target.A)
+  Bases(builtins.object<1>)
+  Mro(target.A<0>, builtins.object<1>)
+  Names(
+    f<2>
+    h<4>))
+
+[case testConstructInstance]
+import target
+[file target.py]
+class A:
+    def f(self) -> B:
+        return B()
+class B: pass
+[file target.py.next]
+class B: pass
+class A:
+    def f(self) -> B:
+        1
+        return B()
+[out]
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  ClassDef:1<2>(
+    A
+    FuncDef:2<3>(
+      f
+      Args(
+        Var(self))
+      def (self: target.A<4>) -> target.B<5>
+      Block:2<6>(
+        ReturnStmt:3<7>(
+          CallExpr:3<8>(
+            NameExpr(B [target.B<5>])
+            Args())))))
+  ClassDef:4<9>(
+    B
+    PassStmt:4<10>()))
+==>
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  ClassDef:1<11>(
+    B
+    PassStmt:1<12>())
+  ClassDef:2<13>(
+    A
+    FuncDef:3<3>(
+      f
+      Args(
+        Var(self))
+      def (self: target.A<4>) -> target.B<5>
+      Block:3<14>(
+        ExpressionStmt:4<15>(
+          IntExpr(1))
+        ReturnStmt:5<16>(
+          CallExpr:5<17>(
+            NameExpr(B [target.B<5>])
+            Args()))))))
+
+[case testCallMethod]
+import target
+[file target.py]
+class A:
+    def f(self) -> None:
+        self.f()
+[file target.py.next]
+class A:
+    def f(self) -> None:
+        self.f()
+[out]
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  ClassDef:1<2>(
+    A
+    FuncDef:2<3>(
+      f
+      Args(
+        Var(self))
+      def (self: target.A<4>)
+      Block:2<5>(
+        ExpressionStmt:3<6>(
+          CallExpr:3<7>(
+            MemberExpr:3<8>(
+              NameExpr(self [l<9>])
+              f)
+            Args()))))))
+==>
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  ClassDef:1<10>(
+    A
+    FuncDef:2<3>(
+      f
+      Args(
+        Var(self))
+      def (self: target.A<4>)
+      Block:2<11>(
+        ExpressionStmt:3<12>(
+          CallExpr:3<13>(
+            MemberExpr:3<14>(
+              NameExpr(self [l<15>])
+              f)
+            Args()))))))
+
+[case testClassAttribute]
+import target
+[file target.py]
+class A:
+    def f(self) -> None:
+        self.x = 1
+        self.x
+[file target.py.next]
+class A:
+    def f(self) -> None:
+        self.x = 1
+        self.x
+[out]
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  ClassDef:1<2>(
+    A
+    FuncDef:2<3>(
+      f
+      Args(
+        Var(self))
+      def (self: target.A<4>)
+      Block:2<5>(
+        AssignmentStmt:3<6>(
+          MemberExpr:3<8>(
+            NameExpr(self [l<9>])
+            x*<7>)
+          IntExpr(1))
+        ExpressionStmt:4<10>(
+          MemberExpr:4<11>(
+            NameExpr(self [l<9>])
+            x))))))
+==>
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  ClassDef:1<12>(
+    A
+    FuncDef:2<3>(
+      f
+      Args(
+        Var(self))
+      def (self: target.A<4>)
+      Block:2<13>(
+        AssignmentStmt:3<14>(
+          MemberExpr:3<15>(
+            NameExpr(self [l<16>])
+            x*<7>)
+          IntExpr(1))
+        ExpressionStmt:4<17>(
+          MemberExpr:4<18>(
+            NameExpr(self [l<16>])
+            x))))))
+
+[case testClassAttribute_typeinfo]
+import target
+[file target.py]
+class A:
+    def f(self) -> None:
+        self.x = 1
+        self.x
+        self.y = A()
+[file target.py.next]
+class A:
+    def f(self) -> None:
+        self.x = 1
+        self.x
+        self.y = A()
+[out]
+TypeInfo<0>(
+  Name(target.A)
+  Bases(builtins.object<1>)
+  Mro(target.A<0>, builtins.object<1>)
+  Names(
+    f<2>
+    x<3> (builtins.int<4>)
+    y<5> (target.A<0>)))
+==>
+TypeInfo<0>(
+  Name(target.A)
+  Bases(builtins.object<1>)
+  Mro(target.A<0>, builtins.object<1>)
+  Names(
+    f<2>
+    x<3> (builtins.int<4>)
+    y<5> (target.A<0>)))
+
+[case testFunction_symtable]
+import target
+[file target.py]
+def f() -> int:
+    pass
+[file target.py.next]
+def f() -> int:
+    pass
+[out]
+__main__:
+    target: MypyFile<0>
+target:
+    f: FuncDef<1>
+==>
+__main__:
+    target: MypyFile<0>
+target:
+    f: FuncDef<1>
+
+[case testClass_symtable]
+import target
+[file target.py]
+class A: pass
+class B: pass
+[file target.py.next]
+class A: pass
+class C: pass
+[out]
+__main__:
+    target: MypyFile<0>
+target:
+    A: TypeInfo<1>
+    B: TypeInfo<2>
+==>
+__main__:
+    target: MypyFile<0>
+target:
+    A: TypeInfo<1>
+    C: TypeInfo<3>
+
+[case testTopLevelExpression]
+import target
+[file target.py]
+class A: pass
+A()
+[file target.py.next]
+class A: pass
+class B: pass
+A()
+B()
+[out]
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  ClassDef:1<2>(
+    A
+    PassStmt:1<3>())
+  ExpressionStmt:2<4>(
+    CallExpr:2<5>(
+      NameExpr(A [target.A<6>])
+      Args())))
+==>
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  ClassDef:1<7>(
+    A
+    PassStmt:1<8>())
+  ClassDef:2<9>(
+    B
+    PassStmt:2<10>())
+  ExpressionStmt:3<11>(
+    CallExpr:3<12>(
+      NameExpr(A [target.A<6>])
+      Args()))
+  ExpressionStmt:4<13>(
+    CallExpr:4<14>(
+      NameExpr(B [target.B<15>])
+      Args())))
+
+[case testExpression_types]
+import target
+[file target.py]
+class A: pass
+def f(a: A) -> None:
+    1
+    a
+[file target.py.next]
+class A: pass
+def f(a: A) -> None:
+    a
+    1
+[out]
+## target
+IntExpr:3: builtins.int<0>
+NameExpr:4: target.A<1>
+==>
+## target
+NameExpr:3: target.A<1>
+IntExpr:4: builtins.int<0>
+
+[case testClassAttribute_types]
+import target
+[file target.py]
+class A:
+    def f(self) -> None:
+        self.x = A()
+        self.x
+        self.y = 1
+        self.y
+[file target.py.next]
+class A:
+    def f(self) -> None:
+        self.y = 1
+        self.y
+        self.x = A()
+        self.x
+[out]
+## target
+CallExpr:3: target.A<0>
+MemberExpr:3: target.A<0>
+NameExpr:3: def () -> target.A<0>
+NameExpr:3: target.A<0>
+MemberExpr:4: target.A<0>
+NameExpr:4: target.A<0>
+IntExpr:5: builtins.int<1>
+MemberExpr:5: builtins.int<1>
+NameExpr:5: target.A<0>
+MemberExpr:6: builtins.int<1>
+NameExpr:6: target.A<0>
+==>
+## target
+IntExpr:3: builtins.int<1>
+MemberExpr:3: builtins.int<1>
+NameExpr:3: target.A<0>
+MemberExpr:4: builtins.int<1>
+NameExpr:4: target.A<0>
+CallExpr:5: target.A<0>
+MemberExpr:5: target.A<0>
+NameExpr:5: def () -> target.A<0>
+NameExpr:5: target.A<0>
+MemberExpr:6: target.A<0>
+NameExpr:6: target.A<0>
+
+[case testMethod_types]
+import target
+[file target.py]
+class A:
+    def f(self) -> A:
+        return self.f()
+[file target.py.next]
+class A:
+    # Extra line to change line numbers
+    def f(self) -> A:
+        return self.f()
+[out]
+## target
+CallExpr:3: target.A<0>
+MemberExpr:3: def () -> target.A<0>
+NameExpr:3: target.A<0>
+==>
+## target
+CallExpr:4: target.A<0>
+MemberExpr:4: def () -> target.A<0>
+NameExpr:4: target.A<0>
+
+[case testRenameFunction]
+import target
+[file target.py]
+def f() -> int: pass
+[file target.py.next]
+def g() -> int: pass
+[out]
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  FuncDef:1<2>(
+    f
+    def () -> builtins.int<3>
+    Block:1<4>(
+      PassStmt:1<5>())))
+==>
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  FuncDef:1<6>(
+    g
+    def () -> builtins.int<3>
+    Block:1<7>(
+      PassStmt:1<8>())))
+
+[case testRenameFunction_symtable]
+import target
+[file target.py]
+def f() -> int: pass
+[file target.py.next]
+def g() -> int: pass
+[out]
+__main__:
+    target: MypyFile<0>
+target:
+    f: FuncDef<1>
+==>
+__main__:
+    target: MypyFile<0>
+target:
+    g: FuncDef<2>
+
+[case testMergeWithBaseClass_typeinfo]
+import target
+[file target.py]
+class A: pass
+class B(A):
+    def f(self) -> None: pass
+[file target.py.next]
+class C: pass
+class A: pass
+class B(A):
+    def f(self) -> None: pass
+[out]
+TypeInfo<0>(
+  Name(target.A)
+  Bases(builtins.object<1>)
+  Mro(target.A<0>, builtins.object<1>)
+  Names())
+TypeInfo<2>(
+  Name(target.B)
+  Bases(target.A<0>)
+  Mro(target.B<2>, target.A<0>, builtins.object<1>)
+  Names(
+    f<3>))
+==>
+TypeInfo<0>(
+  Name(target.A)
+  Bases(builtins.object<1>)
+  Mro(target.A<0>, builtins.object<1>)
+  Names())
+TypeInfo<2>(
+  Name(target.B)
+  Bases(target.A<0>)
+  Mro(target.B<2>, target.A<0>, builtins.object<1>)
+  Names(
+    f<3>))
+TypeInfo<4>(
+  Name(target.C)
+  Bases(builtins.object<1>)
+  Mro(target.C<4>, builtins.object<1>)
+  Names())
+
+[case testModuleAttribute]
+import target
+[file target.py]
+x = 1
+[file target.py.next]
+x = 2
+[out]
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  AssignmentStmt:1<2>(
+    NameExpr(x [target.x<3>])
+    IntExpr(1)
+    builtins.int<4>))
+==>
+MypyFile:1<0>(
+  Import:1(target))
+MypyFile:1<1>(
+  tmp/target.py
+  AssignmentStmt:1<5>(
+    NameExpr(x [target.x<3>])
+    IntExpr(2)
+    builtins.int<4>))
+
+[case testNestedClassMethod_typeinfo]
+import target
+[file target.py]
+class A:
+    class B:
+        def f(self) -> None: pass
+[file target.py.next]
+class A:
+    class B:
+        def f(self) -> None: pass
+[out]
+TypeInfo<0>(
+  Name(target.A)
+  Bases(builtins.object<1>)
+  Mro(target.A<0>, builtins.object<1>)
+  Names(
+    B<2>))
+TypeInfo<2>(
+  Name(target.A.B)
+  Bases(builtins.object<1>)
+  Mro(target.A.B<2>, builtins.object<1>)
+  Names(
+    f<3>))
+==>
+TypeInfo<0>(
+  Name(target.A)
+  Bases(builtins.object<1>)
+  Mro(target.A<0>, builtins.object<1>)
+  Names(
+    B<2>))
+TypeInfo<2>(
+  Name(target.A.B)
+  Bases(builtins.object<1>)
+  Mro(target.A.B<2>, builtins.object<1>)
+  Names(
+    f<3>))
diff --git a/test-data/unit/parse-errors.test b/test-data/unit/parse-errors.test
new file mode 100644
index 0000000..22a3c5c
--- /dev/null
+++ b/test-data/unit/parse-errors.test
@@ -0,0 +1,448 @@
+-- Test cases for parser errors. Each test case consists of two sections.
+-- The first section contains [case NAME] followed by the input code, while
+-- the second section contains [out] followed by the output from the parser.
+--
+-- The input file name in errors is "file".
+--
+-- Comments starting with "--" in this file will be ignored, except for lines
+-- starting with "----" that are not ignored. The first two dashes of these
+-- lines are interpreted as escapes and removed.
+
+[case testInvalidFunction]
+def f()
+  pass
+[out]
+file:1: error: invalid syntax
+
+[case testMissingIndent]
+if x:
+1
+[out]
+file:2: error: invalid syntax
+
+[case testUnexpectedIndent]
+1
+ 2
+[out]
+file:2: error: unexpected indent
+
+[case testInconsistentIndent]
+if x:
+  1
+   1
+[out]
+file:3: error: unexpected indent
+
+[case testInconsistentIndent2]
+if x:
+   1
+  1
+[out]
+file:3: error: unindent does not match any outer indentation level
+
+[case testInvalidBinaryOp]
+1>
+a*
+a+1*
+[out]
+file:1: error: invalid syntax
+
+[case testDoubleStar]
+**a
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidSuperClass]
+class A(C[):
+  pass
+[out]
+file:1: error: invalid syntax
+
+[case testMissingSuperClass]
+class A(:
+  pass
+[out]
+file:1: error: invalid syntax
+
+[case testUnexpectedEof]
+if 1:
+[out]
+file:1: error: unexpected EOF while parsing
+
+[case testInvalidKeywordArguments1]
+f(x=y, z)
+[out]
+file:1: error: positional argument follows keyword argument
+
+[case testInvalidKeywordArguments2]
+f(**x, y)
+[out]
+file:1: error: positional argument follows keyword argument unpacking
+
+[case testInvalidBareAsteriskAndVarArgs2]
+def f(*x: A, *) -> None: pass
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidBareAsteriskAndVarArgs3]
+def f(*, *x: A) -> None: pass
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidBareAsteriskAndVarArgs4]
+def f(*, **x: A) -> None: pass
+[out]
+file:1: error: named arguments must follow bare *
+
+[case testInvalidBareAsterisk1]
+def f(*) -> None: pass
+[out]
+file:1: error: named arguments must follow bare *
+
+[case testInvalidBareAsterisk2]
+def f(x, *) -> None: pass
+[out]
+file:1: error: named arguments must follow bare *
+
+[case testInvalidFuncDefArgs1]
+def f(x = y, x): pass
+[out]
+file:1: error: non-default argument follows default argument
+
+[case testInvalidFuncDefArgs3]
+def f(**x, y):
+   pass
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidFuncDefArgs4]
+def f(**x, y=x):
+    pass
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidStringLiteralType]
+def f(x:
+     'A['
+     ) -> None: pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidStringLiteralType2]
+def f(x:
+      'A B'
+      ) -> None: pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidTypeComment]
+0
+x = 0 # type: A A
+[out]
+file:2: error: syntax error in type comment
+
+[case testInvalidTypeComment2]
+0
+x = 0 # type: A[
+[out]
+file:2: error: syntax error in type comment
+
+[case testInvalidTypeComment3]
+0
+x = 0 # type:
+[out]
+file:2: error: syntax error in type comment
+
+[case testInvalidTypeComment4]
+0
+x = 0 # type: *
+[out]
+file:2: error: syntax error in type comment
+
+[case testInvalidMultilineLiteralType]
+def f() -> "A\nB": pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment1]
+def f(): # type: x
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment2]
+def f(): # type:
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment3]
+def f(): # type: (
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment4]
+def f(): # type: (.
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment5]
+def f(): # type: (x
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment6]
+def f(): # type: (x)
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment7]
+def f(): # type: (x) -
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment8]
+def f(): # type: (x) ->
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment9]
+def f(): # type: (x) -> .
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment10]
+def f(): # type: (x) -> x x
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testDuplicateSignatures1]
+def f() -> None: # type: () -> None
+  pass
+def f(): # type: () -> None
+    pass
+[out]
+file:1: error: Function has duplicate type signatures
+
+[case testDuplicateSignatures2]
+def f(x, y: Z): # type: (x, y) -> z
+  pass
+[out]
+file:1: error: Function has duplicate type signatures
+
+[case testTooManyTypes]
+def f(x, y): # type: (X, Y, Z) -> z
+  pass
+[out]
+file:1: error: Type signature has too many arguments
+
+[case testTooFewTypes]
+def f(x, y): # type: (X) -> z
+  pass
+[out]
+file:1: error: Type signature has too few arguments
+
+[case testCommentFunctionAnnotationVarArgMispatch-skip]
+# see mypy issue #1997
+def f(x): # type: (*X) -> Y
+    pass
+def g(*x): # type: (X) -> Y
+    pass
+[out]
+file:1: error: Inconsistent use of '*' in function signature
+file:3: error: Inconsistent use of '*' in function signature
+
+[case testCommentFunctionAnnotationVarArgMispatch2-skip]
+# see mypy issue #1997
+def f(*x, **y): # type: (**X, *Y) -> Z
+    pass
+def g(*x, **y): # type: (*X, *Y) -> Z
+    pass
+[out]
+file:1: error: Inconsistent use of '*' in function signature
+file:3: error: syntax error in type comment
+file:3: error: Inconsistent use of '*' in function signature
+file:3: error: Inconsistent use of '**' in function signature
+
+[case testPrintStatementInPython3-skip]
+print 1
+[out]
+file:1: error: Missing parentheses in call to 'print'
+
+[case testInvalidConditionInConditionalExpression]
+1 if 2, 3 else 4
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidConditionInConditionalExpression2]
+1 if x for y in z else 4
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidConditionInConditionalExpression2]
+1 if x else for y in z
+[out]
+file:1: error: invalid syntax
+
+[case testYieldFromNotRightParameter]
+def f():
+    yield from
+[out]
+file:2: error: invalid syntax
+
+[case testYieldFromAfterReturn]
+def f():
+    return yield from h()
+[out]
+file:2: error: invalid syntax
+
+[case testImportDotModule]
+import .x
+[out]
+file:1: error: invalid syntax
+
+[case testImportDot]
+import .
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidFunctionName]
+def while(): pass
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidEllipsis1]
+...0
+..._
+...a
+[out]
+file:1: error: invalid syntax
+
+[case testBlockStatementInSingleLineIf]
+if 1: if 2: pass
+[out]
+file:1: error: invalid syntax
+
+[case testBlockStatementInSingleLineIf2]
+if 1: while 2: pass
+[out]
+file:1: error: invalid syntax
+
+[case testBlockStatementInSingleLineIf3]
+if 1: for x in y: pass
+[out]
+file:1: error: invalid syntax
+
+[case testUnexpectedEllipsis]
+a = a...
+[out]
+file:1: error: invalid syntax
+
+[case testParseErrorBeforeUnicodeLiteral]
+x u'y'
+[out]
+file:1: error: invalid syntax
+
+[case testParseErrorInExtendedSlicing]
+x[:,
+[out]
+file:1: error: unexpected EOF while parsing
+
+[case testParseErrorInExtendedSlicing2]
+x[:,::
+[out]
+file:1: error: unexpected EOF while parsing
+
+[case testParseErrorInExtendedSlicing3]
+x[:,:
+[out]
+file:1: error: unexpected EOF while parsing
+
+[case testPython2OctalIntLiteralInPython3]
+0377
+[out]
+file:1: error: invalid token
+
+[case testInvalidEncoding]
+# foo
+# coding: uft-8
+[out]
+file:0: error: unknown encoding: uft-8
+
+[case testInvalidEncoding2]
+# coding=Uft.8
+[out]
+file:0: error: unknown encoding: Uft.8
+
+[case testInvalidEncoding3]
+#!/usr/bin python
+# vim: set fileencoding=uft8 :
+[out]
+file:0: error: unknown encoding: uft8
+
+[case testDoubleEncoding]
+# coding: uft8
+# coding: utf8
+# The first coding cookie should be used and fail.
+[out]
+file:0: error: unknown encoding: uft8
+
+[case testDoubleEncoding2]
+# Again the first cookie should be used and fail.
+# coding: uft8
+# coding: utf8
+[out]
+file:0: error: unknown encoding: uft8
+
+[case testLongLiteralInPython3]
+2L
+0x2L
+[out]
+file:1: error: invalid syntax
+
+[case testPython2LegacyInequalityInPython3]
+1 <> 2
+[out]
+file:1: error: invalid syntax
+
+[case testLambdaInListComprehensionInPython3]
+([ 0 for x in 1, 2 if 3 ])
+[out]
+file:1: error: invalid syntax
+
+[case testTupleArgListInPython3]
+def f(x, (y, z)): pass
+[out]
+file:1: error: invalid syntax
+
+[case testBackquoteInPython3]
+`1 + 2`
+[out]
+file:1: error: invalid syntax
+
+[case testSmartQuotes]
+foo = ‘bar’
+[out]
+file:1: error: invalid character in identifier
+
+[case testExceptCommaInPython3]
+try:
+    pass
+except KeyError, IndexError:
+    pass
+[out]
+file:3: error: invalid syntax
+
+[case testLocalVarWithTypeOnNextLine]
+x = 0
+  # type: int
+[out]
+file:2: error: misplaced type annotation
diff --git a/test-data/unit/parse-python2.test b/test-data/unit/parse-python2.test
new file mode 100644
index 0000000..b654f6a
--- /dev/null
+++ b/test-data/unit/parse-python2.test
@@ -0,0 +1,399 @@
+-- Test cases for parser -- Python 2 syntax.
+--
+-- See parse.test for a description of this file format.
+
+[case testEmptyFile]
+[out]
+MypyFile:1()
+
+[case testStringLiterals]
+'bar'
+u'foo'
+ur'foo'
+u'''bar'''
+b'foo'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(bar))
+  ExpressionStmt:2(
+    UnicodeExpr(foo))
+  ExpressionStmt:3(
+    UnicodeExpr(foo))
+  ExpressionStmt:4(
+    UnicodeExpr(bar))
+  ExpressionStmt:5(
+    StrExpr(foo)))
+
+[case testSimplePrint]
+print 1
+print 2, 3
+print (4, 5)
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    IntExpr(1)
+    Newline)
+  PrintStmt:2(
+    IntExpr(2)
+    IntExpr(3)
+    Newline)
+  PrintStmt:3(
+    TupleExpr:3(
+      IntExpr(4)
+      IntExpr(5))
+    Newline))
+
+[case testPrintWithNoArgs]
+print
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    Newline))
+
+[case testPrintWithTarget]
+print >>foo
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    Target(
+      NameExpr(foo))
+    Newline))
+
+[case testPrintWithTargetAndArgs]
+print >>foo, x
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    NameExpr(x)
+    Target(
+      NameExpr(foo))
+    Newline))
+
+[case testPrintWithTargetAndArgsAndTrailingComma]
+print >>foo, x, y,
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    NameExpr(x)
+    NameExpr(y)
+    Target(
+      NameExpr(foo))))
+
+[case testSimpleWithTrailingComma]
+print 1,
+print 2, 3,
+print (4, 5),
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    IntExpr(1))
+  PrintStmt:2(
+    IntExpr(2)
+    IntExpr(3))
+  PrintStmt:3(
+    TupleExpr:3(
+      IntExpr(4)
+      IntExpr(5))))
+
+[case testOctalIntLiteral]
+00
+01
+0377
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IntExpr(0))
+  ExpressionStmt:2(
+    IntExpr(1))
+  ExpressionStmt:3(
+    IntExpr(255)))
+
+[case testLongLiteral-skip]
+# see typed_ast issue #26
+0L
+123L
+012L
+0x123l
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IntExpr(0))
+  ExpressionStmt:2(
+    IntExpr(123))
+  ExpressionStmt:3(
+    IntExpr(10))
+  ExpressionStmt:4(
+    IntExpr(291)))
+
+[case testTryExceptWithComma]
+try:
+    x
+except Exception, e:
+    y
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(x)))
+    NameExpr(Exception)
+    NameExpr(e)
+    Block:3(
+      ExpressionStmt:4(
+        NameExpr(y)))))
+
+[case testTryExceptWithNestedComma]
+try:
+    x
+except (KeyError, IndexError):
+    y
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(x)))
+    TupleExpr:3(
+      NameExpr(KeyError)
+      NameExpr(IndexError))
+    Block:3(
+      ExpressionStmt:4(
+        NameExpr(y)))))
+
+[case testExecStatement]
+exec a
+[out]
+MypyFile:1(
+  ExecStmt:1(
+    NameExpr(a)))
+
+[case testExecStatementWithIn]
+exec a in globals()
+[out]
+MypyFile:1(
+  ExecStmt:1(
+    NameExpr(a)
+    CallExpr:1(
+      NameExpr(globals)
+      Args())))
+
+[case testExecStatementWithInAnd2Expressions]
+exec a in x, y
+[out]
+MypyFile:1(
+  ExecStmt:1(
+    NameExpr(a)
+    NameExpr(x)
+    NameExpr(y)))
+
+[case testEllipsisInExpression_python2]
+x = ... # E: invalid syntax
+[out]
+
+[case testStrLiteralConcatenationWithMixedLiteralTypes]
+u'foo' 'bar'
+'bar' u'foo'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    UnicodeExpr(foobar))
+  ExpressionStmt:2(
+    UnicodeExpr(barfoo)))
+
+[case testLegacyInequality]
+1 <> 2
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ComparisonExpr:1(
+      !=
+      IntExpr(1)
+      IntExpr(2))))
+
+[case testLambdaInListComprehensionInPython2]
+([ 0 for x in 1, 2 if 3 ])
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ListComprehension:1(
+      GeneratorExpr:1(
+        IntExpr(0)
+        NameExpr(x)
+        TupleExpr:1(
+          IntExpr(1)
+          IntExpr(2))
+        IntExpr(3)))))
+
+[case testTupleArgListInPython2]
+def f(x, (y, z)): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(__tuple_arg_2))
+    Block:1(
+      AssignmentStmt:1(
+        TupleExpr:1(
+          NameExpr(y)
+          NameExpr(z))
+        NameExpr(__tuple_arg_2))
+      PassStmt:1())))
+
+[case testTupleArgListWithTwoTupleArgsInPython2]
+def f((x, y), (z, zz)): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(__tuple_arg_1)
+      Var(__tuple_arg_2))
+    Block:1(
+      AssignmentStmt:1(
+        TupleExpr:1(
+          NameExpr(x)
+          NameExpr(y))
+        NameExpr(__tuple_arg_1))
+      AssignmentStmt:1(
+        TupleExpr:1(
+          NameExpr(z)
+          NameExpr(zz))
+        NameExpr(__tuple_arg_2))
+      PassStmt:1())))
+
+[case testTupleArgListWithInitializerInPython2]
+def f((y, z) = (1, 2)): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(__tuple_arg_1))
+    Init(
+      AssignmentStmt:1(
+        NameExpr(__tuple_arg_1)
+        TupleExpr:1(
+          IntExpr(1)
+          IntExpr(2))))
+    Block:1(
+      AssignmentStmt:1(
+        TupleExpr:1(
+          NameExpr(y)
+          NameExpr(z))
+        NameExpr(__tuple_arg_1))
+      PassStmt:1())))
+
+[case testLambdaTupleArgListInPython2]
+lambda (x, y): z
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    LambdaExpr:1(
+      Args(
+        Var(__tuple_arg_1))
+      Block:1(
+        AssignmentStmt:1(
+          TupleExpr:1(
+            NameExpr(x)
+            NameExpr(y))
+          NameExpr(__tuple_arg_1))
+        ReturnStmt:1(
+          NameExpr(z))))))
+
+[case testLambdaSingletonTupleArgListInPython2]
+lambda (x,): z
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    LambdaExpr:1(
+      Args(
+        Var(__tuple_arg_1))
+      Block:1(
+        AssignmentStmt:1(
+          TupleExpr:1(
+            NameExpr(x))
+          NameExpr(__tuple_arg_1))
+        ReturnStmt:1(
+          NameExpr(z))))))
+
+[case testLambdaNoTupleArgListInPython2]
+lambda (x): z
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    LambdaExpr:1(
+      Args(
+        Var(x))
+      Block:1(
+        ReturnStmt:1(
+          NameExpr(z))))))
+
+[case testInvalidExprInTupleArgListInPython2_1]
+def f(x, ()): pass
+[out]
+main:1: error: invalid syntax
+
+[case testInvalidExprInTupleArgListInPython2_2]
+def f(x, (y, x[1])): pass
+[out]
+main:1: error: invalid syntax
+
+[case testListLiteralAsTupleArgInPython2]
+def f(x, [x]): pass
+[out]
+main:1: error: invalid syntax
+
+[case testTupleArgAfterStarArgInPython2]
+def f(*a, (b, c)): pass
+[out]
+main:1: error: invalid syntax
+
+[case testTupleArgAfterStarStarArgInPython2]
+def f(*a, (b, c)): pass
+[out]
+main:1: error: invalid syntax
+
+[case testParenthesizedArgumentInPython2]
+def f(x, (y)): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(y))
+    Block:1(
+      PassStmt:1())))
+
+[case testDuplicateNameInTupleArgList_python2]
+def f(a, (a, b)):
+    pass
+def g((x, (x, y))):
+    pass
+[out]
+main:1: error: Duplicate argument 'a' in function definition
+main:3: error: Duplicate argument 'x' in function definition
+
+[case testBackquotesInPython2]
+`1 + 2`
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    BackquoteExpr:1(
+      OpExpr:1(
+        +
+        IntExpr(1)
+        IntExpr(2)))))
+
+[case testBackquoteSpecialCasesInPython2]
+`1, 2`
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    BackquoteExpr:1(
+      TupleExpr:1(
+        IntExpr(1)
+        IntExpr(2)))))
diff --git a/test-data/unit/parse.test b/test-data/unit/parse.test
new file mode 100644
index 0000000..39cbd58
--- /dev/null
+++ b/test-data/unit/parse.test
@@ -0,0 +1,3386 @@
+-- Test cases for parser. Each test case consists of two sections.
+-- The first section contains [case NAME] followed by the input code, while
+-- the second section contains [out] followed by the output from the parser.
+--
+-- Lines starting with "--" in this file will be ignored, except for lines
+-- starting with "----" that are not ignored. The first two dashes of these
+-- lines are interpreted as escapes and removed.
+
+[case testEmptyFile]
+[out]
+MypyFile:1()
+
+[case testExpressionStatement]
+1
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IntExpr(1)))
+
+[case testAssignment]
+x = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(1)))
+
+[case testExpressionBasics]
+x = f(1, None)
+123 * (2 + x)
+"hello".lower()
+-1.23
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    CallExpr:1(
+      NameExpr(f)
+      Args(
+        IntExpr(1)
+        NameExpr(None))))
+  ExpressionStmt:2(
+    OpExpr:2(
+      *
+      IntExpr(123)
+      OpExpr:2(
+        +
+        IntExpr(2)
+        NameExpr(x))))
+  ExpressionStmt:3(
+    CallExpr:3(
+      MemberExpr:3(
+        StrExpr(hello)
+        lower)
+      Args()))
+  ExpressionStmt:4(
+    UnaryExpr:4(
+      -
+      FloatExpr(1.23))))
+
+[case testSingleQuotedStr]
+''
+'foo'
+'foo\
+bar'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr())
+  ExpressionStmt:2(
+    StrExpr(foo))
+  ExpressionStmt:3(
+    StrExpr(foobar)))
+
+[case testDoubleQuotedStr]
+""
+"foo"
+"foo\
+bar"
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr())
+  ExpressionStmt:2(
+    StrExpr(foo))
+  ExpressionStmt:3(
+    StrExpr(foobar)))
+
+[case testTripleQuotedStr]
+''''''
+'''foo'''
+'''foo\
+bar'''
+'''\nfoo
+bar'''
+'''fo''bar'''
+""""""
+"""foo"""
+"""foo\
+bar"""
+"""\nfoo
+bar"""
+"""fo""bar"""
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr())
+  ExpressionStmt:2(
+    StrExpr(foo))
+  ExpressionStmt:3(
+    StrExpr(foobar))
+  ExpressionStmt:5(
+    StrExpr(\u000afoo\u000abar))
+  ExpressionStmt:6(
+    StrExpr(fo''bar))
+  ExpressionStmt:7(
+    StrExpr())
+  ExpressionStmt:8(
+    StrExpr(foo))
+  ExpressionStmt:9(
+    StrExpr(foobar))
+  ExpressionStmt:11(
+    StrExpr(\u000afoo\u000abar))
+  ExpressionStmt:12(
+    StrExpr(fo""bar)))
+
+[case testRawStr]
+r'x\n\''
+r"x\n\""
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(x\n\'))
+  ExpressionStmt:2(
+    StrExpr(x\n\")))
+--" fix syntax highlight
+
+[case testBytes]
+b'foo'
+b"foo\
+bar"
+br'x\n\''
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    BytesExpr(foo))
+  ExpressionStmt:2(
+    BytesExpr(foobar))
+  ExpressionStmt:3(
+    BytesExpr(x\\n\\')))
+
+[case testEscapesInStrings]
+'\r\n\t\x2f\u123f'
+b'\r\n\t\x2f\u123f'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(\u000d\u000a\u0009/\u123f))
+  ExpressionStmt:2(
+    BytesExpr(\r\n\t/\\\u123f)))
+-- Note \\u in the b'...' case (\u sequence not translated)
+
+[case testEscapedQuote]
+'\''
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(')))
+--'
+
+[case testOctalEscapes]
+'\0\1\177\1234'
+b'\1\476'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(\u0000\u0001\u007fS4))
+  ExpressionStmt:2(
+    BytesExpr(\x01>)))
+
+[case testUnicodeLiteralInPython3]
+u'foo'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(foo)))
+
+[case testArrays]
+a = []
+a = [1, 2]
+a[[1]] = a[2]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    ListExpr:1())
+  AssignmentStmt:2(
+    NameExpr(a)
+    ListExpr:2(
+      IntExpr(1)
+      IntExpr(2)))
+  AssignmentStmt:3(
+    IndexExpr:3(
+      NameExpr(a)
+      ListExpr:3(
+        IntExpr(1)))
+    IndexExpr:3(
+      NameExpr(a)
+      IntExpr(2))))
+
+[case testTuples]
+()
+(1,)
+(1, foo)
+a, b = 1, (2, 3)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1())
+  ExpressionStmt:2(
+    TupleExpr:2(
+      IntExpr(1)))
+  ExpressionStmt:3(
+    TupleExpr:3(
+      IntExpr(1)
+      NameExpr(foo)))
+  AssignmentStmt:4(
+    TupleExpr:4(
+      NameExpr(a)
+      NameExpr(b))
+    TupleExpr:4(
+      IntExpr(1)
+      TupleExpr:4(
+        IntExpr(2)
+        IntExpr(3)))))
+
+[case testSimpleFunction]
+def main():
+  1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    main
+    Block:1(
+      ExpressionStmt:2(
+        IntExpr(1)))))
+
+[case testPass]
+def f():
+    pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      PassStmt:2())))
+
+[case testIf]
+if 1:
+    2
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      ExpressionStmt:2(
+        IntExpr(2)))))
+
+[case testIfElse]
+if 1:
+    2
+else:
+    3
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      ExpressionStmt:2(
+        IntExpr(2)))
+    Else(
+      ExpressionStmt:4(
+        IntExpr(3)))))
+
+[case testIfElif]
+if 1:
+    2
+elif 3:
+    4
+elif 5:
+    6
+else:
+    7
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      ExpressionStmt:2(
+        IntExpr(2)))
+    Else(
+      IfStmt:3(
+        If(
+          IntExpr(3))
+        Then(
+          ExpressionStmt:4(
+            IntExpr(4)))
+        Else(
+          IfStmt:5(
+            If(
+              IntExpr(5))
+            Then(
+              ExpressionStmt:6(
+                IntExpr(6)))
+            Else(
+              ExpressionStmt:8(
+                IntExpr(7)))))))))
+
+[case testWhile]
+while 1:
+    pass
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(1)
+    Block:1(
+      PassStmt:2())))
+
+[case testReturn]
+def f():
+    return 1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ReturnStmt:2(
+        IntExpr(1)))))
+
+
+[case testReturnWithoutValue]
+def f():
+    return
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ReturnStmt:2())))
+
+[case testBreak]
+while 1:
+    break
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(1)
+    Block:1(
+      BreakStmt:2())))
+
+[case testLargeBlock]
+if 1:
+    x = 1
+    while 2:
+        pass
+    y = 2
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      AssignmentStmt:2(
+        NameExpr(x)
+        IntExpr(1))
+      WhileStmt:3(
+        IntExpr(2)
+        Block:3(
+          PassStmt:4()))
+      AssignmentStmt:5(
+        NameExpr(y)
+        IntExpr(2)))))
+
+[case testSimpleClass]
+class A:
+    def f(self):
+        pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self))
+      Block:2(
+        PassStmt:3()))))
+
+[case testGlobalVarWithType]
+x = 0 # type: int
+y = False # type: bool
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(0)
+    int?)
+  AssignmentStmt:2(
+    NameExpr(y)
+    NameExpr(False)
+    bool?))
+
+[case testLocalVarWithType]
+def f():
+  x = 0 # type: int
+  y = False # type: bool
+  a = None # type: Any
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x)
+        IntExpr(0)
+        int?)
+      AssignmentStmt:3(
+        NameExpr(y)
+        NameExpr(False)
+        bool?)
+      AssignmentStmt:4(
+        NameExpr(a)
+        NameExpr(None)
+        Any?))))
+
+[case testFunctionDefWithType]
+def f(y: str) -> int:
+  return
+class A:
+  def f(self, a: int, b: Any) -> x:
+    pass
+  def g(self) -> Any:
+    pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(y))
+    def (y: str?) -> int?
+    Block:1(
+      ReturnStmt:2()))
+  ClassDef:3(
+    A
+    FuncDef:4(
+      f
+      Args(
+        Var(self)
+        Var(a)
+        Var(b))
+      def (self: Any, a: int?, b: Any?) -> x?
+      Block:4(
+        PassStmt:5()))
+    FuncDef:6(
+      g
+      Args(
+        Var(self))
+      def (self: Any) -> Any?
+      Block:6(
+        PassStmt:7()))))
+
+[case testFuncWithNoneReturn]
+def f() -> None:
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> None?
+    Block:1(
+      PassStmt:2())))
+
+[case testVarDefWithGenericType]
+x = None # type: List[str]
+y = None # type: Dict[int, Any]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    NameExpr(None)
+    List?[str?])
+  AssignmentStmt:2(
+    NameExpr(y)
+    NameExpr(None)
+    Dict?[int?, Any?]))
+
+[case testSignatureWithGenericTypes]
+def f(y: t[Any, x]) -> a[b[c], d]:
+    pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(y))
+    def (y: t?[Any?, x?]) -> a?[b?[c?], d?]
+    Block:1(
+      PassStmt:2())))
+
+[case testParsingExpressionsWithLessAndGreaterThan]
+# The operators < > can sometimes be confused with generic types.
+x = a < b > c
+f(x < b, y > c)
+a < b > 1
+x < b, y > 2
+(a < b > c)
+[out]
+MypyFile:1(
+  AssignmentStmt:2(
+    NameExpr(x)
+    ComparisonExpr:2(
+      <
+      >
+      NameExpr(a)
+      NameExpr(b)
+      NameExpr(c)))
+  ExpressionStmt:3(
+    CallExpr:3(
+      NameExpr(f)
+      Args(
+        ComparisonExpr:3(
+          <
+          NameExpr(x)
+          NameExpr(b))
+        ComparisonExpr:3(
+          >
+          NameExpr(y)
+          NameExpr(c)))))
+  ExpressionStmt:4(
+    ComparisonExpr:4(
+      <
+      >
+      NameExpr(a)
+      NameExpr(b)
+      IntExpr(1)))
+  ExpressionStmt:5(
+    TupleExpr:5(
+      ComparisonExpr:5(
+        <
+        NameExpr(x)
+        NameExpr(b))
+      ComparisonExpr:5(
+        >
+        NameExpr(y)
+        IntExpr(2))))
+  ExpressionStmt:6(
+    ComparisonExpr:6(
+      <
+      >
+      NameExpr(a)
+      NameExpr(b)
+      NameExpr(c))))
+
+[case testLineContinuation]
+if (1 +
+    2):
+  pass
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      OpExpr:1(
+        +
+        IntExpr(1)
+        IntExpr(2)))
+    Then(
+      PassStmt:3())))
+
+[case testMultipleVarDef]
+x, y = z # type: int, a[c]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(x)
+      NameExpr(y))
+    NameExpr(z)
+    Tuple[int?, a?[c?]]))
+
+[case testMultipleVarDef2]
+(xx, z, i) = 1 # type: (a[c], Any, int)
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(xx)
+      NameExpr(z)
+      NameExpr(i))
+    IntExpr(1)
+    Tuple[a?[c?], Any?, int?]))
+
+[case testMultipleVarDef3]
+(xx, (z, i)) = 1 # type: (a[c], (Any, int))
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(xx)
+      TupleExpr:1(
+        NameExpr(z)
+        NameExpr(i)))
+    IntExpr(1)
+    Tuple[a?[c?], Tuple[Any?, int?]]))
+
+[case testAnnotateAssignmentViaSelf]
+class A:
+    def __init__(self):
+        self.x = 1 # type: int
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(self))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(self)
+            x)
+          IntExpr(1)
+          int?)))))
+
+[case testCommentAfterTypeComment]
+x = 0 # type: int # bar!
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(0)
+    int?))
+
+[case testMultilineAssignmentAndAnnotations]
+(x,
+ y) = (1,
+      2) # type: foo, bar
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(x)
+      NameExpr(y))
+    TupleExpr:2(
+      IntExpr(1)
+      IntExpr(2))
+    Tuple[foo?, bar?]))
+
+[case testWhitespaceAndCommentAnnotation]
+x = 1#type:int
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(1)
+    int?))
+
+[case testWhitespaceAndCommentAnnotation2]
+x = 1#   type:   int
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(1)
+    int?))
+
+[case testWhitespaceAndCommentAnnotation3]
+x = 1# type : int       # not recognized!
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(1)))
+
+[case testInvalidAnnotation]
+x=1 ##type: int
+y=1 #.type: int
+z=1 # Type: int
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(1))
+  AssignmentStmt:2(
+    NameExpr(y)
+    IntExpr(1))
+  AssignmentStmt:3(
+    NameExpr(z)
+    IntExpr(1)))
+
+[case testEmptyClass]
+class C:
+  pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    C
+    PassStmt:2()))
+
+[case testOperatorPrecedence]
+a | b ^ c
+a & b << c
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    OpExpr:1(
+      |
+      NameExpr(a)
+      OpExpr:1(
+        ^
+        NameExpr(b)
+        NameExpr(c))))
+  ExpressionStmt:2(
+    OpExpr:2(
+      &
+      NameExpr(a)
+      OpExpr:2(
+        <<
+        NameExpr(b)
+        NameExpr(c)))))
+
+[case testOperatorAssociativity]
+1 - 2 + 3
+1 << 2 << 3
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    OpExpr:1(
+      +
+      OpExpr:1(
+        -
+        IntExpr(1)
+        IntExpr(2))
+      IntExpr(3)))
+  ExpressionStmt:2(
+    OpExpr:2(
+      <<
+      OpExpr:2(
+        <<
+        IntExpr(1)
+        IntExpr(2))
+      IntExpr(3))))
+
+[case testUnaryOperators]
+-2 * +3 * ~3 * 2
+~3**2
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    OpExpr:1(
+      *
+      OpExpr:1(
+        *
+        OpExpr:1(
+          *
+          UnaryExpr:1(
+            -
+            IntExpr(2))
+          UnaryExpr:1(
+            +
+            IntExpr(3)))
+        UnaryExpr:1(
+          ~
+          IntExpr(3)))
+      IntExpr(2)))
+  ExpressionStmt:2(
+    UnaryExpr:2(
+      ~
+      OpExpr:2(
+        **
+        IntExpr(3)
+        IntExpr(2)))))
+
+[case testSingleLineBodies]
+if 1: pass
+while 1: pass
+def f(): pass
+def g() -> int: return 1
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      PassStmt:1()))
+  WhileStmt:2(
+    IntExpr(1)
+    Block:2(
+      PassStmt:2()))
+  FuncDef:3(
+    f
+    Block:3(
+      PassStmt:3()))
+  FuncDef:4(
+    g
+    def () -> int?
+    Block:4(
+      ReturnStmt:4(
+        IntExpr(1)))))
+
+[case testForStatement]
+for x in y:
+  pass
+for x, (y, w) in z:
+  1
+for [x, (y, w)] in z:
+  1
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x)
+    NameExpr(y)
+    Block:1(
+      PassStmt:2()))
+  ForStmt:3(
+    TupleExpr:3(
+      NameExpr(x)
+      TupleExpr:3(
+        NameExpr(y)
+        NameExpr(w)))
+    NameExpr(z)
+    Block:3(
+      ExpressionStmt:4(
+        IntExpr(1))))
+  ForStmt:5(
+    ListExpr:5(
+      NameExpr(x)
+      TupleExpr:5(
+        NameExpr(y)
+        NameExpr(w)))
+    NameExpr(z)
+    Block:5(
+      ExpressionStmt:6(
+        IntExpr(1)))))
+
+[case testGlobalDecl]
+global x
+def f():
+  global x, y
+[out]
+MypyFile:1(
+  GlobalDecl:1(
+    x)
+  FuncDef:2(
+    f
+    Block:2(
+      GlobalDecl:3(
+        x
+        y))))
+
+[case testNonlocalDecl]
+def f():
+  def g():
+    nonlocal x, y
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      FuncDef:2(
+        g
+        Block:2(
+          NonlocalDecl:3(
+            x
+            y))))))
+
+[case testRaiseStatement]
+raise foo
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    NameExpr(foo)))
+
+[case testRaiseWithoutArg]
+try:
+  pass
+except:
+  raise
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    Block:3(
+      RaiseStmt:4())))
+
+[case testRaiseFrom]
+raise e from x
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    NameExpr(e)
+    NameExpr(x)))
+
+[case testBaseclasses]
+class A(B):
+  pass
+class A(B[T], C[Any, d[x]]):
+  pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    BaseTypeExpr(
+      NameExpr(B))
+    PassStmt:2())
+  ClassDef:3(
+    A
+    BaseTypeExpr(
+      IndexExpr:3(
+        NameExpr(B)
+        NameExpr(T))
+      IndexExpr:3(
+        NameExpr(C)
+        TupleExpr:3(
+          NameExpr(Any)
+          IndexExpr:3(
+            NameExpr(d)
+            NameExpr(x)))))
+    PassStmt:4()))
+
+[case testIsNot]
+x is not y
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ComparisonExpr:1(
+      is not
+      NameExpr(x)
+      NameExpr(y))))
+
+[case testNotIn]
+x not in y
+not x not in y
+x not in y | z
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ComparisonExpr:1(
+      not in
+      NameExpr(x)
+      NameExpr(y)))
+  ExpressionStmt:2(
+    UnaryExpr:2(
+      not
+      ComparisonExpr:2(
+        not in
+        NameExpr(x)
+        NameExpr(y))))
+  ExpressionStmt:3(
+    ComparisonExpr:3(
+      not in
+      NameExpr(x)
+      OpExpr:3(
+        |
+        NameExpr(y)
+        NameExpr(z)))))
+
+[case testNotAsBinaryOp]
+x not y # E: invalid syntax
+[out]
+
+[case testNotIs]
+x not is y # E: invalid syntax
+[out]
+
+[case testBinaryNegAsBinaryOp]
+1 ~ 2 # E: invalid syntax
+[out]
+
+[case testDictionaryExpression]
+{}
+{1:x}
+{1:x, 2 or 1:2 and 3}
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    DictExpr:1())
+  ExpressionStmt:2(
+    DictExpr:2(
+      IntExpr(1)
+      NameExpr(x)))
+  ExpressionStmt:3(
+    DictExpr:3(
+      IntExpr(1)
+      NameExpr(x)
+      OpExpr:3(
+        or
+        IntExpr(2)
+        IntExpr(1))
+      OpExpr:3(
+        and
+        IntExpr(2)
+        IntExpr(3)))))
+
+[case testImport]
+import x
+import y.z.foo, __foo__.bar
+[out]
+MypyFile:1(
+  Import:1(x)
+  Import:2(y.z.foo, __foo__.bar))
+
+[case testVariableTypeWithQualifiedName]
+x = None # type: x.y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    NameExpr(None)
+    x.y?))
+
+[case testTypeInSignatureWithQualifiedName]
+def f() -> x.y[a.b.c]: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> x.y?[a.b.c?]
+    Block:1(
+      PassStmt:1())))
+
+[case testImportFrom]
+from m import x
+from m.n import x, y, z
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [x])
+  ImportFrom:2(m.n, [x, y, z]))
+
+[case testImportFromAs]
+from m import x as y
+from x import y, z as a, c as c
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [x : y])
+  ImportFrom:2(x, [y, z : a, c : c]))
+
+[case testImportStar]
+from x import *
+[out]
+MypyFile:1(
+  ImportAll:1(x))
+
+[case testImportsInDifferentPlaces]
+1
+import x
+def f():
+  from x import y
+  from z import *
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IntExpr(1))
+  Import:2(x)
+  FuncDef:3(
+    f
+    Block:3(
+      ImportFrom:4(x, [y])
+      ImportAll:5(z))))
+
+[case testImportWithExtraComma]
+from x import (y, z,)
+[out]
+MypyFile:1(
+  ImportFrom:1(x, [y, z]))
+
+[case testDefaultArgs]
+def f(x=1):
+  pass
+def g(x, y=1+2, z=(1, 2)):
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Init(
+      AssignmentStmt:1(
+        NameExpr(x)
+        IntExpr(1)))
+    Block:1(
+      PassStmt:2()))
+  FuncDef:3(
+    g
+    Args(
+      Var(x)
+      Var(y)
+      Var(z))
+    Init(
+      AssignmentStmt:3(
+        NameExpr(y)
+        OpExpr:3(
+          +
+          IntExpr(1)
+          IntExpr(2)))
+      AssignmentStmt:3(
+        NameExpr(z)
+        TupleExpr:3(
+          IntExpr(1)
+          IntExpr(2))))
+    Block:3(
+      PassStmt:4())))
+
+[case testTryFinally]
+try:
+  1
+finally:
+  2
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        IntExpr(1)))
+    Finally(
+      ExpressionStmt:4(
+        IntExpr(2)))))
+
+[case testTry]
+try:
+  1
+except x:
+  2
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        IntExpr(1)))
+    NameExpr(x)
+    Block:3(
+      ExpressionStmt:4(
+        IntExpr(2)))))
+
+[case testComplexTry]
+try:
+  1
+except x as y:
+  2
+except x.y:
+  3
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        IntExpr(1)))
+    NameExpr(x)
+    NameExpr(y)
+    Block:3(
+      ExpressionStmt:4(
+        IntExpr(2)))
+    MemberExpr:5(
+      NameExpr(x)
+      y)
+    Block:5(
+      ExpressionStmt:6(
+        IntExpr(3)))))
+
+[case testGeneratorExpression]
+(x for y in z)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    GeneratorExpr:1(
+      NameExpr(x)
+      NameExpr(y)
+      NameExpr(z))))
+
+[case testGeneratorExpressionNested]
+(x for y, (p, q) in z)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    GeneratorExpr:1(
+      NameExpr(x)
+      TupleExpr:1(
+        NameExpr(y)
+        TupleExpr:1(
+          NameExpr(p)
+          NameExpr(q)))
+      NameExpr(z))))
+
+[case testListComprehension]
+x=[x for y in z]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    ListComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(x)
+        NameExpr(y)
+        NameExpr(z)))))
+
+[case testComplexListComprehension]
+x=[(x, y) for y, z in (1, 2)]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    ListComprehension:1(
+      GeneratorExpr:1(
+        TupleExpr:1(
+          NameExpr(x)
+          NameExpr(y))
+        TupleExpr:1(
+          NameExpr(y)
+          NameExpr(z))
+        TupleExpr:1(
+          IntExpr(1)
+          IntExpr(2))))))
+
+[case testListComprehension2]
+([x + 1 for x in a])
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ListComprehension:1(
+      GeneratorExpr:1(
+        OpExpr:1(
+          +
+          NameExpr(x)
+          IntExpr(1))
+        NameExpr(x)
+        NameExpr(a)))))
+
+[case testSlices]
+x[1:2]
+x[:1]
+x[1:]
+x[:]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IndexExpr:1(
+      NameExpr(x)
+      SliceExpr:-1(
+        IntExpr(1)
+        IntExpr(2))))
+  ExpressionStmt:2(
+    IndexExpr:2(
+      NameExpr(x)
+      SliceExpr:-1(
+        <empty>
+        IntExpr(1))))
+  ExpressionStmt:3(
+    IndexExpr:3(
+      NameExpr(x)
+      SliceExpr:-1(
+        IntExpr(1)
+        <empty>)))
+  ExpressionStmt:4(
+    IndexExpr:4(
+      NameExpr(x)
+      SliceExpr:-1(
+        <empty>
+        <empty>))))
+
+[case testSliceWithStride]
+x[1:2:3]
+x[1::2]
+x[:1:2]
+x[::2]
+x[1:2:]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IndexExpr:1(
+      NameExpr(x)
+      SliceExpr:-1(
+        IntExpr(1)
+        IntExpr(2)
+        IntExpr(3))))
+  ExpressionStmt:2(
+    IndexExpr:2(
+      NameExpr(x)
+      SliceExpr:-1(
+        IntExpr(1)
+        <empty>
+        IntExpr(2))))
+  ExpressionStmt:3(
+    IndexExpr:3(
+      NameExpr(x)
+      SliceExpr:-1(
+        <empty>
+        IntExpr(1)
+        IntExpr(2))))
+  ExpressionStmt:4(
+    IndexExpr:4(
+      NameExpr(x)
+      SliceExpr:-1(
+        <empty>
+        <empty>
+        IntExpr(2))))
+  ExpressionStmt:5(
+    IndexExpr:5(
+      NameExpr(x)
+      SliceExpr:-1(
+        IntExpr(1)
+        IntExpr(2)))))
+
+[case testYield]
+def f():
+    yield x + 1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ExpressionStmt:2(
+        YieldExpr:2(
+          OpExpr:2(
+            +
+            NameExpr(x)
+            IntExpr(1)))))))
+
+[case testYieldFrom]
+def f():
+    yield from h()
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ExpressionStmt:2(
+        YieldFromExpr:2(
+          CallExpr:2(
+            NameExpr(h)
+            Args()))))))
+
+[case testYieldFromAssignment]
+def f():
+    a = yield from h()
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(a)
+        YieldFromExpr:2(
+          CallExpr:2(
+            NameExpr(h)
+            Args()))))))
+
+[case testDel]
+del x
+del x[0], y[1]
+[out]
+MypyFile:1(
+  DelStmt:1(
+    NameExpr(x))
+  DelStmt:2(
+    TupleExpr:2(
+      IndexExpr:2(
+        NameExpr(x)
+        IntExpr(0))
+      IndexExpr:2(
+        NameExpr(y)
+        IntExpr(1)))))
+
+[case testExtraCommas]
+1, 2,
++[1, 2,]
+f(1,)
+{1:2,}
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1(
+      IntExpr(1)
+      IntExpr(2)))
+  ExpressionStmt:2(
+    UnaryExpr:2(
+      +
+      ListExpr:2(
+        IntExpr(1)
+        IntExpr(2))))
+  ExpressionStmt:3(
+    CallExpr:3(
+      NameExpr(f)
+      Args(
+        IntExpr(1))))
+  ExpressionStmt:4(
+    DictExpr:4(
+      IntExpr(1)
+      IntExpr(2))))
+
+[case testExtraCommaInFunc]
+def f(x,):
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      PassStmt:2())))
+
+[case testLambda]
+lambda: 1
+lambda x: y + 1
+lambda x, y: 1
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    LambdaExpr:1(
+      Block:1(
+        ReturnStmt:1(
+          IntExpr(1)))))
+  ExpressionStmt:2(
+    LambdaExpr:2(
+      Args(
+        Var(x))
+      Block:2(
+        ReturnStmt:2(
+          OpExpr:2(
+            +
+            NameExpr(y)
+            IntExpr(1))))))
+  ExpressionStmt:3(
+    LambdaExpr:3(
+      Args(
+        Var(x)
+        Var(y))
+      Block:3(
+        ReturnStmt:3(
+          IntExpr(1))))))
+
+[case testComplexLambda]
+lambda x=2: x
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    LambdaExpr:1(
+      Args(
+        Var(x))
+      Init(
+        AssignmentStmt:1(
+          NameExpr(x)
+          IntExpr(2)))
+      Block:1(
+        ReturnStmt:1(
+          NameExpr(x))))))
+
+[case testLambdaPrecedence]
+lambda x: 1, 2
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1(
+      LambdaExpr:1(
+        Args(
+          Var(x))
+        Block:1(
+          ReturnStmt:1(
+            IntExpr(1))))
+      IntExpr(2))))
+
+[case testForIndicesInParens]
+for (i, j) in x:
+  pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    TupleExpr:1(
+      NameExpr(i)
+      NameExpr(j))
+    NameExpr(x)
+    Block:1(
+      PassStmt:2())))
+
+[case testForAndTrailingCommaAfterIndexVar]
+for i, in x:
+    pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    TupleExpr:1(
+      NameExpr(i))
+    NameExpr(x)
+    Block:1(
+      PassStmt:2())))
+
+[case testListComprehensionAndTrailingCommaAfterIndexVar]
+x = [a for b, in c]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    ListComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(a)
+        TupleExpr:1(
+          NameExpr(b))
+        NameExpr(c)))))
+
+[case testForAndTrailingCommaAfterIndexVars]
+for i, j, in x:
+    pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    TupleExpr:1(
+      NameExpr(i)
+      NameExpr(j))
+    NameExpr(x)
+    Block:1(
+      PassStmt:2())))
+
+[case testGeneratorWithCondition]
+(x for y in z if 0)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    GeneratorExpr:1(
+      NameExpr(x)
+      NameExpr(y)
+      NameExpr(z)
+      IntExpr(0))))
+
+[case testListComprehensionWithCondition]
+raise [x for y in z if 0]
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    ListComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(x)
+        NameExpr(y)
+        NameExpr(z)
+        IntExpr(0)))))
+
+[case testListComprehensionWithConditions]
+raise [x for y in z if 0 if 1]
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    ListComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(x)
+        NameExpr(y)
+        NameExpr(z)
+        IntExpr(0)
+        IntExpr(1)))))
+
+[case testListComprehensionWithCrazyConditions]
+raise [x for y in z if (1 if 2 else 3) if 1]
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    ListComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(x)
+        NameExpr(y)
+        NameExpr(z)
+        ConditionalExpr:1(
+          Condition(
+            IntExpr(2))
+          IntExpr(1)
+          IntExpr(3))
+        IntExpr(1)))))
+
+[case testDictionaryComprehension]
+a = {x: y for x, y in xys}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    DictionaryComprehension:1(
+      NameExpr(x)
+      NameExpr(y)
+      TupleExpr:1(
+        NameExpr(x)
+        NameExpr(y))
+      NameExpr(xys))))
+
+[case testDictionaryComprehensionComplex]
+a = {x: y for x, y in xys for p, q in pqs if c}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    DictionaryComprehension:1(
+      NameExpr(x)
+      NameExpr(y)
+      TupleExpr:1(
+        NameExpr(x)
+        NameExpr(y))
+      TupleExpr:1(
+        NameExpr(p)
+        NameExpr(q))
+      NameExpr(xys)
+      NameExpr(pqs)
+      NameExpr(c))))
+
+[case testSetComprehension]
+a = {i for i in l}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    SetComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(i)
+        NameExpr(i)
+        NameExpr(l)))))
+
+[case testSetComprehensionComplex]
+a = {x + p for x in xys for p in pqs if c}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    SetComprehension:1(
+      GeneratorExpr:1(
+        OpExpr:1(
+          +
+          NameExpr(x)
+          NameExpr(p))
+        NameExpr(x)
+        NameExpr(p)
+        NameExpr(xys)
+        NameExpr(pqs)
+        NameExpr(c)))))
+
+[case testWithStatement]
+with open('foo') as f:
+  pass
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      CallExpr:1(
+        NameExpr(open)
+        Args(
+          StrExpr(foo))))
+    Target(
+      NameExpr(f))
+    Block:1(
+      PassStmt:2())))
+
+[case testWithStatementWithoutTarget]
+with foo:
+  pass
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(foo))
+    Block:1(
+      PassStmt:2())))
+
+[case testHexOctBinLiterals]
+0xa, 0Xaf, 0o7, 0O12, 0b1, 0B101
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1(
+      IntExpr(10)
+      IntExpr(175)
+      IntExpr(7)
+      IntExpr(10)
+      IntExpr(1)
+      IntExpr(5))))
+
+[case testImportFromWithParens]
+from x import (y)
+from x import (y,
+               z)
+[out]
+MypyFile:1(
+  ImportFrom:1(x, [y])
+  ImportFrom:2(x, [y, z]))
+
+[case testContinueStmt]
+while 1:
+  continue
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(1)
+    Block:1(
+      ContinueStmt:2())))
+
+[case testStrLiteralConcatenate]
+'f' 'bar'
+('x'
+ 'y'
+ 'z')
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(fbar))
+  ExpressionStmt:2(
+    StrExpr(xyz)))
+
+[case testCatchAllExcept]
+try:
+  1
+except:
+  pass
+try:
+  1
+except x:
+  pass
+except:
+  2
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        IntExpr(1)))
+    Block:3(
+      PassStmt:4()))
+  TryStmt:5(
+    Block:5(
+      ExpressionStmt:6(
+        IntExpr(1)))
+    NameExpr(x)
+    Block:7(
+      PassStmt:8())
+    Block:9(
+      ExpressionStmt:10(
+        IntExpr(2)))))
+
+[case testTryElse]
+try:
+  pass
+except x:
+  1
+else:
+  2
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    NameExpr(x)
+    Block:3(
+      ExpressionStmt:4(
+        IntExpr(1)))
+    Else(
+      ExpressionStmt:6(
+        IntExpr(2)))))
+
+[case testExceptWithMultipleTypes]
+try:
+  pass
+except (x, y):
+  pass
+except (a, b, c) as e:
+  pass
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    TupleExpr:3(
+      NameExpr(x)
+      NameExpr(y))
+    Block:3(
+      PassStmt:4())
+    TupleExpr:5(
+      NameExpr(a)
+      NameExpr(b)
+      NameExpr(c))
+    NameExpr(e)
+    Block:5(
+      PassStmt:6())))
+
+[case testNestedFunctions]
+def f():
+  def g():
+    pass
+def h() -> int:
+  def g() -> int:
+    pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      FuncDef:2(
+        g
+        Block:2(
+          PassStmt:3()))))
+  FuncDef:4(
+    h
+    def () -> int?
+    Block:4(
+      FuncDef:5(
+        g
+        def () -> int?
+        Block:5(
+          PassStmt:6())))))
+
+[case testStatementsAndDocStringsInClassBody]
+class A:
+  "doc string"
+  x = y
+  def f(self):
+    pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ExpressionStmt:2(
+      StrExpr(doc string))
+    AssignmentStmt:3(
+      NameExpr(x)
+      NameExpr(y))
+    FuncDef:4(
+      f
+      Args(
+        Var(self))
+      Block:4(
+        PassStmt:5()))))
+
+[case testSingleLineClass]
+class a: pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    a
+    PassStmt:1()))
+
+[case testDecorator]
+@property
+def f():
+  pass
+[out]
+MypyFile:1(
+  Decorator:1(
+    Var(f)
+    NameExpr(property)
+    FuncDef:2(
+      f
+      Block:2(
+        PassStmt:3()))))
+
+[case testComplexDecorator]
+@foo(bar, 1)
+@zar
+def f() -> int:
+  pass
+[out]
+MypyFile:1(
+  Decorator:1(
+    Var(f)
+    CallExpr:1(
+      NameExpr(foo)
+      Args(
+        NameExpr(bar)
+        IntExpr(1)))
+    NameExpr(zar)
+    FuncDef:3(
+      f
+      def () -> int?
+      Block:3(
+        PassStmt:4()))))
+
+[case testKeywordArgInCall]
+f(x=1)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    CallExpr:1(
+      NameExpr(f)
+      Args()
+      KwArgs(
+        x
+        IntExpr(1)))))
+
+[case testComplexKeywordArgs]
+f(x, y=1 or 2, z=y)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    CallExpr:1(
+      NameExpr(f)
+      Args(
+        NameExpr(x))
+      KwArgs(
+        y
+        OpExpr:1(
+          or
+          IntExpr(1)
+          IntExpr(2)))
+      KwArgs(
+        z
+        NameExpr(y)))))
+
+[case testChainedAssignment]
+x = z = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x)
+      NameExpr(z))
+    IntExpr(1)))
+
+[case testVarArgs]
+def f(x, *a): pass
+f(1, *2)
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    VarArg(
+      Var(a))
+    Block:1(
+      PassStmt:1()))
+  ExpressionStmt:2(
+    CallExpr:2(
+      NameExpr(f)
+      Args(
+        IntExpr(1)
+        IntExpr(2))
+      VarArg)))
+
+[case testVarArgWithType]
+def f(x: str, *a: int): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: str?, *a: int?) -> Any
+    VarArg(
+      Var(a))
+    Block:1(
+      PassStmt:1())))
+
+[case testDictVarArgs]
+def f(x, **a): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    DictVarArg(
+      Var(a))
+    Block:1(
+      PassStmt:1())))
+
+[case testBothVarArgs]
+def f(x, *a, **b): pass
+def g(*a, **b): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    VarArg(
+      Var(a))
+    DictVarArg(
+      Var(b))
+    Block:1(
+      PassStmt:1()))
+  FuncDef:2(
+    g
+    VarArg(
+      Var(a))
+    DictVarArg(
+      Var(b))
+    Block:2(
+      PassStmt:2())))
+
+[case testDictVarArgsWithType]
+def f(x: X, **a: A) -> None: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: X?, **a: A?) -> None?
+    DictVarArg(
+      Var(a))
+    Block:1(
+      PassStmt:1())))
+
+[case testCallDictVarArgs]
+f(**x)
+f(x, **y)
+f(*x, **y)
+f(x, *y, **z)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    CallExpr:1(
+      NameExpr(f)
+      Args()
+      DictVarArg(
+        NameExpr(x))))
+  ExpressionStmt:2(
+    CallExpr:2(
+      NameExpr(f)
+      Args(
+        NameExpr(x))
+      DictVarArg(
+        NameExpr(y))))
+  ExpressionStmt:3(
+    CallExpr:3(
+      NameExpr(f)
+      Args(
+        NameExpr(x))
+      VarArg
+      DictVarArg(
+        NameExpr(y))))
+  ExpressionStmt:4(
+    CallExpr:4(
+      NameExpr(f)
+      Args(
+        NameExpr(x)
+        NameExpr(y))
+      VarArg
+      DictVarArg(
+        NameExpr(z)))))
+
+[case testAssert]
+assert x == y
+[out]
+MypyFile:1(
+  AssertStmt:1(
+    ComparisonExpr:1(
+      ==
+      NameExpr(x)
+      NameExpr(y))))
+
+[case testYieldWithoutExpressions]
+def f():
+  yield
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ExpressionStmt:2(
+        YieldExpr:2()))))
+
+[case testConditionalExpression]
+x if y else z
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ConditionalExpr:1(
+      Condition(
+        NameExpr(y))
+      NameExpr(x)
+      NameExpr(z))))
+
+[case testConditionalExpressionInListComprehension]
+a = [x if y else z for a in b]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    ListComprehension:1(
+      GeneratorExpr:1(
+        ConditionalExpr:1(
+          Condition(
+            NameExpr(y))
+          NameExpr(x)
+          NameExpr(z))
+        NameExpr(a)
+        NameExpr(b)))))
+
+[case testConditionalExpressionInTuple]
+1 if 2 else 3, 4
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1(
+      ConditionalExpr:1(
+        Condition(
+          IntExpr(2))
+        IntExpr(1)
+        IntExpr(3))
+      IntExpr(4))))
+
+[case testSetLiteral]
+{x or y}
+{1, 2}
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    SetExpr:1(
+      OpExpr:1(
+        or
+        NameExpr(x)
+        NameExpr(y))))
+  ExpressionStmt:2(
+    SetExpr:2(
+      IntExpr(1)
+      IntExpr(2))))
+
+[case testSetLiteralWithExtraComma]
+{x,}
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    SetExpr:1(
+      NameExpr(x))))
+
+[case testImportAs]
+import x as y
+import x, z as y, a.b as c, d as d
+[out]
+MypyFile:1(
+  Import:1(x : y)
+  Import:2(x, z : y, a.b : c, d : d))
+
+[case testForAndElse]
+for x in y:
+  pass
+else:
+  x
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x)
+    NameExpr(y)
+    Block:1(
+      PassStmt:2())
+    Else(
+      ExpressionStmt:4(
+        NameExpr(x)))))
+
+[case testWhileAndElse]
+while x:
+  pass
+else:
+  y
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    NameExpr(x)
+    Block:1(
+      PassStmt:2())
+    Else(
+      ExpressionStmt:4(
+        NameExpr(y)))))
+
+[case testWithAndMultipleOperands]
+with x as y, a as b:
+  pass
+with x(), y():
+  pass
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(x))
+    Target(
+      NameExpr(y))
+    Expr(
+      NameExpr(a))
+    Target(
+      NameExpr(b))
+    Block:1(
+      PassStmt:2()))
+  WithStmt:3(
+    Expr(
+      CallExpr:3(
+        NameExpr(x)
+        Args()))
+    Expr(
+      CallExpr:3(
+        NameExpr(y)
+        Args()))
+    Block:3(
+      PassStmt:4())))
+
+[case testOperatorAssignment]
+x += 1
+x -= 1
+x *= 1
+x /= 1
+x //= 1
+x %= 1
+x **= 1
+x |= 1
+x &= 1
+x ^= 1
+x >>= 1
+x <<= 1
+[out]
+MypyFile:1(
+  OperatorAssignmentStmt:1(
+    +
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:2(
+    -
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:3(
+    *
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:4(
+    /
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:5(
+    //
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:6(
+    %
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:7(
+    **
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:8(
+    |
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:9(
+    &
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:10(
+    ^
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:11(
+    >>
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:12(
+    <<
+    NameExpr(x)
+    IntExpr(1)))
+
+[case testNestedClasses]
+class A:
+  class B:
+    pass
+  class C:
+    pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ClassDef:2(
+      B
+      PassStmt:3())
+    ClassDef:4(
+      C
+      PassStmt:5())))
+
+[case testTryWithExceptAndFinally]
+try:
+  pass
+except x:
+  x
+finally:
+  y
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    NameExpr(x)
+    Block:3(
+      ExpressionStmt:4(
+        NameExpr(x)))
+    Finally(
+      ExpressionStmt:6(
+        NameExpr(y)))))
+
+[case testBareAsteriskInFuncDef]
+def f(x, *, y=1): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    MaxPos(1)
+    Args(
+      Var(x)
+      Var(y))
+    Init(
+      AssignmentStmt:1(
+        NameExpr(y)
+        IntExpr(1)))
+    Block:1(
+      PassStmt:1())))
+
+[case testBareAsteriskInFuncDefWithSignature]
+def f(x: A, *, y: B = 1) -> None: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    MaxPos(1)
+    Args(
+      Var(x)
+      Var(y))
+    def (x: A?, *, y: B? =) -> None?
+    Init(
+      AssignmentStmt:1(
+        NameExpr(y)
+        IntExpr(1)))
+    Block:1(
+      PassStmt:1())))
+
+[case testBareAsteriskNamedDefault]
+def f(*, y: B = 1) -> None: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    MaxPos(0)
+    Args(
+      Var(y))
+    def (*, y: B? =) -> None?
+    Init(
+      AssignmentStmt:1(
+        NameExpr(y)
+        IntExpr(1)))
+    Block:1(
+      PassStmt:1())))
+
+[case testBareAsteriskNamedNoDefault]
+def f(*, y: B) -> None: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    MaxPos(0)
+    Args(
+      Var(y))
+    def (*, y: B?) -> None?
+    Block:1(
+      PassStmt:1())))
+
+[case testSuperExpr]
+super().x
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    SuperExpr:1(
+      x)))
+
+[case testKeywordAndDictArgs]
+f(x = y, **kwargs)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    CallExpr:1(
+      NameExpr(f)
+      Args()
+      KwArgs(
+        x
+        NameExpr(y))
+      DictVarArg(
+        NameExpr(kwargs)))))
+
+[case testSimpleFunctionType]
+f = None # type: Callable[[], None]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(f)
+    NameExpr(None)
+    Callable?[<TypeList >, None?]))
+
+[case testFunctionTypeWithArgument]
+f = None # type: Callable[[str], int]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(f)
+    NameExpr(None)
+    Callable?[<TypeList str?>, int?]))
+
+[case testFunctionTypeWithTwoArguments]
+f = None # type: Callable[[a[b], x.y], List[int]]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(f)
+    NameExpr(None)
+    Callable?[<TypeList a?[b?], x.y?>, List?[int?]]))
+
+[case testFunctionTypeWithExtraComma]
+def f(x: Callable[[str,], int]): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: Callable?[<TypeList str?>, int?]) -> Any
+    Block:1(
+      PassStmt:1())))
+
+[case testSimpleStringLiteralType]
+def f() -> 'A': pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?
+    Block:1(
+      PassStmt:1())))
+
+[case testGenericStringLiteralType]
+def f() -> 'A[B, C]': pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?[B?, C?]
+    Block:1(
+      PassStmt:1())))
+
+[case testPartialStringLiteralType]
+def f() -> A['B', C]: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?[B?, C?]
+    Block:1(
+      PassStmt:1())))
+
+[case testWhitespaceInStringLiteralType]
+def f() -> '  A  [  X  ]  ': pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?[X?]
+    Block:1(
+      PassStmt:1())))
+
+[case testEscapeInStringLiteralType]
+def f() -> '\x41': pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?
+    Block:1(
+      PassStmt:1())))
+
+[case testMetaclass]
+class Foo(metaclass=Bar): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    Metaclass(Bar)
+    PassStmt:1()))
+
+[case testQualifiedMetaclass]
+class Foo(metaclass=foo.Bar): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    Metaclass(foo.Bar)
+    PassStmt:1()))
+
+[case testBaseAndMetaclass]
+class Foo(foo.bar[x], metaclass=Bar): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    Metaclass(Bar)
+    BaseTypeExpr(
+      IndexExpr:1(
+        MemberExpr:1(
+          NameExpr(foo)
+          bar)
+        NameExpr(x)))
+    PassStmt:1()))
+
+[case testClassKeywordArgs]
+class Foo(_root=None): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    PassStmt:1()))
+
+[case testClassKeywordArgsBeforeMeta]
+class Foo(_root=None, metaclass=Bar): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    Metaclass(Bar)
+    PassStmt:1()))
+
+[case testClassKeywordArgsAfterMeta]
+class Foo(metaclass=Bar, _root=None): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    Metaclass(Bar)
+    PassStmt:1()))
+
+[case testNamesThatAreNoLongerKeywords]
+any = interface
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(any)
+    NameExpr(interface)))
+
+[case testFunctionOverload]
+@overload
+def f() -> x: pass
+@overload
+def f() -> y: pass
+[out]
+MypyFile:1(
+  OverloadedFuncDef:1(
+    Decorator:1(
+      Var(f)
+      NameExpr(overload)
+      FuncDef:2(
+        f
+        def () -> x?
+        Block:2(
+          PassStmt:2())))
+    Decorator:3(
+      Var(f)
+      NameExpr(overload)
+      FuncDef:4(
+        f
+        def () -> y?
+        Block:4(
+          PassStmt:4())))))
+
+[case testFunctionOverloadAndOtherStatements]
+x
+@overload
+def f() -> x: pass
+@overload
+def f() -> y: pass
+x
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(x))
+  OverloadedFuncDef:2(
+    Decorator:2(
+      Var(f)
+      NameExpr(overload)
+      FuncDef:3(
+        f
+        def () -> x?
+        Block:3(
+          PassStmt:3())))
+    Decorator:4(
+      Var(f)
+      NameExpr(overload)
+      FuncDef:5(
+        f
+        def () -> y?
+        Block:5(
+          PassStmt:5()))))
+  ExpressionStmt:6(
+    NameExpr(x)))
+
+[case testFunctionOverloadWithThreeVariants]
+@overload
+def f() -> x: pass
+@overload
+def f() -> y: pass
+@overload
+def f(y): pass
+[out]
+MypyFile:1(
+  OverloadedFuncDef:1(
+    Decorator:1(
+      Var(f)
+      NameExpr(overload)
+      FuncDef:2(
+        f
+        def () -> x?
+        Block:2(
+          PassStmt:2())))
+    Decorator:3(
+      Var(f)
+      NameExpr(overload)
+      FuncDef:4(
+        f
+        def () -> y?
+        Block:4(
+          PassStmt:4())))
+    Decorator:5(
+      Var(f)
+      NameExpr(overload)
+      FuncDef:6(
+        f
+        Args(
+          Var(y))
+        Block:6(
+          PassStmt:6())))))
+
+[case testDecoratorsThatAreNotOverloads]
+@foo
+def f() -> x: pass
+@foo
+def g() -> y: pass
+[out]
+MypyFile:1(
+  Decorator:1(
+    Var(f)
+    NameExpr(foo)
+    FuncDef:2(
+      f
+      def () -> x?
+      Block:2(
+        PassStmt:2())))
+  Decorator:3(
+    Var(g)
+    NameExpr(foo)
+    FuncDef:4(
+      g
+      def () -> y?
+      Block:4(
+        PassStmt:4()))))
+
+[case testFunctionOverloadWithinFunction]
+def f():
+    @overload
+    def g(): pass
+    @overload
+    def g() -> x: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      OverloadedFuncDef:2(
+        Decorator:2(
+          Var(g)
+          NameExpr(overload)
+          FuncDef:3(
+            g
+            Block:3(
+              PassStmt:3())))
+        Decorator:4(
+          Var(g)
+          NameExpr(overload)
+          FuncDef:5(
+            g
+            def () -> x?
+            Block:5(
+              PassStmt:5())))))))
+
+[case testCommentFunctionAnnotation]
+def f(): # type: () -> A
+  pass
+def g(x): # type: (A) -> B
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?
+    Block:1(
+      PassStmt:2()))
+  FuncDef:3(
+    g
+    Args(
+      Var(x))
+    def (x: A?) -> B?
+    Block:3(
+      PassStmt:4())))
+
+[case testCommentMethodAnnotation]
+class A:
+  def f(self): # type: () -> A
+    pass
+  def g(xself, x): # type: (A) -> B
+    pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self))
+      def (self: Any) -> A?
+      Block:2(
+        PassStmt:3()))
+    FuncDef:4(
+      g
+      Args(
+        Var(xself)
+        Var(x))
+      def (xself: Any, x: A?) -> B?
+      Block:4(
+        PassStmt:5()))))
+
+[case testCommentMethodAnnotationAndNestedFunction]
+class A:
+  def f(self): # type: () -> A
+    def g(x): # type: (A) -> B
+      pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self))
+      def (self: Any) -> A?
+      Block:2(
+        FuncDef:3(
+          g
+          Args(
+            Var(x))
+          def (x: A?) -> B?
+          Block:3(
+            PassStmt:4()))))))
+
+[case testCommentFunctionAnnotationOnSeparateLine]
+def f(x):
+  # type: (X) -> Y
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: X?) -> Y?
+    Block:1(
+      PassStmt:3())))
+
+[case testCommentFunctionAnnotationOnSeparateLine2]
+def f(x):
+
+     # type: (X) -> Y       # bar
+
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: X?) -> Y?
+    Block:1(
+      PassStmt:5())))
+
+[case testCommentFunctionAnnotationAndVarArg]
+def f(x, *y): # type: (X, *Y) -> Z
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: X?, *y: Y?) -> Z?
+    VarArg(
+      Var(y))
+    Block:1(
+      PassStmt:2())))
+
+[case testCommentFunctionAnnotationAndAllVarArgs]
+def f(x, *y, **z): # type: (X, *Y, **Z) -> A
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: X?, *y: Y?, **z: Z?) -> A?
+    VarArg(
+      Var(y))
+    DictVarArg(
+      Var(z))
+    Block:1(
+      PassStmt:2())))
+
+[case testClassDecorator]
+@foo
+class X: pass
+@foo(bar)
+@x.y
+class Z: pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    X
+    Decorators(
+      NameExpr(foo))
+    PassStmt:2())
+  ClassDef:3(
+    Z
+    Decorators(
+      CallExpr:3(
+        NameExpr(foo)
+        Args(
+          NameExpr(bar)))
+      MemberExpr:4(
+        NameExpr(x)
+        y))
+    PassStmt:5()))
+
+[case testTrailingSemicolon]
+def x():
+    pass;
+
+def y():
+    pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    x
+    Block:1(
+      PassStmt:2()))
+  FuncDef:4(
+    y
+    Block:4(
+      PassStmt:5())))
+
+[case testEmptySuperClass]
+class A():
+    pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    PassStmt:2()))
+
+[case testStarExpression]
+*a
+*a, b
+a, *b
+a, (*x, y)
+a, (x, *y)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StarExpr:1(
+      NameExpr(a)))
+  ExpressionStmt:2(
+    TupleExpr:2(
+      StarExpr:2(
+        NameExpr(a))
+      NameExpr(b)))
+  ExpressionStmt:3(
+    TupleExpr:3(
+      NameExpr(a)
+      StarExpr:3(
+        NameExpr(b))))
+  ExpressionStmt:4(
+    TupleExpr:4(
+      NameExpr(a)
+      TupleExpr:4(
+        StarExpr:4(
+          NameExpr(x))
+        NameExpr(y))))
+  ExpressionStmt:5(
+    TupleExpr:5(
+      NameExpr(a)
+      TupleExpr:5(
+        NameExpr(x)
+        StarExpr:5(
+          NameExpr(y))))))
+
+[case testStarExpressionParenthesis]
+*(a)
+*(a,b)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StarExpr:1(
+      NameExpr(a)))
+  ExpressionStmt:2(
+    StarExpr:2(
+      TupleExpr:2(
+        NameExpr(a)
+        NameExpr(b)))))
+
+[case testStarExpressionInFor]
+for *a in b:
+    pass
+
+for a, *b in c:
+    pass
+
+for *a, b in c:
+    pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    StarExpr:1(
+      NameExpr(a))
+    NameExpr(b)
+    Block:1(
+      PassStmt:2()))
+  ForStmt:4(
+    TupleExpr:4(
+      NameExpr(a)
+      StarExpr:4(
+        NameExpr(b)))
+    NameExpr(c)
+    Block:4(
+      PassStmt:5()))
+  ForStmt:7(
+    TupleExpr:7(
+      StarExpr:7(
+        NameExpr(a))
+      NameExpr(b))
+    NameExpr(c)
+    Block:7(
+      PassStmt:8())))
+
+[case testStarExprInGeneratorExpr]
+(x for y, *p in z)
+(x for *p, y in z)
+(x for y, *p, q in z)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    GeneratorExpr:1(
+      NameExpr(x)
+      TupleExpr:1(
+        NameExpr(y)
+        StarExpr:1(
+          NameExpr(p)))
+      NameExpr(z)))
+  ExpressionStmt:2(
+    GeneratorExpr:2(
+      NameExpr(x)
+      TupleExpr:2(
+        StarExpr:2(
+          NameExpr(p))
+        NameExpr(y))
+      NameExpr(z)))
+  ExpressionStmt:3(
+    GeneratorExpr:3(
+      NameExpr(x)
+      TupleExpr:3(
+        NameExpr(y)
+        StarExpr:3(
+          NameExpr(p))
+        NameExpr(q))
+      NameExpr(z))))
+
+[case testParseNamedtupleBaseclass]
+class A(namedtuple('x', ['y'])): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    BaseTypeExpr(
+      CallExpr:1(
+        NameExpr(namedtuple)
+        Args(
+          StrExpr(x)
+          ListExpr:1(
+            StrExpr(y)))))
+    PassStmt:1()))
+
+[case testEllipsis]
+...
+a[1,...,2]
+....__class__
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    Ellipsis)
+  ExpressionStmt:2(
+    IndexExpr:2(
+      NameExpr(a)
+      TupleExpr:2(
+        IntExpr(1)
+        Ellipsis
+        IntExpr(2))))
+  ExpressionStmt:3(
+    MemberExpr:3(
+      Ellipsis
+      __class__)))
+
+[case testFunctionWithManyKindsOfArgs]
+def f(x, *args,  y=None, **kw): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    MaxPos(1)
+    Args(
+      Var(x)
+      Var(y))
+    Init(
+      AssignmentStmt:1(
+        NameExpr(y)
+        NameExpr(None)))
+    VarArg(
+      Var(args))
+    DictVarArg(
+      Var(kw))
+    Block:1(
+      PassStmt:1())))
+
+[case testIfWithSemicolons]
+if 1: a; b
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      ExpressionStmt:1(
+        NameExpr(a))
+      ExpressionStmt:1(
+        NameExpr(b)))))
+
+[case testIfWithSemicolonsNested]
+while 2:
+    if 1: a; b
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(2)
+    Block:1(
+      IfStmt:2(
+        If(
+          IntExpr(1))
+        Then(
+          ExpressionStmt:2(
+            NameExpr(a))
+          ExpressionStmt:2(
+            NameExpr(b)))))))
+
+[case testIfElseWithSemicolons]
+if 1: global x; y = 1
+else: x = 1; return 3
+4
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      GlobalDecl:1(
+        x)
+      AssignmentStmt:1(
+        NameExpr(y)
+        IntExpr(1)))
+    Else(
+      AssignmentStmt:2(
+        NameExpr(x)
+        IntExpr(1))
+      ReturnStmt:2(
+        IntExpr(3))))
+  ExpressionStmt:3(
+    IntExpr(4)))
+
+[case testIfElseWithSemicolonsNested]
+while 2:
+    if 1: global x; y = 1
+    else: x = 1; return 3
+4
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(2)
+    Block:1(
+      IfStmt:2(
+        If(
+          IntExpr(1))
+        Then(
+          GlobalDecl:2(
+            x)
+          AssignmentStmt:2(
+            NameExpr(y)
+            IntExpr(1)))
+        Else(
+          AssignmentStmt:3(
+            NameExpr(x)
+            IntExpr(1))
+          ReturnStmt:3(
+            IntExpr(3))))))
+  ExpressionStmt:4(
+    IntExpr(4)))
+
+[case testKeywordArgumentAfterStarArgumentInCall]
+f(x=1, *y)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    CallExpr:1(
+      NameExpr(f)
+      Args(
+        NameExpr(y))
+      VarArg
+      KwArgs(
+        x
+        IntExpr(1)))))
+
+[case testConditionalExpressionInSetComprehension]
+{ 1 if x else 2 for x in y }
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    SetComprehension:1(
+      GeneratorExpr:1(
+        ConditionalExpr:1(
+          Condition(
+            NameExpr(x))
+          IntExpr(1)
+          IntExpr(2))
+        NameExpr(x)
+        NameExpr(y)))))
+
+[case testConditionalExpressionInListComprehension]
+a = [ 1 if x else 2 for x in y ]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    ListComprehension:1(
+      GeneratorExpr:1(
+        ConditionalExpr:1(
+          Condition(
+            NameExpr(x))
+          IntExpr(1)
+          IntExpr(2))
+        NameExpr(x)
+        NameExpr(y)))))
+
+[case testComplexWithLvalue]
+with x as y.z: pass
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(x))
+    Target(
+      MemberExpr:1(
+        NameExpr(y)
+        z))
+    Block:1(
+      PassStmt:1())))
+
+[case testRelativeImportWithEllipsis]
+from ... import x
+[out]
+MypyFile:1(
+  ImportFrom:1(..., [x]))
+
+[case testRelativeImportWithEllipsis2]
+from .... import x
+[out]
+MypyFile:1(
+  ImportFrom:1(...., [x]))
+
+[case testParseExtendedSlicing]
+a[:, :]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IndexExpr:1(
+      NameExpr(a)
+      TupleExpr:-1(
+        SliceExpr:-1(
+          <empty>
+          <empty>)
+        SliceExpr:-1(
+          <empty>
+          <empty>)))))
+
+[case testParseExtendedSlicing2]
+a[1:2:, :,]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IndexExpr:1(
+      NameExpr(a)
+      TupleExpr:-1(
+        SliceExpr:-1(
+          IntExpr(1)
+          IntExpr(2))
+        SliceExpr:-1(
+          <empty>
+          <empty>)))))
+
+[case testParseExtendedSlicing3]
+a[1:2:3, ..., 1]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IndexExpr:1(
+      NameExpr(a)
+      TupleExpr:-1(
+        SliceExpr:-1(
+          IntExpr(1)
+          IntExpr(2)
+          IntExpr(3))
+        Ellipsis
+        IntExpr(1)))))
+
+[case testParseIfExprInDictExpr]
+test =  { 'spam': 'eggs' if True else 'bacon' }
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(test)
+    DictExpr:1(
+      StrExpr(spam)
+      ConditionalExpr:1(
+        Condition(
+          NameExpr(True))
+        StrExpr(eggs)
+        StrExpr(bacon)))))
+
+[case testIgnoreLine]
+import x # type: ignore
+[out]
+MypyFile:1(
+  Import:1(x)
+  IgnoredLines(1))
+
+[case testIgnore2Lines]
+x
+y # type: ignore
+z # type: ignore
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(x))
+  ExpressionStmt:2(
+    NameExpr(y))
+  ExpressionStmt:3(
+    NameExpr(z))
+  IgnoredLines(2, 3))
+
+[case testCommentedOutIgnoreAnnotation]
+y ## type: ignore
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(y)))
+
+[case testInvalidIgnoreAnnotations]
+y # type: ignored
+y # type: IGNORE
+y # type : ignore
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(y))
+  ExpressionStmt:2(
+    NameExpr(y))
+  ExpressionStmt:3(
+    NameExpr(y)))
+
+[case testSpaceInIgnoreAnnotations]
+y #  type:  ignore    # foo
+y #type:ignore
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(y))
+  ExpressionStmt:2(
+    NameExpr(y))
+  IgnoredLines(1, 2))
+
+[case testIgnoreAnnotationAndMultilineStatement]
+x = {
+  1: 2  # type: ignore
+}
+y = {   # type: ignore
+  1: 2
+}       # type: ignore
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    DictExpr:1(
+      IntExpr(1)
+      IntExpr(2)))
+  AssignmentStmt:4(
+    NameExpr(y)
+    DictExpr:4(
+      IntExpr(1)
+      IntExpr(2)))
+  IgnoredLines(2, 4, 6))
+
+[case testIgnoreAnnotationAndMultilineStatement2]
+from m import ( # type: ignore
+  x, y
+)
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [x, y])
+  IgnoredLines(1))
+
+[case testYieldExpression]
+def f():
+    x = yield f()
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x)
+        YieldExpr:2(
+          CallExpr:2(
+            NameExpr(f)
+            Args()))))))
+
+[case testForWithSingleItemTuple]
+for x in 1,: pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x)
+    TupleExpr:1(
+      IntExpr(1))
+    Block:1(
+      PassStmt:1())))
+
+[case testIsoLatinUnixEncoding]
+# coding: iso-latin-1-unix
+[out]
+MypyFile:1()
+
+[case testLatinUnixEncoding]
+# coding: latin-1-unix
+[out]
+MypyFile:1()
+
+[case testLatinUnixEncoding]
+# coding: iso-latin-1
+[out]
+MypyFile:1()
+
+[case testYieldExpressionInParens]
+def f():
+    (yield)
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ExpressionStmt:2(
+        YieldExpr:2()))))
diff --git a/test-data/unit/plugins/attrhook.py b/test-data/unit/plugins/attrhook.py
new file mode 100644
index 0000000..d94a5d6
--- /dev/null
+++ b/test-data/unit/plugins/attrhook.py
@@ -0,0 +1,21 @@
+from typing import Optional, Callable
+
+from mypy.plugin import Plugin, AttributeContext
+from mypy.types import Type, Instance
+
+
+class AttrPlugin(Plugin):
+    def get_attribute_hook(self, fullname: str) -> Optional[Callable[[AttributeContext], Type]]:
+        if fullname == 'm.Signal.__call__':
+            return signal_call_callback
+        return None
+
+
+def signal_call_callback(ctx: AttributeContext) -> Type:
+    if isinstance(ctx.type, Instance) and ctx.type.type.fullname() == 'm.Signal':
+        return ctx.type.args[0]
+    return ctx.inferred_attr_type
+
+
+def plugin(version):
+    return AttrPlugin
diff --git a/test-data/unit/plugins/badreturn.py b/test-data/unit/plugins/badreturn.py
new file mode 100644
index 0000000..fd74306
--- /dev/null
+++ b/test-data/unit/plugins/badreturn.py
@@ -0,0 +1,2 @@
+def plugin(version):
+    pass
diff --git a/test-data/unit/plugins/badreturn2.py b/test-data/unit/plugins/badreturn2.py
new file mode 100644
index 0000000..c7e0447
--- /dev/null
+++ b/test-data/unit/plugins/badreturn2.py
@@ -0,0 +1,5 @@
+class MyPlugin:
+    pass
+
+def plugin(version):
+    return MyPlugin
diff --git a/test-data/unit/plugins/fnplugin.py b/test-data/unit/plugins/fnplugin.py
new file mode 100644
index 0000000..684d634
--- /dev/null
+++ b/test-data/unit/plugins/fnplugin.py
@@ -0,0 +1,14 @@
+from mypy.plugin import Plugin
+
+class MyPlugin(Plugin):
+    def get_function_hook(self, fullname):
+        if fullname == '__main__.f':
+            return my_hook
+        assert fullname is not None
+        return None
+
+def my_hook(ctx):
+    return ctx.api.named_generic_type('builtins.int', [])
+
+def plugin(version):
+    return MyPlugin
diff --git a/test-data/unit/plugins/named_callable.py b/test-data/unit/plugins/named_callable.py
new file mode 100644
index 0000000..e40d181
--- /dev/null
+++ b/test-data/unit/plugins/named_callable.py
@@ -0,0 +1,28 @@
+from mypy.plugin import Plugin
+from mypy.types import CallableType
+
+
+class MyPlugin(Plugin):
+    def get_function_hook(self, fullname):
+        if fullname == 'm.decorator1':
+            return decorator_call_hook
+        if fullname == 'm._decorated':  # This is a dummy name generated by the plugin
+            return decorate_hook
+        return None
+
+
+def decorator_call_hook(ctx):
+    if isinstance(ctx.default_return_type, CallableType):
+        return ctx.default_return_type.copy_modified(name='m._decorated')
+    return ctx.default_return_type
+
+
+def decorate_hook(ctx):
+    if isinstance(ctx.default_return_type, CallableType):
+        return ctx.default_return_type.copy_modified(
+            ret_type=ctx.api.named_generic_type('builtins.str', []))
+    return ctx.default_return_type
+
+
+def plugin(version):
+    return MyPlugin
diff --git a/test-data/unit/plugins/noentry.py b/test-data/unit/plugins/noentry.py
new file mode 100644
index 0000000..c591ad1
--- /dev/null
+++ b/test-data/unit/plugins/noentry.py
@@ -0,0 +1 @@
+# empty plugin
diff --git a/test-data/unit/plugins/plugin2.py b/test-data/unit/plugins/plugin2.py
new file mode 100644
index 0000000..b530a62
--- /dev/null
+++ b/test-data/unit/plugins/plugin2.py
@@ -0,0 +1,13 @@
+from mypy.plugin import Plugin
+
+class Plugin2(Plugin):
+    def get_function_hook(self, fullname):
+        if fullname in ('__main__.f', '__main__.g'):
+            return str_hook
+        return None
+
+def str_hook(ctx):
+    return ctx.api.named_generic_type('builtins.str', [])
+
+def plugin(version):
+    return Plugin2
diff --git a/test-data/unit/plugins/type_anal_hook.py b/test-data/unit/plugins/type_anal_hook.py
new file mode 100644
index 0000000..0e7a0ee
--- /dev/null
+++ b/test-data/unit/plugins/type_anal_hook.py
@@ -0,0 +1,37 @@
+from typing import Optional, Callable
+
+from mypy.plugin import Plugin, AnalyzeTypeContext
+from mypy.types import Type, UnboundType, TypeList, AnyType, NoneTyp, CallableType
+
+
+class TypeAnalyzePlugin(Plugin):
+    def get_type_analyze_hook(self, fullname: str
+                              ) -> Optional[Callable[[AnalyzeTypeContext], Type]]:
+        if fullname == 'm.Signal':
+            return signal_type_analyze_callback
+        return None
+
+
+def signal_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:
+    if (len(ctx.type.args) != 1
+            or not isinstance(ctx.type.args[0], TypeList)):
+        ctx.api.fail('Invalid "Signal" type (expected "Signal[[t, ...]]")', ctx.context)
+        return AnyType()
+
+    args = ctx.type.args[0]
+    assert isinstance(args, TypeList)
+    analyzed = ctx.api.analyze_callable_args(args)
+    if analyzed is None:
+        return AnyType()  # Error generated elsewhere
+    arg_types, arg_kinds, arg_names = analyzed
+    arg_types = [ctx.api.analyze_type(arg) for arg in arg_types]
+    type_arg = CallableType(arg_types,
+                            arg_kinds,
+                            arg_names,
+                            NoneTyp(),
+                            ctx.api.named_type('builtins.function', []))
+    return ctx.api.named_type('m.Signal', [type_arg])
+
+
+def plugin(version):
+    return TypeAnalyzePlugin
diff --git a/test-data/unit/python2eval.test b/test-data/unit/python2eval.test
new file mode 100644
index 0000000..b750de8
--- /dev/null
+++ b/test-data/unit/python2eval.test
@@ -0,0 +1,474 @@
+-- Test cases for type checking mypy programs using full stubs and running
+-- using CPython (Python 2 mode).
+--
+-- These are mostly regression tests -- no attempt is made to make these
+-- complete.
+
+
+[case testAbs2_python2]
+n = None  # type: int
+f = None  # type: float
+n = abs(1)
+abs(1) + 'x'  # Error
+f = abs(1.1)
+abs(1.1) + 'x'  # Error
+[out]
+_program.py:4: error: Unsupported operand types for + ("int" and "str")
+_program.py:6: error: Unsupported operand types for + ("float" and "str")
+
+[case testUnicode_python2]
+x = unicode('xyz', 'latin1')
+print x
+x = u'foo'
+print repr(x)
+[out]
+xyz
+u'foo'
+
+[case testXrangeAndRange_python2]
+for i in xrange(2):
+    print i
+for i in range(3):
+    print i
+[out]
+0
+1
+0
+1
+2
+
+[case testIterator_python2]
+import typing, sys
+x = iter('bar')
+print x.next(), x.next()
+[out]
+b a
+
+[case testEncodeAndDecode_python2]
+print 'a'.encode('latin1')
+print 'b'.decode('latin1')
+print u'c'.encode('latin1')
+print u'd'.decode('latin1')
+[out]
+a
+b
+c
+d
+
+[case testHasKey_python2]
+d = {1: 'x'}
+print d.has_key(1)
+print d.has_key(2)
+[out]
+True
+False
+
+[case testIntegerDivision_python2]
+x = 1 / 2
+x()
+[out]
+_program.py:2: error: "int" not callable
+
+[case testFloatDivision_python2]
+x = 1.0 / 2.0
+x = 1.0 / 2
+x = 1 / 2.0
+x = 1.5
+[out]
+
+[case testAnyStr_python2]
+from typing import AnyStr
+def f(x): # type: (AnyStr) -> AnyStr
+    if isinstance(x, str):
+        return 'foo'
+    else:
+        return u'zar'
+print f('')
+print f(u'')
+[out]
+foo
+zar
+
+[case testGenericPatterns_python2]
+from typing import Pattern
+import re
+p = None  # type: Pattern[unicode]
+p = re.compile(u'foo*')
+b = None  # type: Pattern[str]
+b = re.compile('foo*')
+print(p.match(u'fooo').group(0))
+[out]
+fooo
+
+[case testGenericMatch_python2]
+from typing import Match
+import re
+def f(m): # type: (Match[str]) -> None
+    print(m.group(0))
+f(re.match('x*', 'xxy'))
+[out]
+xx
+
+[case testVariableLengthTuple_python2]
+from typing import Tuple, cast
+x = cast(Tuple[int, ...], ())
+print(x)
+[out]
+()
+
+[case testFromFuturePrintFunction_python2]
+from __future__ import print_function
+print('a', 'b')
+[out]
+a b
+
+[case testFromFutureImportUnicodeLiterals_python2]
+from __future__ import unicode_literals
+print '>', ['a', b'b', u'c']
+[out]
+> [u'a', 'b', u'c']
+
+[case testUnicodeLiteralsKwargs_python2]
+from __future__ import unicode_literals
+def f(**kwargs):  # type: (...) -> None
+  pass
+params = {'a': 'b'}
+f(**params)
+[out]
+
+[case testUnicodeStringKwargs_python2]
+def f(**kwargs):  # type: (...) -> None
+  pass
+params = {u'a': 'b'}
+f(**params)
+[out]
+
+[case testStrKwargs_python2]
+def f(**kwargs):  # type: (...) -> None
+  pass
+params = {'a': 'b'}
+f(**params)
+[out]
+
+[case testFromFutureImportUnicodeLiterals2_python2]
+from __future__ import unicode_literals
+def f(x): # type: (str) -> None
+  pass
+f(b'')
+f(u'')
+f('')
+[out]
+_program.py:5: error: Argument 1 to "f" has incompatible type "unicode"; expected "str"
+_program.py:6: error: Argument 1 to "f" has incompatible type "unicode"; expected "str"
+
+[case testStrUnicodeCompatibility_python2]
+import typing
+def f(s): # type: (unicode) -> None
+    pass
+f(u'')
+f('')
+[out]
+
+[case testStrUnicodeCompatibilityInBuiltins_python2]
+import typing
+'x'.count('x')
+'x'.count(u'x')
+[out]
+
+[case testTupleAsSubtypeOfSequence_python2]
+from typing import TypeVar, Sequence
+T = TypeVar('T')
+def f(a): # type: (Sequence[T]) -> None
+    print a
+f(tuple())
+[out]
+()
+
+[case testReadOnlyProperty_python2]
+import typing
+class A:
+    @property
+    def foo(self): # type: () -> int
+        return 1
+print(A().foo + 2)
+[out]
+3
+
+[case testIOTypes_python2]
+from typing import IO, TextIO, BinaryIO, Any
+class X(IO[str]): pass
+class Y(TextIO): pass
+class Z(BinaryIO): pass
+[out]
+
+[case testOpenReturnType_python2]
+import typing
+f = open('/tmp/xyz', 'w')
+f.write(u'foo')
+f.write('bar')
+f.close()
+[out]
+_program.py:3: error: Argument 1 to "write" of "IO" has incompatible type "unicode"; expected "str"
+
+[case testPrintFunctionWithFileArg_python2]
+from __future__ import print_function
+import typing
+if 1 == 2: # Don't want to run the code below, since it would create a file.
+    f = open('/tmp/xyz', 'w')
+    print('foo', file=f)
+    f.close()
+print('ok')
+[out]
+ok
+
+[case testStringIO_python2]
+import typing
+import io
+c = io.StringIO()
+c.write(u'\x89')
+print(repr(c.getvalue()))
+[out]
+u'\x89'
+
+[case testBytesIO_python2]
+import typing
+import io
+c = io.BytesIO()
+c.write('\x89')
+print(repr(c.getvalue()))
+[out]
+'\x89'
+
+[case testTextIOWrapper_python2]
+import typing
+import io
+b = io.BytesIO(u'\xab'.encode('utf8'))
+w = io.TextIOWrapper(b, encoding='utf8')
+print(repr(w.read()))
+[out]
+u'\xab'
+
+[case testIoOpen_python2]
+import typing
+import io
+if 1 == 2: # Only type check, do not execute
+    f = io.open('/tmp/xyz', 'w', encoding='utf8')
+    f.write(u'\xab')
+    f.close()
+print 'ok'
+[out]
+ok
+
+[case testUnionType_python2]
+from typing import Union
+y = None  # type: Union[int, str]
+def f(x): # type: (Union[int, str]) -> str
+    if isinstance(x, int):
+        x = str(x)
+    return x
+print f(12)
+print f('ab')
+[out]
+12
+ab
+
+[case testStrAdd_python2]
+import typing
+s = ''
+u = u''
+n = 0
+n = s + '' # E
+s = s + u'' # E
+[out]
+_program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+_program.py:6: error: Incompatible types in assignment (expression has type "unicode", variable has type "str")
+
+[case testStrJoin_python2]
+import typing
+s = ''
+u = u''
+n = 0
+n = ''.join([''])   # Error
+s = ''.join([u''])  # Error
+[out]
+_program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+_program.py:6: error: Incompatible types in assignment (expression has type "unicode", variable has type "str")
+
+[case testNamedTuple_python2]
+import typing
+from collections import namedtuple
+X = namedtuple('X', ['a', 'b'])
+x = X(a=1, b='s')
+print x.a, x.b
+[out]
+1 s
+
+[case testNamedTupleError_python2]
+import typing
+from collections import namedtuple
+X = namedtuple('X', ['a', 'b'])
+x = X(a=1, b='s')
+x.c
+[out]
+_program.py:5: error: "X" has no attribute "c"
+
+[case testAssignToComplexReal_python2]
+import typing
+x = 4j
+y = x.real
+y = x         # Error
+x.imag = 2.0  # Error
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "complex", variable has type "float")
+_program.py:5: error: Property "imag" defined in "complex" is read-only
+
+[case testComplexArithmetic_python2]
+import typing
+print 5 + 8j
+print 3j * 2.0
+print 4j / 2.0
+[out]
+(5+8j)
+6j
+2j
+
+[case testNamedTupleWithTypes_python2]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int), ('b', str)])
+n = N(1, 'x')
+print n
+a, b = n
+print a, b
+print n[0]
+[out]
+N(a=1, b='x')
+1 x
+1
+
+[case testUnionTypeAlias_python2]
+from typing import Union
+U = Union[int, str]
+u = 1 # type: U
+u = 1.1
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "float", variable has type "Union[int, str]")
+
+[case testSuperNew_python2]
+from typing import Dict, Any
+class MyType(type):
+    def __new__(cls, name, bases, namespace):
+        # type: (str, tuple, Dict[str, Any]) -> type
+        return super(MyType, cls).__new__(cls, name + 'x', bases, namespace)
+class A(object):
+    __metaclass__ = MyType
+print(type(A()).__name__)
+[out]
+Ax
+
+[case testSequenceIndexAndCount_python2]
+from typing import Sequence
+def f(x): # type: (Sequence[int]) -> None
+    print(x.index(1))
+    print(x.count(1))
+f([0, 0, 1, 1, 1])
+[out]
+2
+3
+
+[case testOptional_python2]
+from typing import Optional
+def f(): # type: () -> Optional[int]
+    pass
+x = f()
+y = 1
+y = x
+
+[case testUnicodeAndOverloading_python2]
+from m import f
+f(1)
+f('')
+f(u'')
+f(b'')
+[file m.pyi]
+from typing import overload
+@overload
+def f(x): # type: (unicode) -> int
+  pass
+@overload
+def f(x): # type: (bytearray) -> int
+  pass
+[out]
+_program.py:2: error: No overload variant of "f" matches argument types [builtins.int]
+
+[case testByteArrayStrCompatibility_python2]
+def f(x): # type: (str) -> None
+    pass
+f(bytearray('foo'))
+
+[case testAbstractProperty_python2]
+from abc import abstractproperty, ABCMeta
+class A:
+    __metaclass__ = ABCMeta
+    @abstractproperty
+    def x(self): # type: () -> int
+        pass
+class B(A):
+    @property
+    def x(self): # type: () -> int
+        return 3
+b = B()
+print b.x + 1
+[out]
+4
+
+[case testReModuleBytesPython2]
+# Regression tests for various overloads in the re module -- bytes version
+import re
+if False:
+    bre = b'a+'
+    bpat = re.compile(bre)
+    bpat = re.compile(bpat)
+    re.search(bre, b'').groups()
+    re.search(bre, u'')
+    re.search(bpat, b'').groups()
+    re.search(bpat, u'')
+    # match(), split(), findall(), finditer() are much the same, so skip those.
+    # sub(), subn() have more overloads and we are checking these:
+    re.sub(bre, b'', b'') + b''
+    re.sub(bpat, b'', b'') + b''
+    re.sub(bre, lambda m: b'', b'') + b''
+    re.sub(bpat, lambda m: b'', b'') + b''
+    re.subn(bre, b'', b'')[0] + b''
+    re.subn(bpat, b'', b'')[0] + b''
+    re.subn(bre, lambda m: b'', b'')[0] + b''
+    re.subn(bpat, lambda m: b'', b'')[0] + b''
+[out]
+
+[case testReModuleStringPython2]
+# Regression tests for various overloads in the re module -- string version
+import re
+ure = u'a+'
+upat = re.compile(ure)
+upat = re.compile(upat)
+re.search(ure, u'a').groups()
+re.search(ure, b'') # This ought to be an error, but isn't because of bytes->unicode equivalence
+re.search(upat, u'a').groups()
+re.search(upat, b'') # This ought to be an error, but isn't because of bytes->unicode equivalence
+# match(), split(), findall(), finditer() are much the same, so skip those.
+# sub(), subn() have more overloads and we are checking these:
+re.sub(ure, u'', u'') + u''
+re.sub(upat, u'', u'') + u''
+re.sub(ure, lambda m: u'', u'') + u''
+re.sub(upat, lambda m: u'', u'') + u''
+re.subn(ure, u'', u'')[0] + u''
+re.subn(upat, u'', u'')[0] + u''
+re.subn(ure, lambda m: u'', u'')[0] + u''
+re.subn(upat, lambda m: u'', u'')[0] + u''
+[out]
+
+[case testYieldRegressionTypingAwaitable_python2]
+# Make sure we don't reference typing.Awaitable in Python 2 mode.
+def g():  # type: () -> int
+    yield
+[out]
+_program.py:2: error: The return type of a generator function should be "Generator" or one of its supertypes
diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test
new file mode 100644
index 0000000..6d16903
--- /dev/null
+++ b/test-data/unit/pythoneval-asyncio.test
@@ -0,0 +1,486 @@
+-- Test cases for type checking mypy programs using full stubs and running
+-- using CPython.
+--
+-- These are mostly regression tests -- no attempt is made to make these
+-- complete.
+--
+-- This test file checks asyncio and yield from interaction
+
+[case testImportAsyncio]
+import asyncio
+print('Imported')
+[out]
+Imported
+
+[case testSimpleCoroutineSleep]
+from typing import Any, Generator
+import asyncio
+from asyncio import Future
+
+@asyncio.coroutine
+def greet_every_two_seconds() -> 'Generator[Any, None, None]':
+    n = 0
+    while n < 5:
+        print('Prev', n)
+        yield from asyncio.sleep(0.1)
+        print('After', n)
+        n += 1
+
+loop = asyncio.get_event_loop()
+try:
+    loop.run_until_complete(greet_every_two_seconds())
+finally:
+    loop.close()
+[out]
+Prev 0
+After 0
+Prev 1
+After 1
+Prev 2
+After 2
+Prev 3
+After 3
+Prev 4
+After 4
+
+[case testCoroutineCallingOtherCoroutine]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+@asyncio.coroutine
+def compute(x: int, y: int) -> 'Generator[Any, None, int]':
+    print("Compute %s + %s ..." % (x, y))
+    yield from asyncio.sleep(0.1)
+    return x + y   # Here the int is wrapped in Future[int]
+
+@asyncio.coroutine
+def print_sum(x: int, y: int) -> 'Generator[Any, None, None]':
+    result = yield from compute(x, y)  # The type of result will be int (it is extracted from Future[int])
+    print("%s + %s = %s" % (x, y, result))
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(print_sum(1, 2))
+loop.close()
+[out]
+Compute 1 + 2 ...
+1 + 2 = 3
+
+[case testCoroutineChangingFuture]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+@asyncio.coroutine
+def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(0.1)
+    future.set_result('Future is done!')
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))
+loop.run_until_complete(future)
+print(future.result())
+loop.close()
+[out]
+Future is done!
+
+[case testFunctionAssignedAsCallback]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future, AbstractEventLoop
+
+@asyncio.coroutine
+def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(1)
+    future.set_result('Callback works!')
+
+def got_result(future: 'Future[str]') -> None:
+    print(future.result())
+    loop.stop()
+
+loop = asyncio.get_event_loop() # type: AbstractEventLoop
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))  # Here we create a task with the function. (The Task needs a Future[T] as its first argument)
+future.add_done_callback(got_result)  # and assign the callback to the future
+try:
+    loop.run_forever()
+finally:
+    loop.close()
+[out]
+Callback works!
+
+[case testMultipleTasks]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Task, Future
+@asyncio.coroutine
+def factorial(name, number) -> 'Generator[Any, None, None]':
+    f = 1
+    for i in range(2, number+1):
+        print("Task %s: Compute factorial(%s)..." % (name, i))
+        yield from asyncio.sleep(0.1)
+        f *= i
+    print("Task %s: factorial(%s) = %s" % (name, number, f))
+
+loop = asyncio.get_event_loop()
+tasks = [
+    asyncio.Task(factorial("A", 2)),
+    asyncio.Task(factorial("B", 3)),
+    asyncio.Task(factorial("C", 4))]
+loop.run_until_complete(asyncio.wait(tasks))
+loop.close()
+[out]
+Task A: Compute factorial(2)...
+Task B: Compute factorial(2)...
+Task C: Compute factorial(2)...
+Task A: factorial(2) = 2
+Task B: Compute factorial(3)...
+Task C: Compute factorial(3)...
+Task B: factorial(3) = 6
+Task C: Compute factorial(4)...
+Task C: factorial(4) = 24
+
+
+[case testConcatenatedCoroutines]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+@asyncio.coroutine
+def h4() -> 'Generator[Any, None, int]':
+    x = yield from future
+    return x
+
+@asyncio.coroutine
+def h3() -> 'Generator[Any, None, int]':
+    x = yield from h4()
+    print("h3: %s" % x)
+    return x
+
+@asyncio.coroutine
+def h2() -> 'Generator[Any, None, int]':
+    x = yield from h3()
+    print("h2: %s" % x)
+    return x
+
+@asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    x = yield from h2()
+    print("h: %s" % x)
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[int]
+future.set_result(42)
+loop.run_until_complete(h())
+print("Outside %s" % future.result())
+loop.close()
+[out]
+h3: 42
+h2: 42
+h: 42
+Outside 42
+
+[case testConcatenatedCoroutinesReturningFutures]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+@asyncio.coroutine
+def h4() -> 'Generator[Any, None, Future[int]]':
+    yield from asyncio.sleep(0.1)
+    f = asyncio.Future() #type: Future[int]
+    return f
+
+@asyncio.coroutine
+def h3() -> 'Generator[Any, None, Future[Future[int]]]':
+    x = yield from h4()
+    x.set_result(42)
+    f = asyncio.Future() #type: Future[Future[int]]
+    f.set_result(x)
+    return f
+
+@asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    print("Before")
+    x = yield from h3()
+    y = yield from x
+    z = yield from y
+    print(z)
+    def normalize(future):
+        # The str conversion seems inconsistent; not sure exactly why. Normalize
+        # the result.
+        return str(future).replace('<Future finished ', 'Future<')
+    print(normalize(y))
+    print(normalize(x))
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(h())
+loop.close()
+[out]
+Before
+42
+Future<result=42>
+Future<result=Future<result=42>>
+
+
+[case testCoroutineWithOwnClass]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+class A:
+    def __init__(self, x: int) -> None:
+        self.x = x
+
+@asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    x = yield from future
+    print("h: %s" % x.x)
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[A]
+future.set_result(A(42))
+loop.run_until_complete(h())
+print("Outside %s" % future.result().x)
+loop.close()
+[out]
+h: 42
+Outside 42
+
+
+-- Errors
+
+[case testErrorAssigningCoroutineThatDontReturn]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+@asyncio.coroutine
+def greet() -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(0.2)
+    print('Hello World')
+
+@asyncio.coroutine
+def test() -> 'Generator[Any, None, None]':
+    yield from greet()
+    x = yield from greet()  # Error
+
+loop = asyncio.get_event_loop()
+try:
+    loop.run_until_complete(test())
+finally:
+    loop.close()
+[out]
+_program.py:13: error: Function does not return a value
+
+[case testErrorReturnIsNotTheSameType]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+@asyncio.coroutine
+def compute(x: int, y: int) -> 'Generator[Any, None, int]':
+    print("Compute %s + %s ..." % (x, y))
+    yield from asyncio.sleep(0.1)
+    return str(x + y)   # Error
+
+@asyncio.coroutine
+def print_sum(x: int, y: int) -> 'Generator[Any, None, None]':
+    result = yield from compute(x, y)
+    print("%s + %s = %s" % (x, y, result))
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(print_sum(1, 2))
+loop.close()
+
+[out]
+_program.py:9: error: Incompatible return value type (got "str", expected "int")
+
+[case testErrorSetFutureDifferentInternalType]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+@asyncio.coroutine
+def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(1)
+    future.set_result(42)  # Error
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))
+loop.run_until_complete(future)
+print(future.result())
+loop.close()
+[out]
+_program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "int"; expected "str"
+
+
+[case testErrorUsingDifferentFutureType]
+from typing import Any, Generator
+import asyncio
+from asyncio import Future
+
+@asyncio.coroutine
+def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(1)
+    future.set_result(42)
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))  # Error
+loop.run_until_complete(future)
+print(future.result())
+loop.close()
+[out]
+_program.py:12: error: Argument 1 to "slow_operation" has incompatible type Future[str]; expected Future[int]
+
+[case testErrorUsingDifferentFutureTypeAndSetFutureDifferentInternalType]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+asyncio.coroutine
+def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(1)
+    future.set_result('42')  # Try to set a str as the result of a Future[int]
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))  # Error
+loop.run_until_complete(future)
+print(future.result())
+loop.close()
+[out]
+_program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "str"; expected "int"
+_program.py:12: error: Argument 1 to "slow_operation" has incompatible type Future[str]; expected Future[int]
+
+[case testErrorSettingCallbackWithDifferentFutureType]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future, AbstractEventLoop
+
+@asyncio.coroutine
+def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(1)
+    future.set_result('Future is done!')
+
+def got_result(future: 'Future[int]') -> None:
+    print(future.result())
+    loop.stop()
+
+loop = asyncio.get_event_loop() # type: AbstractEventLoop
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))
+future.add_done_callback(got_result)  # Error
+
+try:
+    loop.run_forever()
+finally:
+    loop.close()
+[out]
+_program.py:18: error: Argument 1 to "add_done_callback" of "Future" has incompatible type Callable[[Future[int]], None]; expected Callable[[Future[str]], Any]
+
+[case testErrorOneMoreFutureInReturnType]
+import typing
+from typing import Any, Generator
+import asyncio
+from asyncio import Future
+
+@asyncio.coroutine
+def h4() -> 'Generator[Any, None, Future[int]]':
+    yield from asyncio.sleep(1)
+    f = asyncio.Future() #type: Future[int]
+    return f
+
+@asyncio.coroutine
+def h3() -> 'Generator[Any, None, Future[Future[Future[int]]]]':
+    x = yield from h4()
+    x.set_result(42)
+    f = asyncio.Future() #type: Future[Future[int]]
+    f.set_result(x)
+    return f
+
+@asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    print("Before")
+    x = yield from h3()
+    y = yield from x
+    z = yield from y
+    print(z)
+    print(y)
+    print(x)
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(h())
+loop.close()
+[out]
+_program.py:18: error: Incompatible return value type (got Future[Future[int]], expected Future[Future[Future[int]]])
+
+[case testErrorOneLessFutureInReturnType]
+import typing
+from typing import Any, Generator
+import asyncio
+from asyncio import Future
+
+@asyncio.coroutine
+def h4() -> 'Generator[Any, None, Future[int]]':
+    yield from asyncio.sleep(1)
+    f = asyncio.Future() #type: Future[int]
+    return f
+
+@asyncio.coroutine
+def h3() -> 'Generator[Any, None, Future[int]]':
+    x = yield from h4()
+    x.set_result(42)
+    f = asyncio.Future() #type: Future[Future[int]]
+    f.set_result(x)
+    return f
+
+@asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    print("Before")
+    x = yield from h3()
+    y = yield from x
+    print(y)
+    print(x)
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(h())
+loop.close()
+[out]
+_program.py:18: error: Incompatible return value type (got Future[Future[int]], expected Future[int])
+
+[case testErrorAssignmentDifferentType]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+class A:
+    def __init__(self, x: int) -> None:
+        self.x = x
+
+class B:
+    def __init__(self, x: int) -> None:
+        self.x = x
+
+@asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    x = yield from future # type: B # Error
+    print("h: %s" % x.x)
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[A]
+future.set_result(A(42))
+loop.run_until_complete(h())
+loop.close()
+[out]
+_program.py:16: error: Incompatible types in assignment (expression has type "A", variable has type "B")
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
new file mode 100644
index 0000000..2287e0e
--- /dev/null
+++ b/test-data/unit/pythoneval.test
@@ -0,0 +1,1396 @@
+-- Test cases for type checking mypy programs using full stubs and running
+-- using CPython.
+--
+-- These are mostly regression tests -- no attempt is made to make these
+-- complete.
+
+
+[case testHello]
+import typing
+print('hello, world')
+[out]
+hello, world
+
+-- Skipped because different typing package versions have different repr()s.
+[case testAbstractBaseClasses-skip]
+import re
+from typing import Sized, Sequence, Iterator, Iterable, Mapping, AbstractSet
+
+def check(o, t):
+    rep = re.sub('0x[0-9a-fA-F]+', '0x...', repr(o))
+    rep = rep.replace('sequenceiterator', 'str_iterator')
+    trep = str(t).replace('_abcoll.Sized', 'collections.abc.Sized')
+    print(rep, trep, isinstance(o, t))
+
+def f():
+    check('x', Sized)
+    check([1], Sequence)
+    check({1:3}, Sequence)
+    check(iter('x'), Iterator)
+    check('x', Iterable)
+    check({}, Mapping)
+    check(set([1]), AbstractSet)
+
+f()
+[out]
+'x' <class 'collections.abc.Sized'> True
+[1] typing.Sequence True
+{1: 3} typing.Sequence False
+<str_iterator object at 0x...> typing.Iterator True
+'x' typing.Iterable True
+{} typing.Mapping True
+{1} typing.AbstractSet True
+
+[case testSized]
+from typing import Sized
+class A(Sized):
+    def __len__(self): return 5
+print(len(A()))
+[out]
+5
+
+[case testReversed]
+from typing import Reversible
+class A(Reversible):
+    def __iter__(self): return iter('oof')
+    def __reversed__(self): return iter('foo')
+print(list(reversed(range(5))))
+print(list(reversed([1,2,3])))
+print(list(reversed('abc')))
+print(list(reversed(A())))
+[out]
+-- Duplicate [ at line beginning.
+[[4, 3, 2, 1, 0]
+[[3, 2, 1]
+[['c', 'b', 'a']
+[['f', 'o', 'o']
+
+[case testIntAndFloatConversion]
+from typing import SupportsInt, SupportsFloat
+class A(SupportsInt):
+    def __int__(self): return 5
+class B(SupportsFloat):
+    def __float__(self): return 1.2
+print(int(1))
+print(int(6.2))
+print(int('3'))
+print(int(b'4'))
+print(int(A()))
+print(float(-9))
+print(float(B()))
+[out]
+1
+6
+3
+4
+5
+-9.0
+1.2
+
+[case testAbs]
+from typing import SupportsAbs
+class A(SupportsAbs[float]):
+    def __abs__(self) -> float: return 5.5
+
+print(abs(-1))
+print(abs(-1.2))
+print(abs(A()))
+[out]
+1
+1.2
+5.5
+
+[case testAbs2]
+
+n = None  # type: int
+f = None  # type: float
+n = abs(1)
+abs(1) + 'x'  # Error
+f = abs(1.1)
+abs(1.1) + 'x'  # Error
+[out]
+_program.py:5: error: Unsupported operand types for + ("int" and "str")
+_program.py:7: error: Unsupported operand types for + ("float" and "str")
+
+[case testRound]
+from typing import SupportsRound
+class A(SupportsRound):
+    def __round__(self, ndigits=0): return 'x%d' % ndigits
+print(round(1.6))
+print(round(A()))
+print(round(A(), 2))
+[out]
+2
+x0
+x2
+
+[case testCallMethodViaTypeObject]
+import typing
+print(list.__add__([1, 2], [3, 4]))
+[out]
+[[1, 2, 3, 4]
+
+[case testClassDataAttribute]
+import typing
+class A:
+    x = 0
+print(A.x)
+A.x += 1
+print(A.x)
+[out]
+0
+1
+
+[case testInheritedClassAttribute]
+import typing
+class A:
+    x = 1
+    def f(self) -> None: print('f')
+class B(A):
+    pass
+B.f(None)
+print(B.x)
+[out]
+f
+1
+
+[case testFunctionDecorator]
+from typing import TypeVar, cast
+ftype = TypeVar('ftype')
+def logged(f: ftype) -> ftype:
+    def g(*args, **kwargs):
+        print('enter', f.__name__)
+        r = f(*args, **kwargs)
+        print('exit', f.__name__)
+        return r
+    return cast(ftype, g)
+
+@logged
+def foo(s: str) -> str:
+    print('foo', s)
+    return s + '!'
+
+print(foo('y'))
+print(foo('x'))
+[out]
+enter foo
+foo y
+exit foo
+y!
+enter foo
+foo x
+exit foo
+x!
+
+[case testModuleAttributes]
+import math
+import typing
+print(math.__name__)
+print(type(math.__dict__))
+print(type(math.__doc__ or ''))
+print(math.__class__)
+[out]
+math
+<class 'dict'>
+<class 'str'>
+<class 'module'>
+
+[case testSpecialAttributes]
+import typing
+class A: pass
+print(object().__doc__)
+print(A().__class__)
+[out]
+The most base type
+<class '__main__.A'>
+
+[case testFunctionAttributes]
+import typing
+ord.__class__
+print(type(ord.__doc__ + ''))
+print(ord.__name__)
+print(ord.__module__)
+[out]
+<class 'str'>
+ord
+builtins
+
+[case testTypeAttributes]
+import typing
+print(str.__class__)
+print(type(str.__doc__))
+print(str.__name__)
+print(str.__module__)
+print(str.__dict__ is not None)
+[out]
+<class 'type'>
+<class 'str'>
+str
+builtins
+True
+
+[case testBoolCompatibilityWithInt]
+import typing
+x = 0
+x = True
+print(bool('x'))
+print(bool(''))
+[out]
+True
+False
+
+[case testCallBuiltinTypeObjectsWithoutArguments]
+import typing
+print(int())
+print(repr(str()))
+print(repr(bytes()))
+print(float())
+print(bool())
+[out]
+0
+''
+b''
+0.0
+False
+
+[case testIntegerDivision]
+import typing
+x = 1 / 2
+x = 1.5
+[out]
+
+[case testStaticmethod]
+import typing
+class A:
+    @staticmethod
+    def f(x: str) -> int: return int(x)
+print(A.f('12'))
+print(A().f('34'))
+[out]
+12
+34
+
+[case testClassmethod]
+import typing
+class A:
+    @classmethod
+    def f(cls, x: str) -> int: return int(x)
+print(A.f('12'))
+print(A().f('34'))
+[out]
+12
+34
+
+[case testIntMethods]
+import typing
+print(int.from_bytes(b'ab', 'big'))
+n = 0
+print(n.from_bytes(b'ac', 'big'))
+print(n.from_bytes([2, 3], 'big'))
+print(n.to_bytes(2, 'big'))
+[out]
+24930
+24931
+515
+b'\x00\x00'
+
+[case testFloatMethods]
+import typing
+print(1.5.as_integer_ratio())
+print(1.5.hex())
+print(2.0.is_integer())
+print(float.fromhex('0x1.8'))
+[out]
+(3, 2)
+0x1.8000000000000p+0
+True
+1.5
+
+[case testArray]
+import typing
+import array
+array.array('b', [1, 2])
+[out]
+
+[case testDictFromkeys]
+import typing
+d = dict.fromkeys('foo')
+d['x'] = 2
+d2 = dict.fromkeys([1, 2], b'')
+d2[2] = b'foo'
+[out]
+
+[case testReadOnlyProperty]
+class A:
+    x = 2
+    @property
+    def f(self) -> int:
+        return self.x + 1
+print(A().f)
+[out]
+3
+
+[case testIsinstanceWithTuple]
+from typing import cast, Any
+x = cast(Any, (1, 'x'))
+if isinstance(x, tuple):
+    print(x[0], x[1])
+[out]
+1 x
+
+[case testTypevarValues]
+from typing import TypeVar
+T = TypeVar('T', str, bytes)
+def f(x: T) -> T:
+    if isinstance(x, str):
+        return 'foo'
+    else:
+        return b'bar'
+print(f(''))
+print(f(b''))
+[out]
+foo
+b'bar'
+
+[case testAnyStr]
+from typing import AnyStr
+def f(x: AnyStr) -> AnyStr:
+    if isinstance(x, str):
+        return 'foo'
+    else:
+        return b'zar'
+print(f(''))
+print(f(b''))
+[out]
+foo
+b'zar'
+
+[case testNameNotImportedFromTyping]
+import typing
+cast(int, 2)
+[out]
+_program.py:2: error: Name 'cast' is not defined
+
+[case testBinaryIOType]
+from typing import BinaryIO
+def f(f: BinaryIO) -> None:
+    f.write(b'foo')
+    f.write(bytearray(b'foo'))
+[out]
+
+[case testIOTypes]
+from typing import IO
+import sys
+def txt(f: IO[str]) -> None:
+    f.write('foo')
+    f.write(b'foo')
+def bin(f: IO[bytes]) -> None:
+    f.write(b'foo')
+    f.write(bytearray(b'foo'))
+txt(sys.stdout)
+bin(sys.stdout)
+[out]
+_program.py:5: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str"
+_program.py:10: error: Argument 1 to "bin" has incompatible type "TextIO"; expected IO[bytes]
+
+[case testBuiltinOpen]
+f = open('x')
+f.write('x')
+f.write(b'x')
+f.foobar()
+[out]
+_program.py:3: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str"
+_program.py:4: error: "TextIO" has no attribute "foobar"
+
+[case testOpenReturnTypeInference]
+reveal_type(open('x'))
+reveal_type(open('x', 'r'))
+reveal_type(open('x', 'rb'))
+mode = 'rb'
+reveal_type(open('x', mode))
+[out]
+_program.py:1: error: Revealed type is 'typing.TextIO'
+_program.py:2: error: Revealed type is 'typing.TextIO'
+_program.py:3: error: Revealed type is 'typing.BinaryIO'
+_program.py:5: error: Revealed type is 'typing.IO[Any]'
+
+[case testOpenReturnTypeInferenceSpecialCases]
+reveal_type(open())
+reveal_type(open(mode='rb', file='x'))
+reveal_type(open(file='x', mode='rb'))
+mode = 'rb'
+reveal_type(open(mode=mode, file='r'))
+[out]
+_testOpenReturnTypeInferenceSpecialCases.py:1: error: Revealed type is 'typing.TextIO'
+_testOpenReturnTypeInferenceSpecialCases.py:1: error: Too few arguments for "open"
+_testOpenReturnTypeInferenceSpecialCases.py:2: error: Revealed type is 'typing.BinaryIO'
+_testOpenReturnTypeInferenceSpecialCases.py:3: error: Revealed type is 'typing.BinaryIO'
+_testOpenReturnTypeInferenceSpecialCases.py:5: error: Revealed type is 'typing.IO[Any]'
+
+[case testGenericPatterns]
+from typing import Pattern
+import re
+p = None  # type: Pattern[str]
+p = re.compile('foo*')
+b = None  # type: Pattern[bytes]
+b = re.compile(b'foo*')
+print(p.match('fooo').group(0))
+[out]
+fooo
+
+[case testGenericMatch]
+from typing import Match
+import re
+def f(m: Match[bytes]) -> None:
+    print(m.group(0))
+f(re.match(b'x*', b'xxy'))
+[out]
+b'xx'
+
+[case testMultipleTypevarsWithValues]
+from typing import TypeVar
+
+T = TypeVar('T', int, str)
+S = TypeVar('S', int, str)
+
+def f(t: T, s: S) -> None:
+    t + s
+[out]
+_program.py:7: error: Unsupported operand types for + ("int" and "str")
+_program.py:7: error: Unsupported operand types for + ("str" and "int")
+
+[case testSystemExitCode]
+import typing
+print(SystemExit(5).code)
+[out]
+5
+
+[case testIntFloatDucktyping]
+
+x = None  # type: float
+x = 2.2
+x = 2
+def f(x: float) -> None: pass
+f(1.1)
+f(1)
+[out]
+
+[case testsFloatOperations]
+import typing
+print(1.5 + 1.5)
+print(1.5 + 1)
+[out]
+3.0
+2.5
+
+[case testMathFunctionWithIntArgument]
+import typing
+import math
+math.sin(2)
+math.sin(2.2)
+
+[case testAbsReturnType]
+
+f = None  # type: float
+n = None  # type: int
+n = abs(2)
+f = abs(2.2)
+abs(2.2) + 'x'
+[out]
+_program.py:6: error: Unsupported operand types for + ("float" and "str")
+
+[case testROperatorMethods]
+
+b = None  # type: bytes
+s = None  # type: str
+s = b'foo' * 5 # Error
+b = 5 * b'foo'
+b = b'foo' * 5
+s = 5 * 'foo'
+s = 'foo' * 5
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "bytes", variable has type "str")
+
+[case testROperatorMethods2]
+import typing
+print(2 / 0.5)
+print(' ', 2 * [3, 4])
+[out]
+4.0
+  [3, 4, 3, 4]
+
+[case testNotImplemented]
+import typing
+class A:
+    def __add__(self, x: int) -> int:
+        if isinstance(x, int):
+            return x + 1
+        return NotImplemented
+class B:
+    def __radd__(self, x: A) -> str:
+        return 'x'
+print(A() + 1)
+print(A() + B())
+[out]
+2
+x
+
+[case testMappingMethods]
+# Regression test
+from typing import Mapping
+x = {'x': 'y'} # type: Mapping[str, str]
+print('x' in x)
+print('y' in x)
+[out]
+True
+False
+
+[case testOverlappingOperatorMethods]
+
+class X: pass
+class A:
+    def __add__(self, x) -> int:
+        if isinstance(x, X):
+            return 1
+        return NotImplemented
+class B:
+    def __radd__(self, x: A) -> str: return 'x'
+class C(X, B): pass
+b = None  # type: B
+b = C()
+print(A() + b)
+[out]
+_program.py:9: error: Signatures of "__radd__" of "B" and "__add__" of "A" are unsafely overlapping
+
+[case testBytesAndBytearrayComparisons]
+import typing
+print(b'ab' < bytearray(b'b'))
+print(bytearray(b'ab') < b'a')
+[out]
+True
+False
+
+[case testBytesAndBytearrayComparisons2]
+import typing
+'' < b''
+b'' < ''
+'' < bytearray()
+bytearray() < ''
+[out]
+_program.py:2: error: Unsupported operand types for > ("bytes" and "str")
+_program.py:3: error: Unsupported operand types for > ("str" and "bytes")
+_program.py:4: error: Unsupported operand types for > ("bytearray" and "str")
+_program.py:5: error: Unsupported operand types for > ("str" and "bytearray")
+
+[case testInplaceOperatorMethod]
+import typing
+a = [1]
+print('', a.__iadd__([2]))
+print('', a)
+[out]
+ [1, 2]
+ [1, 2]
+
+[case testListInplaceAdd]
+import typing
+a = [1]
+a += iter([2, 3])
+print(tuple(a))
+[out]
+(1, 2, 3)
+
+[case testListConcatenateWithIterable]
+import typing
+[1] + iter([2, 3])
+[out]
+_program.py:2: error: Unsupported operand types for + (List[int] and Iterator[int])
+
+[case testInferHeterogeneousListOfIterables]
+from typing import Sequence
+s = ['x', 'y'] # type: Sequence[str]
+a = [['x', 'x'], 'fo', s, iter('foo'), {'aa'}]
+for i, x in enumerate(a):
+    print(i, next(iter(x)))
+[out]
+0 x
+1 f
+2 x
+3 f
+4 aa
+
+[case testTextIOProperties]
+import typing
+import sys
+print(type(sys.stdin.encoding))
+print(type(sys.stdin.errors))
+sys.stdin.line_buffering
+sys.stdin.buffer
+sys.stdin.newlines
+[out]
+<class 'str'>
+<class 'str'>
+
+[case testIOProperties]
+import typing
+import sys
+print(sys.stdin.name)
+print(sys.stdin.buffer.mode)
+[out]
+<stdin>
+rb
+
+[case testSetUnion]
+import typing
+s = {'x', 'y'}
+print('>', sorted(s.union('foo')))
+[out]
+> ['f', 'o', 'x', 'y']
+
+[case testFromFuturePrintFunction]
+from __future__ import print_function
+print('a', 'b')
+[out]
+a b
+
+[case testLenOfTuple]
+import typing
+print(len((1, 'x')))
+[out]
+2
+
+[case testListMethods]
+import typing
+import sys
+l = [0, 1, 2, 3, 4]
+if sys.version >= '3.3':
+    l.clear()
+else:
+    l = []
+l.append(0)
+print('>', l)
+if sys.version >= '3.3':
+    m = l.copy()
+else:
+    m = l[:]
+m.extend([1, 2, 3, 4])
+print('>', m)
+print(l.index(0))
+print(l.index(0, 0))
+print(l.index(0, 0, 1))
+try:
+    print(l.index(1))
+    print('expected ValueError')
+except ValueError:
+    pass
+l.insert(0, 1)
+print('>', l)
+print(l.pop(0))
+print(l.pop())
+m.remove(0)
+try:
+    m.remove(0)
+    print('expected ValueError')
+except ValueError:
+    pass
+m.reverse()
+m.sort()
+m.sort(key=lambda x: -x)
+m.sort(reverse=False)
+m.sort(key=lambda x: -x, reverse=True)
+print('>', m)
+[out]
+> [0]
+> [0, 1, 2, 3, 4]
+0
+0
+0
+> [1, 0]
+1
+0
+> [1, 2, 3, 4]
+
+[case testListOperators]
+import typing
+l = [0, 1]
+print('+', l + [2])
+print('*', l * 2)
+print('*', 2 * l)
+print('in', 1 in l)
+print('==', l == [1, 2])
+print('!=', l != [1, 2])
+print('>', l > [1, 2, 3])
+print('>=', l >= [1, 2, 3])
+print('<', l < [1, 2, 3])
+print('<=', l <= [1, 2, 3])
+print('>[0]', l[0])
+l += [2]
+print('+=', l)
+l *= 2
+print('*=', l)
+print('iter', list(iter(l)))
+print('len', len(l))
+print('repr', repr(l))
+l[:3] = []
+print('setslice', l)
+print('reversed', list(reversed(l)))
+[out]
++ [0, 1, 2]
+* [0, 1, 0, 1]
+* [0, 1, 0, 1]
+in True
+== False
+!= True
+> False
+>= False
+< True
+<= True
+>[0] 0
++= [0, 1, 2]
+*= [0, 1, 2, 0, 1, 2]
+iter [0, 1, 2, 0, 1, 2]
+len 6
+repr [0, 1, 2, 0, 1, 2]
+setslice [0, 1, 2]
+reversed [2, 1, 0]
+
+[case testTupleAsSubtypeOfSequence]
+from typing import TypeVar, Sequence
+T = TypeVar('T')
+def f(a: Sequence[T]) -> None: print(a)
+f(tuple())
+[out]
+()
+
+[case testMapWithLambdaSpecialCase-skip]
+# TODO: Fix this; this was broken at some point but not sure why.
+from typing import List, Iterator
+a = [[1], [3]]
+b = map(lambda y: y[0], a)
+print('>', list(b))
+[out]
+> [1, 3]
+
+[case testInternalBuiltinDefinition]
+import typing
+def f(x: _T) -> None: pass
+s: FrozenSet
+[out]
+_program.py:2: error: Name '_T' is not defined
+_program.py:3: error: Name 'FrozenSet' is not defined
+
+[case testVarArgsFunctionSubtyping]
+import typing
+def f(*args: str) -> str: return args[0]
+map(f, ['x'])
+map(f, [1])
+[out]
+_program.py:4: error: Argument 1 to "map" has incompatible type Callable[[VarArg(str)], str]; expected Callable[[int], str]
+
+[case testMapStr]
+import typing
+x = range(3)
+a = list(map(str, x))
+a + 1
+[out]
+_program.py:4: error: Unsupported operand types for + (List[str] and "int")
+
+[case testNamedTuple]
+import typing
+from collections import namedtuple
+X = namedtuple('X', ['a', 'b'])
+x = X(a=1, b='s')
+print(x.a, x.b)
+[out]
+1 s
+
+[case testNamedTupleShortSyntax]
+import typing
+from collections import namedtuple
+X = namedtuple('X', ' a  b ')
+x = X(a=1, b='s')
+print(x.a, x.b)
+[out]
+1 s
+
+[case testNamedTupleError]
+import typing
+from collections import namedtuple
+X = namedtuple('X', ['a', 'b'])
+x = X(a=1, b='s')
+x.c
+[out]
+_program.py:5: error: "X" has no attribute "c"
+
+[case testNamedTupleTupleOperations]
+from typing import Iterable
+from collections import namedtuple
+X = namedtuple('X', ['a', 'b'])
+def f(x: Iterable[int]) -> None: pass
+x = X(a=1, b='s')
+f(x)
+print(len(x))
+print(x.index(1))
+print(x.count(1))
+print(x + x)
+[out]
+2
+0
+1
+(1, 's', 1, 's')
+
+[case testNamedTupleWithTypes]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int), ('b', str)])
+n = N(1, 'x')
+print(n)
+a, b = n
+print(a, b)
+print(n[0])
+[out]
+N(a=1, b='x')
+1 x
+1
+
+[case testRelativeImport]
+import typing
+from m import x
+print(x)
+[file m/__init__.py]
+from .n import x
+[file m/n.py]
+x = 1
+[out]
+1
+
+[case testRelativeImport2]
+import typing
+from m.n import x
+print(x)
+[file m/__init__.py]
+[file m/n.py]
+from .nn import x
+[file m/nn.py]
+x = 2
+[out]
+2
+
+[case testPyiTakesPrecedenceOverPy]
+import m
+m.f(1)
+[file m.py]
+def f(x):
+    print(x)
+[file m.pyi]
+import typing
+def f(x: str) -> None: pass
+[out]
+_program.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"
+
+[case testAssignToComplexReal]
+import typing
+x = 4j
+y = x.real
+y = x         # Error
+x.real = 2.0  # Error
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "complex", variable has type "float")
+_program.py:5: error: Property "real" defined in "complex" is read-only
+
+[case testComplexArithmetic]
+import typing
+print(5 + 8j)
+print(3j * 2.0)
+print(4J / 2.0)
+[out]
+(5+8j)
+6j
+2j
+
+[case testComplexArithmetic2]
+import typing
+x = 5 + 8j
+x = ''
+y = 3j * 2.0
+y = ''
+[out]
+_program.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "complex")
+_program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "complex")
+
+[case testUnionTypeAlias]
+from typing import Union
+U = Union[int, str]
+u = 1 # type: U
+u = 1.1
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "float", variable has type "Union[int, str]")
+
+[case testTupleTypeAlias]
+from typing import Tuple
+A = Tuple[int, str]
+u = 1, 'x' # type: A
+u = 1
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "int", variable has type "Tuple[int, str]")
+
+[case testCallableTypeAlias]
+from typing import Callable
+A = Callable[[int], None]
+def f(x: A) -> None:
+    x(1)
+    x('')
+[out]
+_program.py:5: error: Argument 1 has incompatible type "str"; expected "int"
+
+[case testSuperNew]
+from typing import Dict, Any
+class MyType(type):
+    def __new__(cls, name: str, bases: tuple, namespace: Dict[str, Any]) -> type:
+        return super().__new__(cls, name + 'x', bases, namespace)
+class A(metaclass=MyType): pass
+print(type(A()).__name__)
+[out]
+Ax
+
+[case testSequenceIndexAndCount]
+from typing import Sequence
+def f(x: Sequence[int]) -> None:
+    print(x.index(1))
+    print(x.count(1))
+f([0, 0, 1, 1, 1])
+[out]
+2
+3
+
+[case testEscapeInTripleQuotedStrLiteral]
+print('''\'''')
+print(r"""\"""$""")
+[out]
+'
+\"""$
+
+[case testSubclassBothGenericAndNonGenericABC]
+from typing import Generic, TypeVar
+from abc import ABCMeta
+T = TypeVar('T')
+class A(metaclass=ABCMeta): pass
+class B(Generic[T]): pass
+class C(A, B): pass
+class D(B, A): pass
+class E(A, B[T], Generic[T]): pass
+class F(B[T], A, Generic[T]): pass
+def f(e: E[int], f: F[int]) -> None: pass
+[out]
+
+[case testOptional]
+from typing import Optional
+def f() -> Optional[int]: pass
+x = f()
+y = 1
+y = x
+
+[case testAppendToStarArg]
+import typing
+def f(*x: int) -> None:
+    x.append(1)
+f(1)
+[out]
+_program.py:3: error: Tuple[int, ...] has no attribute "append"
+
+[case testExit]
+print('a')
+exit(2)
+print('b')
+[out]
+a
+
+[case testTypeVariableTypeComparability]
+from typing import TypeVar
+T = TypeVar('T')
+def eq(x: T, y: T, z: T) -> T:
+    if x == y:
+        return y
+    else:
+        return z
+print(eq(1, 2, 3))
+print(eq('x', 'x', 'z'))
+[out]
+3
+x
+
+[case testIntDecimalCompatibility]
+import typing
+from decimal import Decimal
+print(Decimal(1) + 2)
+print(Decimal(1) - 2)
+print(1 + Decimal('2.34'))
+print(1 - Decimal('2.34'))
+print(2 * Decimal('2.34'))
+[out]
+3
+-1
+3.34
+-1.34
+4.68
+
+[case testInstantiateBuiltinTypes]
+from typing import Dict, Set, List
+d = dict()  # type: Dict[int, str]
+s = set()   # type: Set[int]
+l = list()  # type: List[int]
+str()
+bytes()
+bytearray()
+int()
+float()
+complex()
+slice(1)
+bool()
+
+[case testVariableLengthTuple]
+from typing import Tuple
+def p(t: Tuple[int, ...]) -> None:
+    for n in t:
+        print(n)
+p((1, 3, 2))
+[out]
+1
+3
+2
+
+[case testVariableLengthTupleError]
+from typing import Tuple
+def p(t: Tuple[str, ...]) -> None:
+    n = 5
+    print(t[n])
+    for s in t:
+        s()
+''.startswith(('x', 'y'))
+''.startswith(('x', b'y'))
+[out]
+_program.py:6: error: "str" not callable
+_program.py:8: error: Argument 1 to "startswith" of "str" has incompatible type "Tuple[str, bytes]"; expected "Union[str, Tuple[str, ...]]"
+
+[case testMultiplyTupleByInteger]
+n = 4
+t = ('',) * n
+t + 1
+[out]
+_program.py:3: error: Unsupported operand types for + (Tuple[str, ...] and "int")
+
+[case testMultiplyTupleByIntegerReverse]
+n = 4
+t = n * ('',)
+t + 1
+[out]
+_program.py:3: error: Unsupported operand types for + (Tuple[str, ...] and "int")
+
+[case testDictWithKeywordArgs]
+from typing import Dict, Any, List
+d1 = dict(a=1, b=2) # type: Dict[str, int]
+d2 = dict(a=1, b='') # type: Dict[str, int] # E
+d3 = dict(a=1, b=1)
+d3.xyz # E
+d4 = dict(a=1, b='') # type: Dict[str, Any]
+result = dict(x=[], y=[]) # type: Dict[str, List[str]]
+[out]
+_program.py:3: error: Dict entry 1 has incompatible type "str": "str"
+_program.py:5: error: Dict[str, int] has no attribute "xyz"
+
+[case testDefaultDict]
+import typing as t
+from collections import defaultdict
+
+T = t.TypeVar('T')
+
+d1 = defaultdict(list) # type: t.DefaultDict[int, str]
+d2 = defaultdict() # type: t.DefaultDict[int, str]
+d2[0] = '0'
+d2['0'] = 0
+
+def tst(dct: t.DefaultDict[int, T]) -> T:
+    return dct[0]
+
+collections = ['coins', 'stamps', 'comics'] # type: t.List[str]
+d3 = defaultdict(str) # type: t.DefaultDict[int, str]
+collections[2]
+
+tst(defaultdict(list, {0: []}))
+tst(defaultdict(list, {'0': []}))
+
+class MyDDict(t.DefaultDict[int,T], t.Generic[T]):
+    pass
+MyDDict(dict)['0']
+MyDDict(dict)[0]
+[out]
+_program.py:6: error: Argument 1 to "defaultdict" has incompatible type Type[List[Any]]; expected Callable[[], str]
+_program.py:9: error: Invalid index type "str" for defaultdict[int, str]; expected type "int"
+_program.py:9: error: Incompatible types in assignment (expression has type "int", target has type "str")
+_program.py:19: error: Dict entry 0 has incompatible type "str": List[<nothing>]
+_program.py:23: error: Invalid index type "str" for MyDDict[Dict[_KT, _VT]]; expected type "int"
+
+[case testNoSubcriptionOfStdlibCollections]
+import collections
+from collections import Counter
+from typing import TypeVar
+
+collections.defaultdict[int, str]()
+Counter[int]()
+
+T = TypeVar('T')
+DDint = collections.defaultdict[T, int]
+
+d = DDint[str]()
+d[0] = 1
+
+def f(d: collections.defaultdict[int, str]) -> None:
+    ...
+[out]
+_program.py:5: error: "defaultdict" is not subscriptable
+_program.py:6: error: "Counter" is not subscriptable
+_program.py:9: error: "defaultdict" is not subscriptable
+_program.py:12: error: Invalid index type "int" for defaultdict[str, int]; expected type "str"
+_program.py:14: error: "defaultdict" is not subscriptable, use "typing.DefaultDict" instead
+
+[case testCollectionsAliases]
+import typing as t
+import collections as c
+
+o1 = c.Counter()  # type: t.Counter[int]
+reveal_type(o1)
+o1['string']
+
+o2 = c.ChainMap()  # type: t.ChainMap[int, str]
+reveal_type(o2)
+
+o3 = c.deque()  # type: t.Deque[int]
+reveal_type(o3)
+
+o4 = t.Counter[int]()
+reveal_type(o4)
+
+o5 = t.ChainMap[int, str]()
+reveal_type(o5)
+
+o6 = t.Deque[int]()
+reveal_type(o6)
+
+[out]
+_testCollectionsAliases.py:5: error: Revealed type is 'collections.Counter[builtins.int]'
+_testCollectionsAliases.py:6: error: Invalid index type "str" for Counter[int]; expected type "int"
+_testCollectionsAliases.py:9: error: Revealed type is 'collections.ChainMap[builtins.int, builtins.str]'
+_testCollectionsAliases.py:12: error: Revealed type is 'collections.deque[builtins.int]'
+_testCollectionsAliases.py:15: error: Revealed type is 'collections.Counter[builtins.int*]'
+_testCollectionsAliases.py:18: error: Revealed type is 'collections.ChainMap[builtins.int*, builtins.str*]'
+_testCollectionsAliases.py:21: error: Revealed type is 'collections.deque[builtins.int*]'
+
+[case testChainMapUnimported]
+ChainMap[int, str]()
+
+[out]
+_testChainMapUnimported.py:1: error: Name 'ChainMap' is not defined
+
+[case testDequeWrongCase]
+import collections
+import typing
+
+collections.Deque()
+typing.deque()
+
+[out]
+_testDequeWrongCase.py:4: error: Module has no attribute "Deque"
+_testDequeWrongCase.py:5: error: Module has no attribute "deque"
+
+[case testDictUpdateInference]
+from typing import Dict, Optional
+d = {}  # type: Dict[str, Optional[int]]
+d.update({str(i): None for i in range(4)})
+
+[case testSuperAndSetattr]
+class A:
+    def __init__(self) -> None:
+        super().__setattr__('a', 1)
+        super().__setattr__(1, 'a')
+[out]
+_program.py:4: error: Argument 1 to "__setattr__" of "object" has incompatible type "int"; expected "str"
+
+[case testMetaclassAndSuper]
+class A(type):
+    def __new__(cls, name, bases, namespace) -> 'type':
+        return super().__new__(cls, '', (object,), {'x': 7})
+
+class B(metaclass=A):
+    pass
+
+print(getattr(B(), 'x'))
+[out]
+7
+
+[case testSortedNoError]
+from typing import Iterable, Callable, TypeVar, List, Dict
+T = TypeVar('T')
+def sorted(x: Iterable[T], *, key: Callable[[T], object] = None) -> None: ...
+a = None # type: List[Dict[str, str]]
+sorted(a, key=lambda y: y[''])
+
+[case testAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A):
+    @property
+    def x(self) -> int:
+        return 3
+b = B()
+print(b.x + 1)
+[out]
+4
+
+[case testInferenceWithLambda]
+from typing import TypeVar, Iterable, Iterator, List
+import itertools
+
+_T = TypeVar('_T')
+
+def f(iterable): # type: (Iterable[_T]) -> Iterator[List[_T]]
+    grouped = itertools.groupby(enumerate(iterable), lambda pair: pair[0] // 2)
+    return ([elem for _, elem in group] for _, group in grouped)
+
+[case testReModuleBytes]
+# Regression tests for various overloads in the re module -- bytes version
+import re
+bre = b'a+'
+bpat = re.compile(bre)
+bpat = re.compile(bpat)
+re.search(bre, b'').groups()
+re.search(bre, u'') # Error
+re.search(bpat, b'').groups()
+re.search(bpat, u'') # Error
+# match(), split(), findall(), finditer() are much the same, so skip those.
+# sub(), subn() have more overloads and we are checking these:
+re.sub(bre, b'', b'') + b''
+re.sub(bpat, b'', b'') + b''
+re.sub(bre, lambda m: b'', b'') + b''
+re.sub(bpat, lambda m: b'', b'') + b''
+re.subn(bre, b'', b'')[0] + b''
+re.subn(bpat, b'', b'')[0] + b''
+re.subn(bre, lambda m: b'', b'')[0] + b''
+re.subn(bpat, lambda m: b'', b'')[0] + b''
+[out]
+_program.py:7: error: Type argument 1 of "search" has incompatible value "object"
+_program.py:9: error: Cannot infer type argument 1 of "search"
+
+[case testReModuleString]
+# Regression tests for various overloads in the re module -- string version
+import re
+sre = 'a+'
+spat = re.compile(sre)
+spat = re.compile(spat)
+re.search(sre, '').groups()
+re.search(sre, b'') # Error
+re.search(spat, '').groups()
+re.search(spat, b'') # Error
+# match(), split(), findall(), finditer() are much the same, so skip those.
+# sub(), subn() have more overloads and we are checking these:
+re.sub(sre, '', '') + ''
+re.sub(spat, '', '') + ''
+re.sub(sre, lambda m: '', '') + ''
+re.sub(spat, lambda m: '', '') + ''
+re.subn(sre, '', '')[0] + ''
+re.subn(spat, '', '')[0] + ''
+re.subn(sre, lambda m: '', '')[0] + ''
+re.subn(spat, lambda m: '', '')[0] + ''
+[out]
+_program.py:7: error: Type argument 1 of "search" has incompatible value "object"
+_program.py:9: error: Cannot infer type argument 1 of "search"
+
+[case testListSetitemTuple]
+from typing import List, Tuple
+a = []  # type: List[Tuple[str, int]]
+a[0] = 'x', 1
+a[1] = 2, 'y'
+a[:] = [('z', 3)]
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "Tuple[int, str]", target has type "Tuple[str, int]")
+
+[case testContextManager]
+import contextlib
+from contextlib import contextmanager
+from typing import Iterator
+
+@contextmanager
+def f(x: int) -> Iterator[str]:
+    yield 'foo'
+
+@contextlib.contextmanager
+def g(*x: str) -> Iterator[int]:
+    yield 1
+
+reveal_type(f)
+reveal_type(g)
+
+with f('') as s:
+    reveal_type(s)
+[out]
+_program.py:13: error: Revealed type is 'def (x: builtins.int) -> contextlib.GeneratorContextManager[builtins.str*]'
+_program.py:14: error: Revealed type is 'def (*x: builtins.str) -> contextlib.GeneratorContextManager[builtins.int*]'
+_program.py:16: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+_program.py:17: error: Revealed type is 'builtins.str*'
+
+[case testTypedDictGet]
+# Test that TypedDict get plugin works with typeshed stubs
+# TODO: Make it possible to use strict optional here
+from mypy_extensions import TypedDict
+class A: pass
+D = TypedDict('D', {'x': int, 'y': str})
+d: D
+reveal_type(d.get('x'))
+reveal_type(d.get('y'))
+d.get('z')
+d.get()
+s = ''
+reveal_type(d.get(s))
+[out]
+_testTypedDictGet.py:7: error: Revealed type is 'builtins.int'
+_testTypedDictGet.py:8: error: Revealed type is 'builtins.str'
+_testTypedDictGet.py:9: error: TypedDict "D" has no key 'z'
+_testTypedDictGet.py:10: error: No overload variant of "get" of "Mapping" matches argument types []
+_testTypedDictGet.py:12: error: Revealed type is 'builtins.object*'
+
+[case testTypedDictMappingMethods]
+from mypy_extensions import TypedDict
+Cell = TypedDict('Cell', {'value': int})
+c = Cell(value=42)
+for x in c:
+    reveal_type(x)
+reveal_type(iter(c))
+reveal_type(len(c))
+reveal_type('value' in c)
+reveal_type(c.keys())
+reveal_type(c.items())
+reveal_type(c.values())
+c == c
+c != c
+[out]
+_testTypedDictMappingMethods.py:5: error: Revealed type is 'builtins.str*'
+_testTypedDictMappingMethods.py:6: error: Revealed type is 'typing.Iterator[builtins.str*]'
+_testTypedDictMappingMethods.py:7: error: Revealed type is 'builtins.int'
+_testTypedDictMappingMethods.py:8: error: Revealed type is 'builtins.bool'
+_testTypedDictMappingMethods.py:9: error: Revealed type is 'typing.AbstractSet[builtins.str*]'
+_testTypedDictMappingMethods.py:10: error: Revealed type is 'typing.AbstractSet[Tuple[builtins.str*, builtins.int*]]'
+_testTypedDictMappingMethods.py:11: error: Revealed type is 'typing.ValuesView[builtins.int*]'
+
+[case testCanConvertTypedDictToAnySuperclassOfMapping]
+from mypy_extensions import TypedDict
+from typing import Sized, Iterable, Container
+
+Point = TypedDict('Point', {'x': int, 'y': int})
+
+p: Point
+s: Sized = p
+it: Iterable[str] = p
+c: Container[str] = p
+o: object = p
+it2: Iterable[int] = p
+[out]
+_testCanConvertTypedDictToAnySuperclassOfMapping.py:11: error: Incompatible types in assignment (expression has type "Point", variable has type Iterable[int])
diff --git a/test-data/unit/semanal-abstractclasses.test b/test-data/unit/semanal-abstractclasses.test
new file mode 100644
index 0000000..b5147bd
--- /dev/null
+++ b/test-data/unit/semanal-abstractclasses.test
@@ -0,0 +1,119 @@
+[case testAbstractMethods]
+from abc import abstractmethod, ABCMeta
+import typing
+
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  def g(self) -> 'A': pass
+  @abstractmethod
+  def f(self) -> 'A': return self
+[out]
+MypyFile:1(
+  ImportFrom:1(abc, [abstractmethod, ABCMeta])
+  Import:2(typing)
+  ClassDef:4(
+    A
+    Metaclass(ABCMeta)
+    Decorator:5(
+      Var(g)
+      FuncDef:6(
+        g
+        Args(
+          Var(self))
+        def (self: __main__.A) -> __main__.A
+        Abstract
+        Block:6(
+          PassStmt:6())))
+    Decorator:7(
+      Var(f)
+      FuncDef:8(
+        f
+        Args(
+          Var(self))
+        def (self: __main__.A) -> __main__.A
+        Abstract
+        Block:8(
+          ReturnStmt:8(
+            NameExpr(self [l])))))))
+
+[case testClassInheritingTwoAbstractClasses]
+from abc import abstractmethod, ABCMeta
+import typing
+
+class A(metaclass=ABCMeta): pass
+class B(metaclass=ABCMeta): pass
+class C(A, B): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(abc, [abstractmethod, ABCMeta])
+  Import:2(typing)
+  ClassDef:4(
+    A
+    Metaclass(ABCMeta)
+    PassStmt:4())
+  ClassDef:5(
+    B
+    Metaclass(ABCMeta)
+    PassStmt:5())
+  ClassDef:6(
+    C
+    BaseType(
+      __main__.A
+      __main__.B)
+    PassStmt:6()))
+
+[case testAbstractGenericClass]
+from abc import abstractmethod
+from typing import Generic, TypeVar
+T = TypeVar('T')
+class A(Generic[T]):
+  @abstractmethod
+  def f(self) -> 'A[T]': pass
+[out]
+MypyFile:1(
+  ImportFrom:1(abc, [abstractmethod])
+  ImportFrom:2(typing, [Generic, TypeVar])
+  AssignmentStmt:3(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:3())
+  ClassDef:4(
+    A
+    TypeVars(
+      T)
+    Decorator:5(
+      Var(f)
+      FuncDef:6(
+        f
+        Args(
+          Var(self))
+        def (self: __main__.A[T`1]) -> __main__.A[T`1]
+        Abstract
+        Block:6(
+          PassStmt:6())))))
+
+[case testFullyQualifiedAbstractMethodDecl]
+import abc
+from abc import ABCMeta
+import typing
+
+class A(metaclass=ABCMeta):
+  @abc.abstractmethod
+  def g(self) -> 'A': pass
+[out]
+MypyFile:1(
+  Import:1(abc)
+  ImportFrom:2(abc, [ABCMeta])
+  Import:3(typing)
+  ClassDef:5(
+    A
+    Metaclass(ABCMeta)
+    Decorator:6(
+      Var(g)
+      FuncDef:7(
+        g
+        Args(
+          Var(self))
+        def (self: __main__.A) -> __main__.A
+        Abstract
+        Block:7(
+          PassStmt:7())))))
diff --git a/test-data/unit/semanal-basic.test b/test-data/unit/semanal-basic.test
new file mode 100644
index 0000000..3c11da8
--- /dev/null
+++ b/test-data/unit/semanal-basic.test
@@ -0,0 +1,459 @@
+[case testEmptyFile]
+[out]
+MypyFile:1()
+
+[case testGlobalVariable]
+x = 1
+x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  ExpressionStmt:2(
+    NameExpr(x [__main__.x])))
+
+[case testMultipleGlobals]
+x = y = 2
+z = 3
+(x, y, z)
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(2))
+  AssignmentStmt:2(
+    NameExpr(z* [__main__.z])
+    IntExpr(3))
+  ExpressionStmt:3(
+    TupleExpr:3(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y])
+      NameExpr(z [__main__.z]))))
+
+[case testEmptyFunction]
+def f(): pass
+f()
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      PassStmt:1()))
+  ExpressionStmt:2(
+    CallExpr:2(
+      NameExpr(f [__main__.f])
+      Args())))
+
+[case testAccessingGlobalNameBeforeDefinition]
+x
+f()
+x = 1
+def f(): pass
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(x [__main__.x]))
+  ExpressionStmt:2(
+    CallExpr:2(
+      NameExpr(f [__main__.f])
+      Args()))
+  AssignmentStmt:3(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  FuncDef:4(
+    f
+    Block:4(
+      PassStmt:4())))
+
+[case testFunctionArgs]
+def f(x, y):
+  (x, y)
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(y))
+    Block:1(
+      ExpressionStmt:2(
+        TupleExpr:2(
+          NameExpr(x [l])
+          NameExpr(y [l]))))))
+
+[case testLocalVar]
+def f():
+  x = 1
+  x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x* [l])
+        IntExpr(1))
+      ExpressionStmt:3(
+        NameExpr(x [l])))))
+
+[case testAccessGlobalInFn]
+def f():
+  x
+  g()
+x = 1
+def g(): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(x [__main__.x]))
+      ExpressionStmt:3(
+        CallExpr:3(
+          NameExpr(g [__main__.g])
+          Args()))))
+  AssignmentStmt:4(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  FuncDef:5(
+    g
+    Block:5(
+      PassStmt:5())))
+
+[case testAssignmentAfterInit]
+x = 1
+x = 2
+def f(y):
+  y = 1
+  z = 1
+  z = 2
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    IntExpr(2))
+  FuncDef:3(
+    f
+    Args(
+      Var(y))
+    Block:3(
+      AssignmentStmt:4(
+        NameExpr(y [l])
+        IntExpr(1))
+      AssignmentStmt:5(
+        NameExpr(z* [l])
+        IntExpr(1))
+      AssignmentStmt:6(
+        NameExpr(z [l])
+        IntExpr(2)))))
+
+[case testLocalAndGlobalAliasing]
+x = 1
+def f():
+  x = 2
+  x
+x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  FuncDef:2(
+    f
+    Block:2(
+      AssignmentStmt:3(
+        NameExpr(x* [l])
+        IntExpr(2))
+      ExpressionStmt:4(
+        NameExpr(x [l]))))
+  ExpressionStmt:5(
+    NameExpr(x [__main__.x])))
+
+[case testArgumentInitializers]
+def f(x = f, y = object):
+  x, y
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(y))
+    Init(
+      AssignmentStmt:1(
+        NameExpr(x [l])
+        NameExpr(f [__main__.f]))
+      AssignmentStmt:1(
+        NameExpr(y [l])
+        NameExpr(object [builtins.object])))
+    Block:1(
+      ExpressionStmt:2(
+        TupleExpr:2(
+          NameExpr(x [l])
+          NameExpr(y [l]))))))
+
+[case testVarArgs]
+def f(x, *y):
+  x, y
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    VarArg(
+      Var(y))
+    Block:1(
+      ExpressionStmt:2(
+        TupleExpr:2(
+          NameExpr(x [l])
+          NameExpr(y [l]))))))
+
+[case testGlobalDecl]
+x = None
+def f():
+    global x
+    x = None
+    x
+class A: pass
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    NameExpr(None [builtins.None]))
+  FuncDef:2(
+    f
+    Block:2(
+      GlobalDecl:3(
+        x)
+      AssignmentStmt:4(
+        NameExpr(x [__main__.x])
+        NameExpr(None [builtins.None]))
+      ExpressionStmt:5(
+        NameExpr(x [__main__.x]))))
+  ClassDef:6(
+    A
+    PassStmt:6()))
+
+[case testMultipleNamesInGlobalDecl]
+x, y = None, None
+def f():
+    global x, y
+    x = y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    TupleExpr:1(
+      NameExpr(None [builtins.None])
+      NameExpr(None [builtins.None])))
+  FuncDef:2(
+    f
+    Block:2(
+      GlobalDecl:3(
+        x
+        y)
+      AssignmentStmt:4(
+        NameExpr(x [__main__.x])
+        NameExpr(y [__main__.y])))))
+
+[case testGlobalDeclScope]
+x = None
+def f():
+    global x
+def g():
+    x = None
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    NameExpr(None [builtins.None]))
+  FuncDef:2(
+    f
+    Block:2(
+      GlobalDecl:3(
+        x)))
+  FuncDef:4(
+    g
+    Block:4(
+      AssignmentStmt:5(
+        NameExpr(x* [l])
+        NameExpr(None [builtins.None])))))
+
+[case testGlobaWithinMethod]
+x = None
+class A:
+  def f(self):
+    global x
+    x = self
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    NameExpr(None [builtins.None]))
+  ClassDef:2(
+    A
+    FuncDef:3(
+      f
+      Args(
+        Var(self))
+      Block:3(
+        GlobalDecl:4(
+          x)
+        AssignmentStmt:5(
+          NameExpr(x [__main__.x])
+          NameExpr(self [l]))))))
+
+[case testGlobalDefinedInBlock]
+if object:
+    x = object()
+    x = x
+x
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      NameExpr(object [builtins.object]))
+    Then(
+      AssignmentStmt:2(
+        NameExpr(x* [__main__.x])
+        CallExpr:2(
+          NameExpr(object [builtins.object])
+          Args()))
+      AssignmentStmt:3(
+        NameExpr(x [__main__.x])
+        NameExpr(x [__main__.x]))))
+  ExpressionStmt:4(
+    NameExpr(x [__main__.x])))
+
+[case testNonlocalDecl]
+def g():
+    x = None
+    def f():
+        nonlocal x
+        x = None
+        x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    g
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x* [l])
+        NameExpr(None [builtins.None]))
+      FuncDef:3(
+        f
+        Block:3(
+          NonlocalDecl:4(
+            x)
+          AssignmentStmt:5(
+            NameExpr(x [l])
+            NameExpr(None [builtins.None]))
+          ExpressionStmt:6(
+            NameExpr(x [l])))))))
+
+[case testMultipleNamesInNonlocalDecl]
+def g():
+    x, y = None, None
+    def f(z):
+        nonlocal x, y
+        x = y
+[out]
+MypyFile:1(
+  FuncDef:1(
+    g
+    Block:1(
+      AssignmentStmt:2(
+        TupleExpr:2(
+          NameExpr(x* [l])
+          NameExpr(y* [l]))
+        TupleExpr:2(
+          NameExpr(None [builtins.None])
+          NameExpr(None [builtins.None])))
+      FuncDef:3(
+        f
+        Args(
+          Var(z))
+        Block:3(
+          NonlocalDecl:4(
+            x
+            y)
+          AssignmentStmt:5(
+            NameExpr(x [l])
+            NameExpr(y [l])))))))
+
+[case testNestedFunctions]
+def f(x):
+    def g(y):
+        z = y + x
+    return g
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      FuncDef:2(
+        g
+        Args(
+          Var(y))
+        Block:2(
+          AssignmentStmt:3(
+            NameExpr(z* [l])
+            OpExpr:3(
+              +
+              NameExpr(y [l])
+              NameExpr(x [l])))))
+      ReturnStmt:4(
+        NameExpr(g [l])))))
+
+[case testNestedFunctionWithOverlappingName]
+def f(x):
+    def g():
+        x = 1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      FuncDef:2(
+        g
+        Block:2(
+          AssignmentStmt:3(
+            NameExpr(x* [l])
+            IntExpr(1)))))))
diff --git a/test-data/unit/semanal-classes.test b/test-data/unit/semanal-classes.test
new file mode 100644
index 0000000..22beb8d
--- /dev/null
+++ b/test-data/unit/semanal-classes.test
@@ -0,0 +1,633 @@
+-- Test cases related to classes for the semantic analyzer.
+
+[case testSimpleClass]
+class A: pass
+x = A
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    PassStmt:1())
+  AssignmentStmt:2(
+    NameExpr(x* [__main__.x])
+    NameExpr(A [__main__.A])))
+
+[case testMethods]
+class A:
+  def __init__(self, x):
+    y = x
+  def f(self):
+    y = self
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(self)
+        Var(x))
+      Block:2(
+        AssignmentStmt:3(
+          NameExpr(y* [l])
+          NameExpr(x [l]))))
+    FuncDef:4(
+      f
+      Args(
+        Var(self))
+      Block:4(
+        AssignmentStmt:5(
+          NameExpr(y* [l])
+          NameExpr(self [l]))))))
+
+[case testMemberDefinitionInInit]
+class A:
+  def __init__(self):
+    self.x = 1
+    self.y = 2
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(self))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(self [l])
+            x*)
+          IntExpr(1))
+        AssignmentStmt:4(
+          MemberExpr:4(
+            NameExpr(self [l])
+            y*)
+          IntExpr(2))))))
+
+[case testMemberAssignmentViaSelfOutsideInit]
+class A:
+  def f(self):
+    self.x = 1
+def __init__(self):
+  self.y = 1
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(self [l])
+            x*)
+          IntExpr(1)))))
+  FuncDef:4(
+    __init__
+    Args(
+      Var(self))
+    Block:4(
+      AssignmentStmt:5(
+        MemberExpr:5(
+          NameExpr(self [l])
+          y)
+        IntExpr(1)))))
+
+[case testMemberAssignmentNotViaSelf]
+class A:
+  def __init__(x, self):
+    self.y = 1 # not really self
+class B:
+  def __init__(x):
+    self = x
+    self.z = 1
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(x)
+        Var(self))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(self [l])
+            y)
+          IntExpr(1)))))
+  ClassDef:4(
+    B
+    FuncDef:5(
+      __init__
+      Args(
+        Var(x))
+      Block:5(
+        AssignmentStmt:6(
+          NameExpr(self* [l])
+          NameExpr(x [l]))
+        AssignmentStmt:7(
+          MemberExpr:7(
+            NameExpr(self [l])
+            z)
+          IntExpr(1))))))
+
+[case testNonStandardNameForSelfAndInit]
+class A:
+  def __init__(x):
+    x.y = 1
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(x))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(x [l])
+            y*)
+          IntExpr(1))))))
+
+[case testAssignmentAfterAttributeInit]
+class A:
+  def __init__(self):
+    self.x = 1
+    self.x = 2
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(self))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(self [l])
+            x*)
+          IntExpr(1))
+        AssignmentStmt:4(
+          MemberExpr:4(
+            NameExpr(self [l])
+            x)
+          IntExpr(2))))))
+
+[case testOverloadedMethod]
+from typing import overload
+class A:
+  @overload
+  def f(self) -> None: self
+  @overload
+  def f(self, x: 'A') -> None: self
+  def f(self, *args): self
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [overload])
+  ClassDef:2(
+    A
+    OverloadedFuncDef:3(
+      FuncDef:7(
+        f
+        Args(
+          Var(self))
+        VarArg(
+          Var(args))
+        Block:7(
+          ExpressionStmt:7(
+            NameExpr(self [l]))))
+      Overload(def (self: __main__.A), \
+               def (self: __main__.A, x: __main__.A))
+      Decorator:3(
+        Var(f)
+        NameExpr(overload [typing.overload])
+        FuncDef:4(
+          f
+          Args(
+            Var(self))
+          def (self: __main__.A)
+          Block:4(
+            ExpressionStmt:4(
+              NameExpr(self [l])))))
+      Decorator:5(
+        Var(f)
+        NameExpr(overload [typing.overload])
+        FuncDef:6(
+          f
+          Args(
+            Var(self)
+            Var(x))
+          def (self: __main__.A, x: __main__.A)
+          Block:6(
+            ExpressionStmt:6(
+              NameExpr(self [l]))))))))
+
+[case testAttributeWithoutType]
+class A:
+    a = object
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    AssignmentStmt:2(
+      NameExpr(a* [m])
+      NameExpr(object [builtins.object]))))
+
+[case testDataAttributeRefInClassBody]
+class A:
+    x = 1
+    y = x
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    AssignmentStmt:2(
+      NameExpr(x* [m])
+      IntExpr(1))
+    AssignmentStmt:3(
+      NameExpr(y* [m])
+      NameExpr(x [__main__.A.x]))))
+
+[case testMethodRefInClassBody]
+class A:
+    def f(self): pass
+    g = f
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self))
+      Block:2(
+        PassStmt:2()))
+    AssignmentStmt:3(
+      NameExpr(g* [m])
+      NameExpr(f [__main__.A.f]))))
+
+[case testIfStatementInClassBody]
+class A:
+    if A:
+        x = 1
+    else:
+        x = 2
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    IfStmt:2(
+      If(
+        NameExpr(A [__main__.A]))
+      Then(
+        AssignmentStmt:3(
+          NameExpr(x* [m])
+          IntExpr(1)))
+      Else(
+        AssignmentStmt:5(
+          NameExpr(x [__main__.A.x])
+          IntExpr(2))))))
+
+[case testForStatementInClassBody]
+class A:
+    for x in [1, 2]:
+        y = x
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ForStmt:2(
+      NameExpr(x* [m])
+      ListExpr:2(
+        IntExpr(1)
+        IntExpr(2))
+      Block:2(
+        AssignmentStmt:3(
+          NameExpr(y* [m])
+          NameExpr(x [__main__.A.x]))))))
+
+[case testReferenceToClassWithinFunction]
+def f():
+    class A: pass
+    A
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ClassDef:2(
+        A
+        PassStmt:2())
+      ExpressionStmt:3(
+        NameExpr(A [__main__.A@2])))))
+
+[case testReferenceToClassWithinClass]
+class A:
+    class B: pass
+    B
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ClassDef:2(
+      B
+      PassStmt:2())
+    ExpressionStmt:3(
+      NameExpr(B [__main__.A.B]))))
+
+[case testClassWithBaseClassWithinClass]
+class A:
+    class B: pass
+    class C(B): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ClassDef:2(
+      B
+      PassStmt:2())
+    ClassDef:3(
+      C
+      BaseType(
+        __main__.A.B)
+      PassStmt:3())))
+
+[case testDeclarationReferenceToNestedClass]
+def f() -> None:
+    class A: pass
+    x = None # type: A
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def ()
+    Block:1(
+      ClassDef:2(
+        A
+        PassStmt:2())
+      AssignmentStmt:3(
+        NameExpr(x [l])
+        NameExpr(None [builtins.None])
+        __main__.A@2))))
+
+[case testAccessToLocalInOuterScopeWithinNestedClass]
+def f(x):
+    class A:
+        y = x
+        def g(self):
+            z = x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      ClassDef:2(
+        A
+        AssignmentStmt:3(
+          NameExpr(y* [m])
+          NameExpr(x [l]))
+        FuncDef:4(
+          g
+          Args(
+            Var(self))
+          Block:4(
+            AssignmentStmt:5(
+              NameExpr(z* [l])
+              NameExpr(x [l]))))))))
+
+[case testQualifiedMetaclass]
+import abc
+class A(metaclass=abc.ABCMeta): pass
+[out]
+MypyFile:1(
+  Import:1(abc)
+  ClassDef:2(
+    A
+    Metaclass(abc.ABCMeta)
+    PassStmt:2()))
+
+[case testStaticMethod]
+class A:
+  @staticmethod
+  def f(z: int) -> str: pass
+[builtins fixtures/staticmethod.pyi]
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    Decorator:2(
+      Var(f)
+      FuncDef:3(
+        f
+        Args(
+          Var(z))
+        def (z: builtins.int) -> builtins.str
+        Static
+        Block:3(
+          PassStmt:3())))))
+
+[case testStaticMethodWithNoArgs]
+class A:
+  @staticmethod
+  def f() -> str: pass
+[builtins fixtures/staticmethod.pyi]
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    Decorator:2(
+      Var(f)
+      FuncDef:3(
+        f
+        def () -> builtins.str
+        Static
+        Block:3(
+          PassStmt:3())))))
+
+[case testClassMethod]
+class A:
+  @classmethod
+  def f(cls, z: int) -> str: pass
+[builtins fixtures/classmethod.pyi]
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    Decorator:2(
+      Var(f)
+      FuncDef:3(
+        f
+        Args(
+          Var(cls)
+          Var(z))
+        def (cls: def () -> __main__.A, z: builtins.int) -> builtins.str
+        Class
+        Block:3(
+          PassStmt:3())))))
+
+[case testClassMethodWithNoArgs]
+class A:
+  @classmethod
+  def f(cls) -> str: pass
+[builtins fixtures/classmethod.pyi]
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    Decorator:2(
+      Var(f)
+      FuncDef:3(
+        f
+        Args(
+          Var(cls))
+        def (cls: def () -> __main__.A) -> builtins.str
+        Class
+        Block:3(
+          PassStmt:3())))))
+
+[case testProperty]
+import typing
+class A:
+  @property
+  def f(self) -> str: pass
+[builtins fixtures/property.pyi]
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    Decorator:3(
+      Var(f)
+      FuncDef:4(
+        f
+        Args(
+          Var(self))
+        def (self: __main__.A) -> builtins.str
+        Property
+        Block:4(
+          PassStmt:4())))))
+
+[case testClassDecorator]
+import typing
+@object
+class A: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    Decorators(
+      NameExpr(object [builtins.object]))
+    PassStmt:3()))
+
+[case testClassAttributeAsMethodDefaultArgumentValue]
+import typing
+class A:
+    X = 1
+    def f(self, x : int = X) -> None: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    AssignmentStmt:3(
+      NameExpr(X* [m])
+      IntExpr(1))
+    FuncDef:4(
+      f
+      Args(
+        Var(self)
+        Var(x))
+      def (self: __main__.A, x: builtins.int =)
+      Init(
+        AssignmentStmt:4(
+          NameExpr(x [l])
+          NameExpr(X [__main__.A.X])))
+      Block:4(
+        PassStmt:4()))))
+
+[case testInvalidBaseClass]
+from typing import Any, Callable
+class A(None): pass
+class B(Any): pass
+class C(Callable[[], int]): pass
+[out]
+main: error: Invalid base class
+main:4: error: Invalid base class
+
+[case testTupleAsBaseClass]
+import m
+[file m.pyi]
+from typing import Tuple
+class A(Tuple[int, str]): pass
+[builtins fixtures/tuple.pyi]
+[out]
+MypyFile:1(
+  Import:1(m))
+MypyFile:1(
+  tmp/m.pyi
+  ImportFrom:1(typing, [Tuple])
+  ClassDef:2(
+    A
+    TupleType(
+      Tuple[builtins.int, builtins.str])
+    BaseType(
+      builtins.tuple[Any])
+    PassStmt:2()))
+
+[case testBaseClassFromIgnoredModule]
+import m # type: ignore
+class B(m.A):
+   pass
+[out]
+MypyFile:1(
+  Import:1(m)
+  ClassDef:2(
+    B
+    FallbackToAny
+    BaseType(
+      builtins.object)
+    PassStmt:3())
+  IgnoredLines(1))
+
+[case testBaseClassFromIgnoredModuleUsingImportFrom]
+from m import A # type: ignore
+class B(A, int):
+   pass
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [A])
+  ClassDef:2(
+    B
+    FallbackToAny
+    BaseType(
+      builtins.int)
+    PassStmt:3())
+  IgnoredLines(1))
+
+[case testBaseClassWithExplicitAnyType]
+from typing import Any
+A = 1 # type: Any
+class B(A):
+   pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  AssignmentStmt:2(
+    NameExpr(A [__main__.A])
+    IntExpr(1)
+    Any)
+  ClassDef:3(
+    B
+    FallbackToAny
+    BaseType(
+      builtins.object)
+    PassStmt:4()))
diff --git a/test-data/unit/semanal-classvar.test b/test-data/unit/semanal-classvar.test
new file mode 100644
index 0000000..d2e474c
--- /dev/null
+++ b/test-data/unit/semanal-classvar.test
@@ -0,0 +1,223 @@
+[case testClassVarDef]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [ClassVar])
+  ClassDef:2(
+    A
+    AssignmentStmt:3(
+      NameExpr(x [m])
+      IntExpr(1)
+      builtins.int)))
+
+[case testClassVarDefInModuleScope]
+from typing import ClassVar
+x = None  # type: ClassVar[int]
+[out]
+main:2: error: ClassVar can only be used for assignments in class body
+
+[case testClassVarDefInFuncScope]
+from typing import ClassVar
+def f() -> None:
+    x = None  # type: ClassVar[int]
+[out]
+main:3: error: ClassVar can only be used for assignments in class body
+
+[case testClassVarDefInMethod]
+from typing import ClassVar
+class A:
+    def f(self) -> None:
+        x = None  # type: ClassVar
+[out]
+main:4: error: ClassVar can only be used for assignments in class body
+
+[case testClassVarTooManyArguments]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar[int, str]
+[out]
+main:3: error: ClassVar[...] must have at most one type argument
+
+[case testClassVarWithoutArguments]
+from typing import ClassVar
+class A:
+    x = 1  # type: ClassVar
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [ClassVar])
+  ClassDef:2(
+    A
+    AssignmentStmt:3(
+      NameExpr(x [m])
+      IntExpr(1)
+      Any)))
+
+[case testClassVarWithTypeVar]
+from typing import ClassVar, TypeVar
+T = TypeVar('T')
+class A:
+    x = None  # type: ClassVar[T]
+[out]
+main:4: error: Invalid type "__main__.T"
+
+[case testClassVarInFunctionArgs]
+from typing import ClassVar
+def f(x: str, y: ClassVar) -> None: pass
+[out]
+main:2: error: ClassVar can only be used for assignments in class body
+
+[case testClassVarInMethodArgs]
+from typing import ClassVar
+class A:
+    def f(x: str, y: ClassVar) -> None: pass
+[out]
+main:3: error: ClassVar can only be used for assignments in class body
+
+[case testClassVarFunctionRetType]
+from typing import ClassVar
+def f() -> ClassVar: pass
+[out]
+main:2: error: ClassVar can only be used for assignments in class body
+
+[case testClassVarMethodRetType]
+from typing import ClassVar
+class A:
+    def f(self) -> ClassVar: pass
+[out]
+main:3: error: ClassVar can only be used for assignments in class body
+
+[case testMultipleClassVarInFunctionSig]
+from typing import ClassVar
+def f(x: ClassVar, y: ClassVar) -> ClassVar: pass
+[out]
+main:2: error: ClassVar can only be used for assignments in class body
+
+[case testClassVarInCallableArgs]
+from typing import Callable, ClassVar, Any
+f = None  # type: Callable[[int, ClassVar], Any]
+[out]
+main:2: error: Invalid type: ClassVar nested inside other type
+
+[case testClassVarInCallableRet]
+from typing import Callable, ClassVar
+f = None  # type: Callable[..., ClassVar]
+[out]
+main:2: error: Invalid type: ClassVar nested inside other type
+
+[case testClassVarInUnion]
+from typing import ClassVar, Union
+x = None  # type: Union[ClassVar, str]
+[out]
+main:2: error: Invalid type: ClassVar nested inside other type
+
+[case testClassVarInUnionAsAttribute]
+from typing import ClassVar, Union
+class A:
+    x = None  # type: Union[ClassVar, str]
+[out]
+main:3: error: Invalid type: ClassVar nested inside other type
+
+[case testListWithClassVars]
+from typing import ClassVar, List
+x = []  # type: List[ClassVar]
+[builtins fixtures/list.pyi]
+[out]
+main:2: error: Invalid type: ClassVar nested inside other type
+
+[case testTupleClassVar]
+from typing import ClassVar, Tuple
+x = None  # type: Tuple[ClassVar, int]
+[out]
+main:2: error: Invalid type: ClassVar nested inside other type
+
+[case testMultipleLvaluesWithList]
+from typing import ClassVar, List
+class A:
+    [x, y] = None, None  # type: List[ClassVar]
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Invalid type: ClassVar nested inside other type
+
+[case testDeeplyNested]
+from typing import Callable, ClassVar, Union
+class A: pass
+class B:
+    x = None  # type: Union[str, Callable[[A, ClassVar], int]]
+[out]
+main:4: error: Invalid type: ClassVar nested inside other type
+
+[case testClassVarInClassVar]
+from typing import ClassVar
+class A:
+    x = None  # type: ClassVar[ClassVar[int]]
+[out]
+main:3: error: Invalid type: ClassVar nested inside other type
+
+[case testInsideGeneric]
+from typing import ClassVar, Generic, TypeVar
+T = TypeVar('T')
+class A(Generic[T]): pass
+class B:
+    x = None  # type: A[ClassVar]
+[out]
+main:5: error: Invalid type: ClassVar nested inside other type
+
+[case testDefineOnSelf]
+from typing import ClassVar
+class A:
+    def __init__(self) -> None:
+        self.x = None  # type: ClassVar
+[out]
+main:4: error: ClassVar can only be used for assignments in class body
+
+[case testForIndex]
+from typing import ClassVar
+for i in []:  # type: ClassVar
+    pass
+[out]
+main:2: error: ClassVar can only be used for assignments in class body
+
+[case testForIndexInClassBody]
+from typing import ClassVar
+class A:
+    for i in []:  # type: ClassVar
+        pass
+[out]
+main:3: error: ClassVar can only be used for assignments in class body
+
+[case testWithStmt]
+from typing import ClassVar
+class A: pass
+with A() as x:  # type: ClassVar
+    pass
+[out]
+main:3: error: ClassVar can only be used for assignments in class body
+
+[case testWithStmtInClassBody]
+from typing import ClassVar
+class A: pass
+class B:
+    with A() as x:  # type: ClassVar
+        pass
+[out]
+main:4: error: ClassVar can only be used for assignments in class body
+
+[case testClassVarWithGeneric]
+from typing import ClassVar, Generic, TypeVar
+T = TypeVar('T')
+class A(Generic[T]):
+    x = None  # type: ClassVar[T]
+[out]
+main:4: error: Invalid type: ClassVar cannot be generic
+
+[case testClassVarWithNestedGeneric]
+from typing import ClassVar, Generic, List, TypeVar, Union
+T = TypeVar('T')
+U = TypeVar('U')
+class A(Generic[T, U]):
+    x = None  # type: ClassVar[Union[T, List[U]]]
+[builtins fixtures/list.pyi]
+[out]
+main:5: error: Invalid type: ClassVar cannot be generic
diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test
new file mode 100644
index 0000000..2192bce
--- /dev/null
+++ b/test-data/unit/semanal-errors.test
@@ -0,0 +1,1403 @@
+[case testUndefinedVariableInGlobalStatement]
+import typing
+x
+y
+[out]
+main:2: error: Name 'x' is not defined
+main:3: error: Name 'y' is not defined
+
+[case testUndefinedVariableWithinFunctionContext]
+import typing
+def f() -> None:
+  x
+y
+[out]
+main:3: error: Name 'x' is not defined
+main:4: error: Name 'y' is not defined
+
+[case testMethodScope]
+import typing
+class A:
+  def f(self): pass
+f
+[out]
+main:4: error: Name 'f' is not defined
+
+[case testMethodScope2]
+import typing
+class A:
+  def f(self): pass
+class B:
+  def g(self) -> None:
+    f # error
+    g # error
+[out]
+main:6: error: Name 'f' is not defined
+main:7: error: Name 'g' is not defined
+
+[case testInvalidType]
+import typing
+x = None # type: X
+[out]
+main:2: error: Name 'X' is not defined
+
+[case testInvalidGenericArg]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+x = 0 # type: A[y]
+[out]
+main:4: error: Name 'y' is not defined
+
+[case testInvalidNumberOfGenericArgsInTypeDecl]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A: pass
+class B(Generic[t]): pass
+x = 0 # type: B[A, A]
+y = 0 # type: A[A]
+[out]
+main:5: error: "B" expects 1 type argument, but 2 given
+main:6: error: "A" expects no type arguments, but 1 given
+
+[case testInvalidNumberOfGenericArgsInUndefinedArg]
+
+class A: pass
+x = None  # type: A[int] # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInNestedBlock]
+
+class A: pass
+class B:
+    def f(self) -> None:
+        while 1:
+            x = None  # type: A[int] \
+                # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInSignature]
+import typing
+class A: pass
+def f() -> A[int]: pass # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInOverloadedSignature]
+from typing import overload
+class A: pass
+@overload
+def f(): pass
+@overload
+def f(x: A[int]) -> None: pass # E: "A" expects no type arguments, but 1 given
+def f(*args): pass
+[out]
+
+[case testInvalidNumberOfGenericArgsInBaseType]
+import typing
+class A: pass
+class B(A[int]): pass # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInCast]
+from typing import cast
+class A: pass
+x = cast(A[int], 1) # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInNestedGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]): pass
+class B: pass
+def f() -> A[B[int]]: pass # E: "B" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInTupleType]
+from typing import Tuple
+class A: pass
+x = None # type: Tuple[A[int]] # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInFunctionType]
+from typing import Callable
+class A: pass
+x = None # type: Callable[[A[int]], int]  # E: "A" expects no type arguments, but 1 given
+y = None # type: Callable[[], A[int]]  # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testVarOrFuncAsType]
+import typing
+def f(): pass
+x = 1
+y = 0 # type: f
+z = 0 # type: x
+[out]
+main:4: error: Invalid type "__main__.f"
+main:5: error: Invalid type "__main__.x"
+
+[case testGlobalVarRedefinition]
+import typing
+class A: pass
+x = 0 # type: A
+x = 0 # type: A
+[out]
+main:4: error: Name 'x' already defined
+
+[case testLocalVarRedefinition]
+import typing
+class A: pass
+def f() -> None:
+  x = 0 # type: A
+  x = 0 # type: A
+[out]
+main:5: error: Name 'x' already defined
+
+[case testClassVarRedefinition]
+import typing
+class A:
+  x = 0 # type: object
+  x = 0 # type: object
+[out]
+main:4: error: Name 'x' already defined
+
+[case testMultipleClassDefinitions]
+import typing
+class A: pass
+class A: pass
+[out]
+main:3: error: Name 'A' already defined on line 2
+
+[case testMultipleMixedDefinitions]
+import typing
+x = 1
+def x(): pass
+class x: pass
+[out]
+main:3: error: Name 'x' already defined on line 2
+main:4: error: Name 'x' already defined on line 2
+
+[case testNameNotImported]
+import typing
+from m import y
+x
+[file m.py]
+x = y = 1
+[out]
+main:3: error: Name 'x' is not defined
+
+[case testMissingNameInImportFrom]
+import typing
+from m import y
+[file m.py]
+x = 1
+[out]
+main:2: error: Module 'm' has no attribute 'y'
+
+[case testMissingModule]
+import typing
+import m
+[out]
+main:2: error: Cannot find module named 'm'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testMissingModule2]
+import typing
+from m import x
+[out]
+main:2: error: Cannot find module named 'm'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testMissingModule3]
+import typing
+from m import *
+[out]
+main:2: error: Cannot find module named 'm'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testMissingModuleRelativeImport]
+import typing
+import m
+[file m/__init__.py]
+from .x import y
+[out]
+tmp/m/__init__.py:1: error: Cannot find module named 'm.x'
+tmp/m/__init__.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testMissingModuleRelativeImport2]
+import typing
+import m.a
+[file m/__init__.py]
+[file m/a.py]
+from .x import y
+[out]
+tmp/m/a.py:1: error: Cannot find module named 'm.x'
+tmp/m/a.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testModuleNotImported]
+import typing
+import _m
+_n.x
+[file _m.py]
+import _n
+[file _n.py]
+x = 1
+[out]
+main:3: error: Name '_n' is not defined
+
+[case testImportAsteriskPlusUnderscore]
+import typing
+from _m import *
+_x
+__x__
+[file _m.py]
+_x = __x__ = 1
+[out]
+main:3: error: Name '_x' is not defined
+main:4: error: Name '__x__' is not defined
+
+[case testRelativeImportAtTopLevelModule]
+from . import m
+[out]
+main:1: error: No parent module -- cannot perform relative import
+
+[case testRelativeImportAtTopLevelModule2]
+from .. import m
+[out]
+main:1: error: No parent module -- cannot perform relative import
+
+[case testUndefinedTypeWithQualifiedName]
+import typing
+import m
+def f() -> m.c: pass
+def g() -> n.c: pass
+[file m.py]
+[out]
+main:3: error: Name 'm.c' is not defined
+main:4: error: Name 'n' is not defined
+
+[case testMissingPackage]
+import typing
+import m.n
+[out]
+main:2: error: Cannot find module named 'm'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Cannot find module named 'm.n'
+
+[case testMissingPackage]
+import typing
+from m.n import x
+from a.b import *
+[out]
+main:2: error: Cannot find module named 'm.n'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:3: error: Cannot find module named 'a.b'
+
+[case testErrorInImportedModule]
+import m
+[file m.py]
+import typing
+x = y
+[out]
+tmp/m.py:2: error: Name 'y' is not defined
+
+[case testErrorInImportedModule2]
+import m.n
+[file m/__init__.py]
+[file m/n.py]
+import k
+[file k.py]
+import typing
+x = y
+[out]
+tmp/k.py:2: error: Name 'y' is not defined
+
+[case testPackageWithoutInitFile]
+import typing
+import m.n
+m.n.x
+[file m/n.py]
+x = 1
+[out]
+main:2: error: Cannot find module named 'm'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Cannot find module named 'm.n'
+
+[case testBreakOutsideLoop]
+break
+def f():
+  break
+[out]
+main:1: error: 'break' outside loop
+main:3: error: 'break' outside loop
+
+[case testContinueOutsideLoop]
+continue
+def f():
+  continue
+[out]
+main:1: error: 'continue' outside loop
+main:3: error: 'continue' outside loop
+
+[case testReturnOutsideFunction]
+def f(): pass
+return
+return 1
+[out]
+main:2: error: 'return' outside function
+main:3: error: 'return' outside function
+
+[case testYieldOutsideFunction]
+yield 1
+yield
+[out]
+main:1: error: 'yield' outside function
+main:2: error: 'yield' outside function
+
+[case testInvalidLvalues1]
+1 = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues2]
+(1) = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues3]
+(1, 1) = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues4]
+[1, 1] = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues5]
+() = 1
+[out]
+main:1: error: can't assign to ()
+
+[case testInvalidLvalues6]
+x = y = z = 1  # ok
+x, (y, 1) = 1
+[out]
+main:2: error: can't assign to literal
+
+[case testInvalidLvalues7]
+x, [y, 1] = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues8]
+x, [y, [z, 1]] = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues9]
+x, (y) = 1 # ok
+x, (y, (z, z)) = 1 # ok
+x, (y, (z, 1)) = 1
+[out]
+main:3: error: can't assign to literal
+
+[case testInvalidLvalues10]
+x + x = 1
+[out]
+main:1: error: can't assign to operator
+
+[case testInvalidLvalues11]
+-x = 1
+[out]
+main:1: error: can't assign to operator
+
+[case testInvalidLvalues12]
+1.1 = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues13]
+'x' = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues14]
+x() = 1
+[out]
+main:1: error: can't assign to function call
+
+[case testTwoStarExpressions]
+a, *b, *c = 1
+*a, (*b, c) = 1
+a, (*b, *c) = 1
+[*a, *b] = 1
+[out]
+main:1: error: Two starred expressions in assignment
+main:3: error: Two starred expressions in assignment
+main:4: error: Two starred expressions in assignment
+
+[case testTwoStarExpressionsInForStmt]
+z = 1
+for a, *b, *c in z:
+    pass
+for *a, (*b, c) in z:
+    pass
+for a, (*b, *c) in z:
+    pass
+for [*a, *b] in z:
+    pass
+[out]
+main:2: error: Two starred expressions in assignment
+main:6: error: Two starred expressions in assignment
+main:8: error: Two starred expressions in assignment
+
+[case testTwoStarExpressionsInGeneratorExpr]
+(a for a, *b, *c in [])
+(a for *a, (*b, c) in [])
+(a for a, (*b, *c) in [])
+[out]
+main:1: error: Name 'a' is not defined
+main:1: error: Two starred expressions in assignment
+main:3: error: Two starred expressions in assignment
+
+[case testStarExpressionRhs]
+b = 1
+c = 1
+d = 1
+a = *b
+[out]
+main:4: error: Can use starred expression only as assignment target
+
+[case testStarExpressionInExp]
+a = 1
+*a + 1
+[out]
+main:2: error: Can use starred expression only as assignment target
+
+[case testInvalidDel1]
+x = 1
+del x(1)  # E: can't delete function call
+[out]
+
+[case testInvalidDel2]
+x = 1
+del x + 1 # E: can't delete operator
+[out]
+
+[case testInvalidDel3]
+del z     # E: Name 'z' is not defined
+[out]
+
+[case testFunctionTvarScope]
+from typing import TypeVar
+t = TypeVar('t')
+def f(x: t) -> t: pass
+x = 0 # type: t
+[out]
+main:4: error: Invalid type "__main__.t"
+
+[case testClassTvarScope]
+from typing import Generic, TypeVar
+t = TypeVar('t')
+class c(Generic[t]): pass
+x = 0 # type: t
+[out]
+main:4: error: Invalid type "__main__.t"
+
+[case testExpressionRefersToTypeVariable]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class c(Generic[t]):
+    def f(self) -> None: x = t
+def f(y: t): x = t
+[out]
+main:4: error: 't' is a type variable and only valid in type context
+main:5: error: 't' is a type variable and only valid in type context
+
+[case testMissingSelf]
+import typing
+class A:
+  def f(): pass
+[out]
+main:3: error: Method must have at least one argument
+
+[case testInvalidBaseClass]
+import typing
+class A(B): pass
+[out]
+main:2: error: Name 'B' is not defined
+
+[case testSuperOutsideClass]
+class A: pass
+super().x
+def f() -> None: super().y
+[out]
+main:2: error: "super" used outside class
+main:3: error: "super" used outside class
+
+[case testMissingSelfInMethod]
+import typing
+class A:
+  def f() -> None: pass
+  def g(): pass
+[out]
+main:3: error: Method must have at least one argument
+main:4: error: Method must have at least one argument
+
+[case testMultipleMethodDefinition]
+import typing
+class A:
+  def f(self) -> None: pass
+  def g(self) -> None: pass
+  def f(self, x: object) -> None: pass
+[out]
+main:5: error: Name 'f' already defined
+
+[case testInvalidGlobalDecl]
+import typing
+def f() -> None:
+    global x
+    x = None
+[out]
+main:4: error: Name 'x' is not defined
+
+[case testInvalidNonlocalDecl]
+import typing
+def f():
+    def g() -> None:
+       nonlocal x
+       x = None
+[out]
+main:4: error: No binding for nonlocal 'x' found
+main:5: error: Name 'x' is not defined
+
+[case testNonlocalDeclNotMatchingGlobal]
+import typing
+x = None
+def f() -> None:
+    nonlocal x
+    x = None
+[out]
+main:4: error: No binding for nonlocal 'x' found
+main:5: error: Name 'x' is not defined
+
+[case testNonlocalDeclConflictingWithParameter]
+import typing
+def g():
+    x = None
+    def f(x) -> None:
+        nonlocal x
+        x = None
+[out]
+main:5: error: Name 'x' is already defined in local scope before nonlocal declaration
+
+[case testNonlocalDeclOutsideFunction]
+x = 2
+nonlocal x
+[out]
+main:2: error: nonlocal declaration not allowed at module level
+
+[case testGlobalAndNonlocalDecl]
+import typing
+x = 1
+def f():
+    x = 1
+    def g() -> None:
+       global x
+       nonlocal x
+       x = None
+[out]
+main:7: error: Name 'x' is nonlocal and global
+
+[case testNonlocalAndGlobalDecl]
+import typing
+x = 1
+def f():
+    x = 1
+    def g() -> None:
+       nonlocal x
+       global x
+       x = None
+[out]
+main:7: error: Name 'x' is nonlocal and global
+
+[case testNestedFunctionAndScoping]
+import typing
+def f(x) -> None:
+    def g(y):
+        z = x
+    z
+    y
+    x
+[out]
+main:5: error: Name 'z' is not defined
+main:6: error: Name 'y' is not defined
+
+[case testMultipleNestedFunctionDef]
+import typing
+def f(x) -> None:
+    def g(): pass
+    x = 1
+    def g(): pass
+[out]
+main:5: error: Name 'g' already defined
+
+[case testRedefinedOverloadedFunction]
+from typing import overload, Any
+def f() -> None:
+    @overload
+    def p(o: object) -> None: pass # no error
+    @overload
+    def p(o: Any) -> None: pass    # no error
+    x = 1
+    def p(): pass # fail
+[out]
+main:3: error: An overloaded function outside a stub file must have an implementation
+main:8: error: Name 'p' already defined
+
+[case testNestedFunctionInMethod]
+import typing
+class A:
+   def f(self) -> None:
+       def g() -> None:
+           x
+       y
+[out]
+main:5: error: Name 'x' is not defined
+main:6: error: Name 'y' is not defined
+
+[case testImportScope]
+import typing
+def f() -> None:
+    import x
+x.y # E: Name 'x' is not defined
+[file x.py]
+y = 1
+[out]
+
+[case testImportScope2]
+import typing
+def f() -> None:
+    from x import y
+    y
+y # E: Name 'y' is not defined
+[file x.py]
+y = 1
+[out]
+
+[case testImportScope3]
+import typing
+def f() -> None:
+    from x import *
+    y
+y # E: Name 'y' is not defined
+[file x.py]
+y = 1
+[out]
+
+[case testImportScope4]
+import typing
+class A:
+    from x import *
+    y
+y # E: Name 'y' is not defined
+[file x.py]
+y = 1
+[out]
+
+[case testScopeOfNestedClass]
+import typing
+def f():
+    class A: pass
+    A
+A # E: Name 'A' is not defined
+[out]
+
+[case testScopeOfNestedClass2]
+import typing
+class A:
+    class B: pass
+B # E: Name 'B' is not defined
+[out]
+
+[case testScopeOfNestedClass3]
+import typing
+class A:
+    def f(self):
+        class B: pass
+    B # E: Name 'B' is not defined
+B # E: Name 'B' is not defined
+[out]
+
+[case testInvalidNestedClassReferenceInDecl]
+import typing
+class A: pass
+foo = 0 # type: A.x      # E: Name 'A.x' is not defined
+[out]
+
+[case testTvarScopingWithNestedClass]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+s = TypeVar('s')
+class A(Generic[t]):
+    class B(Generic[s]):
+        x = 0 # type: A[s]
+        y = 0 # type: A[t]        # E: Invalid type "__main__.t"
+    z = 0 # type: A[s]            # E: Invalid type "__main__.s"
+    a = 0 # type: A[t]
+[out]
+
+[case testTestExtendPrimitives]
+class C(bool): pass # E: 'bool' is not a valid base class
+class A(int): pass # ok
+class B(float): pass # ok
+class D(str): pass # ok
+[builtins fixtures/primitives.pyi]
+[out]
+
+[case testCyclicInheritance]
+class A(A): pass # E: Cycle in inheritance hierarchy
+[out]
+
+[case testAssignToTypeDef]
+import typing
+class A: pass
+A = None # E: Cannot assign to a type
+[out]
+
+[case testInvalidCastTargetSyntax]
+from typing import cast, TypeVar, Generic
+t = TypeVar('t')
+class C(Generic[t]): pass
+cast(str + str, None)    # E: Cast target is not a type
+cast(C[str][str], None)  # E: Cast target is not a type
+cast(C[str + str], None) # E: Cast target is not a type
+cast([int, str], None)   # E: Invalid type
+[out]
+
+[case testInvalidCastTargetType]
+from typing import cast
+x = 0
+cast(x, None)        # E: Invalid type "__main__.x"
+cast(t, None)        # E: Name 't' is not defined
+cast(__builtins__.x, None) # E: Name '__builtins__.x' is not defined
+[out]
+
+[case testInvalidCastTargetType2]
+from typing import cast
+x = 0
+cast(str[str], None) # E: "str" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfArgsToCast]
+from typing import cast
+cast(str) # E: 'cast' expects 2 arguments
+cast(str, None, None) # E: 'cast' expects 2 arguments
+[out]
+
+[case testInvalidKindsOfArgsToCast]
+from typing import cast
+cast(str, *None) # E: 'cast' must be called with 2 positional arguments
+cast(str, target=None) # E: 'cast' must be called with 2 positional arguments
+[out]
+
+[case testInvalidAnyCall]
+from typing import Any
+Any(str, None)  # E: Any(...) is no longer supported. Use cast(Any, ...) instead
+Any(arg=str)  # E: Any(...) is no longer supported. Use cast(Any, ...) instead
+[out]
+
+[case testTypeListAsType]
+def f(x:[int, str]) -> None: # E: Invalid type
+    pass
+[out]
+
+[case testInvalidFunctionType]
+from typing import Callable
+x = None # type: Callable[int, str]
+y = None # type: Callable[int]
+z = None # type: Callable[int, int, int]
+[out]
+main:2: error: The first argument to Callable must be a list of types or "..."
+main:3: error: Invalid function type
+main:4: error: Invalid function type
+
+[case testAbstractGlobalFunction]
+import typing
+from abc import abstractmethod
+@abstractmethod
+def foo(): pass
+[out]
+main:3: error: 'abstractmethod' used with a non-method
+
+[case testAbstractNestedFunction]
+import typing
+from abc import abstractmethod
+def g() -> None:
+  @abstractmethod
+  def foo(): pass
+[out]
+main:4: error: 'abstractmethod' used with a non-method
+
+[case testInvalidTypeDeclaration]
+import typing
+def f(): pass
+f() = 1 # type: int
+[out]
+main:3: error: can't assign to function call
+
+[case testIndexedAssignmentWithTypeDeclaration]
+import typing
+None[1] = 1 # type: int
+[out]
+main:2: error: Unexpected type declaration
+
+[case testNonSelfMemberAssignmentWithTypeDeclaration]
+import typing
+None.x = 1 # type: int
+[out]
+main:2: error: Type cannot be declared in assignment to non-self attribute
+
+[case testNonSelfMemberAssignmentWithTypeDeclarationInMethod]
+import typing
+class A:
+  def f(self, x) -> None:
+    x.y = 1 # type: int
+[out]
+main:4: error: Type cannot be declared in assignment to non-self attribute
+
+[case testInvalidTypeInTypeApplication]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+A[TypeVar] # E: Invalid type "typing.TypeVar"
+[out]
+
+[case testInvalidTypeInTypeApplication2]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+A[1] # E: Type expected within [...]
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypes]
+x, y = 1, 2 # type: int, str, int # E: Incompatible number of tuple items
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypesNested]
+x, (y, z) = 1, (2, 3) # type: int, (str, int, int) # E: Incompatible number of tuple items
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypesNested2]
+x, (y, z) = 1, (2, 3) # type: int, (str, ) # E: Incompatible number of tuple items
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypesNested3]
+x, (y, z) = 1, (2, 3) # type: int, str # E: Tuple type expected for multiple variables
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypesNested4]
+x, (y, z) = 1, (2, 3) # type: int, str, int # E: Incompatible number of tuple items
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypesNested5]
+x, (y, ) = 1, (2, ) # type: int, str # E: Tuple type expected for multiple variables
+[out]
+
+[case testVariableDeclWithInvalidType]
+x, y = 1, 2 # type: int # E: Tuple type expected for multiple variables
+[out]
+
+[case testInvalidLvalueWithExplicitType]
+a = 1
+a() = None # type: int  # E: can't assign to function call
+[out]
+
+[case testInvalidLvalueWithExplicitType2]
+a = 1
+a[1] = None # type: int # E: Unexpected type declaration
+a.x = None # type: int \
+    # E: Type cannot be declared in assignment to non-self attribute
+[out]
+
+[case testInvalidLvalueWithExplicitType3]
+a = 1
+a.y, a.x = None, None # type: int, int \
+    # E: Type cannot be declared in assignment to non-self attribute
+a[1], a[2] = None, None # type: int, int \
+    # E: Unexpected type declaration
+[out]
+
+[case testMissingGenericImport]
+from typing import TypeVar
+T = TypeVar('T')
+class A(Generic[T]): pass
+[out]
+main:3: error: Name 'Generic' is not defined
+
+[case testInvalidTypeWithinGeneric]
+from typing import Generic
+class A(Generic[int]): pass # E: Free type variable expected in Generic[...]
+[out]
+
+[case testInvalidTypeWithinNestedGenericClass]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+class A(Generic[T]):
+    class B(Generic[T]): pass \
+          # E: Free type variable expected in Generic[...]
+[out]
+
+[case testIncludingGenericTwiceInBaseClassList]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+class A(Generic[T], Generic[S]): pass \
+      # E: Duplicate Generic in bases
+[out]
+
+[case testInvalidMetaclass]
+class A(metaclass=x): pass # E: Name 'x' is not defined
+[out]
+
+[case testInvalidQualifiedMetaclass]
+import abc
+class A(metaclass=abc.Foo): pass # E: Name 'abc.Foo' is not defined
+[out]
+
+[case testNonClassMetaclass]
+def f(): pass
+class A(metaclass=f): pass # E: Invalid metaclass 'f'
+[out]
+
+[case testInvalidTypevarArguments]
+from typing import TypeVar
+a = TypeVar()       # E: Too few arguments for TypeVar()
+b = TypeVar(x='b')  # E: TypeVar() expects a string literal as first argument
+c = TypeVar(1)      # E: TypeVar() expects a string literal as first argument
+d = TypeVar('D')    # E: String argument 1 'D' to TypeVar(...) does not match variable name 'd'
+e = TypeVar('e', int, str, x=1)   # E: Unexpected argument to TypeVar(): x
+f = TypeVar('f', (int, str), int) # E: Type expected
+g = TypeVar('g', int)             # E: TypeVar cannot have only a single constraint
+h = TypeVar('h', x=(int, str))    # E: Unexpected argument to TypeVar(): x
+i = TypeVar('i', bound=1)         # E: TypeVar 'bound' must be a type
+[out]
+
+[case testMoreInvalidTypevarArguments]
+from typing import TypeVar
+T = TypeVar('T', int, str, bound=bool) # E: TypeVar cannot have both values and an upper bound
+S = TypeVar('S', covariant=True, contravariant=True) \
+    # E: TypeVar cannot be both covariant and contravariant
+[builtins fixtures/bool.pyi]
+
+[case testInvalidTypevarValues]
+from typing import TypeVar
+b = TypeVar('b', *[int]) # E: Unexpected argument to TypeVar()
+c = TypeVar('c', int, 2) # E: Type expected
+[out]
+
+[case testObsoleteTypevarValuesSyntax]
+from typing import TypeVar
+a = TypeVar('a', values=(int, str))
+[out]
+main:2: error: TypeVar 'values' argument not supported
+main:2: error: Use TypeVar('T', t, ...) instead of TypeVar('T', values=(t, ...))
+
+[case testLocalTypevarScope]
+from typing import TypeVar
+def f() -> None:
+    T = TypeVar('T')
+def g(x: T) -> None: pass # E: Name 'T' is not defined
+[out]
+
+[case testClassTypevarScope]
+from typing import TypeVar
+class A:
+    T = TypeVar('T')
+def g(x: T) -> None: pass # E: Name 'T' is not defined
+[out]
+
+[case testRedefineVariableAsTypevar]
+from typing import TypeVar
+x = 0
+x = TypeVar('x') # E: Cannot redefine 'x' as a type variable
+[out]
+
+[case testTypevarWithType]
+from typing import TypeVar
+x = TypeVar('x') # type: int # E: Cannot declare the type of a type variable
+[out]
+
+[case testRedefineTypevar]
+from typing import TypeVar
+t = TypeVar('t')
+t = 1 # E: Invalid assignment target
+[out]
+
+[case testRedefineTypevar2]
+from typing import TypeVar
+t = TypeVar('t')
+def t(): pass # E: Name 't' already defined on line 2
+[out]
+
+[case testRedefineTypevar3]
+from typing import TypeVar
+t = TypeVar('t')
+class t: pass # E: Name 't' already defined on line 2
+[out]
+
+[case testRedefineTypevar4]
+from typing import TypeVar
+t = TypeVar('t')
+from typing import Generic as t # E: Name 't' already defined
+[out]
+
+[case testInvalidStrLiteralType]
+def f(x: 'foo'): pass # E: Name 'foo' is not defined
+[out]
+
+[case testInvalidStrLiteralType2]
+def f(x: 'int['): pass # E: syntax error in type comment
+[out]
+
+[case testInconsistentOverload]
+from typing import overload
+def dec(x): pass
+@dec  # E: The implementation for an overloaded function must come last
+def f(): pass
+@overload
+def f(): pass
+[out]
+
+[case testInconsistentOverload2]
+from typing import overload
+def dec(x): pass
+@dec  # E: The implementation for an overloaded function must come last
+def f(): pass
+@overload
+def f(): pass
+[out]
+
+[case testMissingOverloadDecorator]
+from typing import overload
+def dec(x): pass
+@dec
+def f(): pass
+@dec  # E: Name 'f' already defined
+def f(): pass
+[out]
+
+[case testIncompatibleSignatureInComment]
+import typing
+def f(): # type: (int) -> int
+  pass
+def g(x): # type: () -> int
+  pass
+[out]
+main:2: error: Type signature has too many arguments
+main:4: error: Type signature has too few arguments
+
+[case testStaticmethodAndNonMethod]
+import typing
+@staticmethod
+def f(): pass
+class A:
+  def g(self) -> None:
+    @staticmethod
+    def h(): pass
+[builtins fixtures/staticmethod.pyi]
+[out]
+main:2: error: 'staticmethod' used with a non-method
+main:6: error: 'staticmethod' used with a non-method
+
+[case testClassmethodAndNonMethod]
+import typing
+@classmethod
+def f(): pass
+class A:
+  def g(self) -> None:
+    @classmethod
+    def h(): pass
+[builtins fixtures/classmethod.pyi]
+[out]
+main:2: error: 'classmethod' used with a non-method
+main:6: error: 'classmethod' used with a non-method
+
+[case testNonMethodProperty]
+import typing
+@property  # E: 'property' used with a non-method
+def f() -> int: pass
+[builtins fixtures/property.pyi]
+[out]
+
+[case testInvalidArgCountForProperty]
+import typing
+class A:
+    @property
+    def f(self, x) -> int: pass  # E: Too many arguments
+    @property
+    def g() -> int: pass   # E: Method must have at least one argument
+[builtins fixtures/property.pyi]
+[out]
+
+[case testOverloadedProperty]
+from typing import overload
+class A:
+    @overload  # E: Decorated property not supported
+    @property
+    def f(self) -> int: pass
+    @property  # E: Decorated property not supported
+    @overload
+    def f(self) -> int: pass
+[builtins fixtures/property.pyi]
+[out]
+
+[case testOverloadedProperty2]
+from typing import overload
+class A:
+    @overload  # E: An overloaded function outside a stub file must have an implementation
+    def f(self) -> int: pass
+    @property  # E: Decorated property not supported
+    @overload
+    def f(self) -> int: pass
+[builtins fixtures/property.pyi]
+[out]
+
+[case testDecoratedProperty]
+import typing
+def dec(f): pass
+class A:
+    @dec  # E: Decorated property not supported
+    @property
+    def f(self) -> int: pass
+    @property  # E: Decorated property not supported
+    @dec
+    def g(self) -> int: pass
+[builtins fixtures/property.pyi]
+[out]
+
+[case testImportTwoModulesWithSameNameInFunction]
+import typing
+def f() -> None:
+    import x
+    import y as x # E: Name 'x' already defined
+    x.y
+[file x.py]
+y = 1
+[file y.py]
+[out]
+
+[case testImportTwoModulesWithSameNameInGlobalContext]
+import typing
+import x
+import y as x # E: Name 'x' already defined
+x.y
+[file x.py]
+y = 1
+[file y.py]
+[out]
+
+[case testListTypeAliasWithoutImport]
+import typing
+def f() -> List[int]: pass
+[builtins fixtures/list.pyi]
+[out]
+main:2: error: Name 'List' is not defined
+
+[case testImportObsoleteTypingFunction]
+from typing import Function # E: Module 'typing' has no attribute 'Function' (it's now called 'typing.Callable')
+from _m import Function # E: Module '_m' has no attribute 'Function'
+[file _m.py]
+[out]
+
+[case testTypeRefresToObsoleteTypingFunction]
+import typing
+import _m
+def f(x: typing.Function[[], None]) -> None: pass
+def g(x: _m.Function[[], None]) -> None: pass
+[file _m.py]
+[out]
+main:3: error: Name 'typing.Function' is not defined (it's now called 'typing.Callable')
+--'
+main:4: error: Name '_m.Function' is not defined
+
+[case testUnqualifiedNameRefersToObsoleteTypingFunction]
+x = None # type: Function[[], None]
+[out]
+main:1: error: Name 'Function' is not defined
+main:1: note: (Did you mean 'typing.Callable'?)
+
+[case testInvalidWithTarget]
+def f(): pass
+with f() as 1: pass  # E: can't assign to literal
+[out]
+
+[case testUseObsoleteNameForTypeVar]
+from typing import typevar
+t = typevar('t')
+[out]
+main:1: error: Module 'typing' has no attribute 'typevar' (it's now called 'typing.TypeVar')
+--' (this fixes syntax highlighting)
+
+[case testUseObsoleteNameForTypeVar2]
+t = typevar('t')
+[out]
+main:1: error: Name 'typevar' is not defined
+main:1: note: (Did you mean 'typing.TypeVar'?)
+
+[case testUseObsoleteNameForTypeVar3]
+import typing
+t = typing.typevar('t')
+[out]
+main:2: error: Module 'typing' has no attribute 'typevar' (it's now called 'typing.TypeVar')
+--' (work around syntax highlighting :-/)
+
+[case testInvalidTypeAnnotation]
+import typing
+def f() -> None:
+    1[2] = 1  # type: int
+[out]
+main:3: error: Unexpected type declaration
+
+[case testInvalidTypeAnnotation2]
+import typing
+def f() -> None:
+    f() = 1  # type: int
+[out]
+main:3: error: can't assign to function call
+
+[case testInvalidReferenceToAttributeOfOuterClass]
+class A:
+    class X: pass
+    class B:
+        y = X  # E: Name 'X' is not defined
+[out]
+
+[case testStubPackage]
+from m import x
+from m import y # E: Module 'm' has no attribute 'y'
+[file m/__init__.pyi]
+x = 1
+[out]
+
+[case testStubPackageSubModule]
+from m import x
+from m import y # E: Module 'm' has no attribute 'y'
+from m.m2 import y
+from m.m2 import z # E: Module 'm.m2' has no attribute 'z'
+[file m/__init__.pyi]
+x = 1
+[file m/m2.pyi]
+y = 1
+[out]
+
+[case testMissingStubForThirdPartyModule]
+import __dummy_third_party1
+[out]
+main:1: error: No library stub file for module '__dummy_third_party1'
+main:1: note: (Stub files are from https://github.com/python/typeshed)
+
+[case testMissingStubForStdLibModule]
+import __dummy_stdlib1
+[out]
+main:1: error: No library stub file for standard library module '__dummy_stdlib1'
+main:1: note: (Stub files are from https://github.com/python/typeshed)
+
+[case testMissingStubForTwoModules]
+import __dummy_stdlib1
+import __dummy_stdlib2
+[out]
+main:1: error: No library stub file for standard library module '__dummy_stdlib1'
+main:1: note: (Stub files are from https://github.com/python/typeshed)
+main:2: error: No library stub file for standard library module '__dummy_stdlib2'
+
+[case testListComprehensionSpecialScoping]
+class A:
+    x = 1
+    y = 1
+    z = 1
+    [x for i in z if y]
+[out]
+main:5: error: Name 'x' is not defined
+main:5: error: Name 'y' is not defined
+
+[case testTypeRedeclarationNoSpuriousWarnings]
+from typing import Tuple
+a = 1  # type: int
+a = 's'  # type: str
+a = ('spam', 'spam', 'eggs', 'spam')  # type: Tuple[str]
+
+[out]
+main:3: error: Name 'a' already defined
+main:4: error: Name 'a' already defined
+
+[case testDuplicateDefFromImport]
+from m import A
+class A:  # E: Name 'A' already defined (possibly by an import)
+    pass
+[file m.py]
+class A:
+    pass
+[out]
+
+[case testDuplicateDefDec]
+from typing import Any
+def dec(x: Any) -> Any:
+    return x
+@dec
+def f() -> None:
+    pass
+@dec  # E: Name 'f' already defined
+def f() -> None:
+    pass
+[out]
+
+[case testDuplicateDefOverload]
+from typing import overload, Any
+if 1:
+    @overload
+    def f(x: int) -> None:
+        pass
+    @overload
+    def f(x: str) -> None:
+        pass
+    def f(x: Any) -> None:
+        pass
+else:
+    def f(x: str) -> None:  # E: Name 'f' already defined on line 3
+        pass
+[out]
+
+[case testDuplicateDefNT]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int),
+                     ('b', str)])
+
+class N:  # E: Name 'N' already defined on line 2
+    pass
+[out]
+
+[case testDuplicateDefTypedDict]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+
+class Point:  # E: Name 'Point' already defined on line 2
+    pass
+[builtins fixtures/dict.pyi]
+
+[out]
+
+[case testTypeVarClassDup]
+from typing import TypeVar
+T = TypeVar('T')
+class T: ...  # E: Name 'T' already defined on line 2
+
+[out]
+
+[case testAliasDup]
+from typing import List
+A = List[int]
+class A: ... # E: Name 'A' already defined on line 2
+
+[builtins fixtures/list.pyi]
+[out]
diff --git a/test-data/unit/semanal-expressions.test b/test-data/unit/semanal-expressions.test
new file mode 100644
index 0000000..32a28f7
--- /dev/null
+++ b/test-data/unit/semanal-expressions.test
@@ -0,0 +1,395 @@
+[case testLiterals]
+(1, 'x', 1.1, 1.1j)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1(
+      IntExpr(1)
+      StrExpr(x)
+      FloatExpr(1.1)
+      ComplexExpr(1.1j))))
+
+[case testMemberExpr]
+x = 1
+x.y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(x [__main__.x])
+      y)))
+
+[case testIndexExpr]
+x = y = 1
+x[y]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    IndexExpr:2(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))))
+
+[case testBinaryOperations]
+x = y = 1
+x + y
+x | y
+x is not y
+x == y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    OpExpr:2(
+      +
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y])))
+  ExpressionStmt:3(
+    OpExpr:3(
+      |
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y])))
+  ExpressionStmt:4(
+    ComparisonExpr:4(
+      is not
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y])))
+  ExpressionStmt:5(
+    ComparisonExpr:5(
+      ==
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))))
+
+[case testUnaryOperations]
+x = 1
+-x
+~x
++x
+not x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  ExpressionStmt:2(
+    UnaryExpr:2(
+      -
+      NameExpr(x [__main__.x])))
+  ExpressionStmt:3(
+    UnaryExpr:3(
+      ~
+      NameExpr(x [__main__.x])))
+  ExpressionStmt:4(
+    UnaryExpr:4(
+      +
+      NameExpr(x [__main__.x])))
+  ExpressionStmt:5(
+    UnaryExpr:5(
+      not
+      NameExpr(x [__main__.x]))))
+
+[case testSlices]
+x = y = z = 1
+x[y:z:x]
+x[:]
+x[:y]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y])
+      NameExpr(z* [__main__.z]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    IndexExpr:2(
+      NameExpr(x [__main__.x])
+      SliceExpr:-1(
+        NameExpr(y [__main__.y])
+        NameExpr(z [__main__.z])
+        NameExpr(x [__main__.x]))))
+  ExpressionStmt:3(
+    IndexExpr:3(
+      NameExpr(x [__main__.x])
+      SliceExpr:-1(
+        <empty>
+        <empty>)))
+  ExpressionStmt:4(
+    IndexExpr:4(
+      NameExpr(x [__main__.x])
+      SliceExpr:-1(
+        <empty>
+        NameExpr(y [__main__.y])))))
+
+[case testTupleLiteral]
+x = y = 1
+x, y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    TupleExpr:2(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))))
+
+[case testListLiteral]
+x = y = 1
+([], [x, y])
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    TupleExpr:2(
+      ListExpr:2()
+      ListExpr:2(
+        NameExpr(x [__main__.x])
+        NameExpr(y [__main__.y])))))
+
+[case testDictLiterals]
+x = y = 1
+{ x : y, y : x }
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    DictExpr:2(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y])
+      NameExpr(y [__main__.y])
+      NameExpr(x [__main__.x]))))
+
+[case testListComprehension]
+a = 0
+([x + 1 for x in a])
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  ExpressionStmt:2(
+    ListComprehension:2(
+      GeneratorExpr:2(
+        OpExpr:2(
+          +
+          NameExpr(x [l])
+          IntExpr(1))
+        NameExpr(x* [l])
+        NameExpr(a [__main__.a])))))
+
+[case testListComprehensionInFunction]
+def f(a) -> None:
+    [x for x in a]
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(a))
+    def (a: Any)
+    Block:1(
+      ExpressionStmt:2(
+        ListComprehension:2(
+          GeneratorExpr:2(
+            NameExpr(x [l])
+            NameExpr(x* [l])
+            NameExpr(a [l])))))))
+
+[case testListComprehensionWithCondition]
+a = 0
+a = [x for x in a if x]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  AssignmentStmt:2(
+    NameExpr(a [__main__.a])
+    ListComprehension:2(
+      GeneratorExpr:2(
+        NameExpr(x [l])
+        NameExpr(x* [l])
+        NameExpr(a [__main__.a])
+        NameExpr(x [l])))))
+
+[case testSetComprehension]
+a = 0
+({x + 1 for x in a})
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  ExpressionStmt:2(
+    SetComprehension:2(
+      GeneratorExpr:2(
+        OpExpr:2(
+          +
+          NameExpr(x [l])
+          IntExpr(1))
+        NameExpr(x* [l])
+        NameExpr(a [__main__.a])))))
+
+[case testSetComprehensionWithCondition]
+a = 0
+a = {x for x in a if x}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  AssignmentStmt:2(
+    NameExpr(a [__main__.a])
+    SetComprehension:2(
+      GeneratorExpr:2(
+        NameExpr(x [l])
+        NameExpr(x* [l])
+        NameExpr(a [__main__.a])
+        NameExpr(x [l])))))
+
+[case testDictionaryComprehension]
+a = 0
+({x: x + 1 for x in a})
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  ExpressionStmt:2(
+    DictionaryComprehension:2(
+      NameExpr(x [l])
+      OpExpr:2(
+        +
+        NameExpr(x [l])
+        IntExpr(1))
+      NameExpr(x* [l])
+      NameExpr(a [__main__.a]))))
+
+[case testDictionaryComprehensionWithCondition]
+a = 0
+a = {x: x + 1 for x in a if x}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  AssignmentStmt:2(
+    NameExpr(a [__main__.a])
+    DictionaryComprehension:2(
+      NameExpr(x [l])
+      OpExpr:2(
+        +
+        NameExpr(x [l])
+        IntExpr(1))
+      NameExpr(x* [l])
+      NameExpr(a [__main__.a])
+      NameExpr(x [l]))))
+
+[case testGeneratorExpression]
+a = 0
+(x for x in a)
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  ExpressionStmt:2(
+    GeneratorExpr:2(
+      NameExpr(x [l])
+      NameExpr(x* [l])
+      NameExpr(a [__main__.a]))))
+
+[case testGeneratorExpressionNestedIndex]
+a = 0
+(x for x, (y, z) in a)
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  ExpressionStmt:2(
+    GeneratorExpr:2(
+      NameExpr(x [l])
+      TupleExpr:2(
+        NameExpr(x* [l])
+        TupleExpr:2(
+          NameExpr(y* [l])
+          NameExpr(z* [l])))
+      NameExpr(a [__main__.a]))))
+
+[case testLambda]
+x = 0
+lambda: x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(0))
+  ExpressionStmt:2(
+    LambdaExpr:2(
+      Block:2(
+        ReturnStmt:2(
+          NameExpr(x [__main__.x]))))))
+
+[case testLambdaWithArguments]
+lambda x, y: x + y
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    LambdaExpr:1(
+      Args(
+        Var(x)
+        Var(y))
+      Block:1(
+        ReturnStmt:1(
+          OpExpr:1(
+            +
+            NameExpr(x [l])
+            NameExpr(y [l])))))))
+
+[case testConditionalExpression]
+int if None else str
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ConditionalExpr:1(
+      Condition(
+        NameExpr(None [builtins.None]))
+      NameExpr(int [builtins.int])
+      NameExpr(str [builtins.str]))))
+
+[case testDictWithKeywordArgs]
+dict(a=1, b=str())
+[builtins fixtures/dict.pyi]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    DictExpr:1(
+      StrExpr(a)
+      IntExpr(1)
+      StrExpr(b)
+      CallExpr:1(
+        NameExpr(str [builtins.str])
+        Args()))))
diff --git a/test-data/unit/semanal-modules.test b/test-data/unit/semanal-modules.test
new file mode 100644
index 0000000..7a00e66
--- /dev/null
+++ b/test-data/unit/semanal-modules.test
@@ -0,0 +1,877 @@
+-- NOTE: If a module has a name starting or ending with _, it is skipped in
+--       output.
+
+[case testImport]
+import x
+x.y
+[file x.py]
+y = 1
+[out]
+MypyFile:1(
+  Import:1(x)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(x)
+      y [x.y])))
+MypyFile:1(
+  tmp/x.py
+  AssignmentStmt:1(
+    NameExpr(y* [x.y])
+    IntExpr(1)))
+
+[case testImportedNameInType]
+import m
+x = None # type: m.c
+[file m.py]
+class c: pass
+[out]
+MypyFile:1(
+  Import:1(m)
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    m.c))
+MypyFile:1(
+  tmp/m.py
+  ClassDef:1(
+    c
+    PassStmt:1()))
+
+[case testImportFrom]
+from m import y
+x = y
+[file m.py]
+y = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [y])
+  AssignmentStmt:2(
+    NameExpr(x* [__main__.x])
+    NameExpr(y [m.y])))
+MypyFile:1(
+  tmp/m.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.y])
+    IntExpr(1)))
+
+[case testImportFromType]
+from m import c
+x = None # type: c
+[file m.py]
+class c: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [c])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    m.c))
+MypyFile:1(
+  tmp/m.py
+  ClassDef:1(
+    c
+    PassStmt:1()))
+
+[case testImportMultiple]
+import _m, _n
+_m.x, _n.y
+[file _m.py]
+x = 1
+[file _n.py]
+y = 2
+[out]
+MypyFile:1(
+  Import:1(_m, _n)
+  ExpressionStmt:2(
+    TupleExpr:2(
+      MemberExpr:2(
+        NameExpr(_m)
+        x [_m.x])
+      MemberExpr:2(
+        NameExpr(_n)
+        y [_n.y]))))
+
+[case testImportAs]
+import _m as n
+n.x
+[file _m.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(_m : n)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(n [_m])
+      x [_m.x])))
+
+[case testImportFromMultiple]
+from _m import x, y
+x, y
+[file _m.py]
+x = y = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(_m, [x, y])
+  ExpressionStmt:2(
+    TupleExpr:2(
+      NameExpr(x [_m.x])
+      NameExpr(y [_m.y]))))
+
+[case testImportFromAs]
+from _m import y as z
+z
+[file _m.py]
+y = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(_m, [y : z])
+  ExpressionStmt:2(
+    NameExpr(z [_m.y])))
+
+[case testAccessImportedName]
+from m import x
+y = x
+[file m.py]
+from _n import x
+[file _n.py]
+x = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [x])
+  AssignmentStmt:2(
+    NameExpr(y* [__main__.y])
+    NameExpr(x [_n.x])))
+MypyFile:1(
+  tmp/m.py
+  ImportFrom:1(_n, [x]))
+
+[case testAccessImportedName2]
+import _m
+y = _m.x
+[file _m.py]
+from _n import x
+[file _n.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(_m)
+  AssignmentStmt:2(
+    NameExpr(y* [__main__.y])
+    MemberExpr:2(
+      NameExpr(_m)
+      x [_n.x])))
+
+[case testAccessingImportedNameInType]
+from _m import c
+x = None # type: c
+[file _m.py]
+from _n import c
+[file _n.py]
+class c: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(_m, [c])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    _n.c))
+
+[case testAccessingImportedNameInType2]
+import _m
+x = None # type: _m.c
+[file _m.py]
+from _n import c
+[file _n.py]
+class c: pass
+[out]
+MypyFile:1(
+  Import:1(_m)
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    _n.c))
+
+[case testAccessingImportedModule]
+from _m import _n
+_n.x
+[file _m.py]
+import _n
+[file _n.py]
+x = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(_m, [_n])
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(_n)
+      x [_n.x])))
+
+[case testAccessingImportedModule]
+import _m
+_m._n.x
+[file _m.py]
+import _n
+[file _n.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(_m)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      MemberExpr:2(
+        NameExpr(_m)
+        _n)
+      x [_n.x])))
+
+[case testAccessTypeViaDoubleIndirection]
+from _m import c
+a = None # type: c
+[file _m.py]
+from _n import c
+[file _n.py]
+class c: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(_m, [c])
+  AssignmentStmt:2(
+    NameExpr(a [__main__.a])
+    NameExpr(None [builtins.None])
+    _n.c))
+
+[case testAccessTypeViaDoubleIndirection2]
+import _m
+a = None # type: _m.c
+[file _m.py]
+from _n import c
+[file _n.py]
+class c: pass
+[out]
+MypyFile:1(
+  Import:1(_m)
+  AssignmentStmt:2(
+    NameExpr(a [__main__.a])
+    NameExpr(None [builtins.None])
+    _n.c))
+
+[case testImportAsterisk]
+from _m import *
+x, y
+[file _m.py]
+x = y = 1
+[out]
+MypyFile:1(
+  ImportAll:1(_m)
+  ExpressionStmt:2(
+    TupleExpr:2(
+      NameExpr(x [_m.x])
+      NameExpr(y [_m.y]))))
+
+[case testImportAsteriskAndImportedNames]
+from _m import *
+n_.x, y
+[file _m.py]
+import n_
+from n_ import y
+[file n_.py]
+x = y = 1
+[out]
+MypyFile:1(
+  ImportAll:1(_m)
+  ExpressionStmt:2(
+    TupleExpr:2(
+      MemberExpr:2(
+        NameExpr(n_)
+        x [n_.x])
+      NameExpr(y [n_.y]))))
+
+[case testImportAsteriskAndImportedNamesInTypes]
+from _m import *
+x = None # type: n_.c
+y = None # type: d
+[file _m.py]
+import n_
+from n_ import d
+[file n_.py]
+class c: pass
+class d: pass
+[out]
+MypyFile:1(
+  ImportAll:1(_m)
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    n_.c)
+  AssignmentStmt:3(
+    NameExpr(y [__main__.y])
+    NameExpr(None [builtins.None])
+    n_.d))
+
+[case testModuleInSubdir]
+import _m
+_m.x
+[file _m/__init__.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(_m)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(_m)
+      x [_m.x])))
+
+[case testNestedModules]
+import m.n
+m.n.x, m.y
+[file m/__init__.py]
+y = 1
+[file m/n.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(m.n)
+  ExpressionStmt:2(
+    TupleExpr:2(
+      MemberExpr:2(
+        MemberExpr:2(
+          NameExpr(m)
+          n [m.n])
+        x [m.n.x])
+      MemberExpr:2(
+        NameExpr(m)
+        y [m.y]))))
+MypyFile:1(
+  tmp/m/n.py
+  AssignmentStmt:1(
+    NameExpr(x* [m.n.x])
+    IntExpr(1)))
+
+[case testImportFromSubmodule]
+from m._n import x
+x
+[file m/__init__.py]
+[file m/_n.py]
+x = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(m._n, [x])
+  ExpressionStmt:2(
+    NameExpr(x [m._n.x])))
+
+[case testImportAllFromSubmodule]
+from m._n import *
+x, y
+[file m/__init__.py]
+[file m/_n.py]
+x = y = 1
+[out]
+MypyFile:1(
+  ImportAll:1(m._n)
+  ExpressionStmt:2(
+    TupleExpr:2(
+      NameExpr(x [m._n.x])
+      NameExpr(y [m._n.y]))))
+
+[case testSubmodulesAndTypes]
+import m._n
+x = None # type: m._n.c
+[file m/__init__.py]
+[file m/_n.py]
+class c: pass
+[out]
+MypyFile:1(
+  Import:1(m._n)
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    m._n.c))
+
+[case testSubmodulesAndTypes]
+from m._n import c
+x = None # type: c
+[file m/__init__.py]
+[file m/_n.py]
+class c: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(m._n, [c])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    m._n.c))
+
+[case testFromPackageImportModule]
+from m import _n
+_n.x
+[file m/__init__.py]
+[file m/_n.py]
+x = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [_n])
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(_n [m._n])
+      x [m._n.x])))
+
+[case testDeeplyNestedModule]
+import m.n.k
+m.n.k.x
+m.n.b
+m.a
+[file m/__init__.py]
+a = 1
+[file m/n/__init__.py]
+b = 1
+[file m/n/k.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(m.n.k)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      MemberExpr:2(
+        MemberExpr:2(
+          NameExpr(m)
+          n [m.n])
+        k [m.n.k])
+      x [m.n.k.x]))
+  ExpressionStmt:3(
+    MemberExpr:3(
+      MemberExpr:3(
+        NameExpr(m)
+        n [m.n])
+      b [m.n.b]))
+  ExpressionStmt:4(
+    MemberExpr:4(
+      NameExpr(m)
+      a [m.a])))
+MypyFile:1(
+  tmp/m/n/k.py
+  AssignmentStmt:1(
+    NameExpr(x* [m.n.k.x])
+    IntExpr(1)))
+
+[case testImportInSubmodule]
+import m._n
+y = m._n.x
+[file m/__init__.py]
+[file m/_n.py]
+from m._k import x
+[file m/_k.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(m._n)
+  AssignmentStmt:2(
+    NameExpr(y* [__main__.y])
+    MemberExpr:2(
+      MemberExpr:2(
+        NameExpr(m)
+        _n [m._n])
+      x [m._k.x])))
+
+[case testBuiltinsUsingModule]
+o = None # type: __builtins__.object
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(o [__main__.o])
+    NameExpr(None [builtins.None])
+    builtins.object))
+
+[case testImplicitAccessToBuiltins]
+object
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(object [builtins.object])))
+
+[case testAssignmentToModuleAttribute]
+import _m
+_m.x = (
+  _m.x)
+[file _m.py]
+x = None
+[out]
+MypyFile:1(
+  Import:1(_m)
+  AssignmentStmt:2(
+    MemberExpr:2(
+      NameExpr(_m)
+      x [_m.x])
+    MemberExpr:3(
+      NameExpr(_m)
+      x [_m.x])))
+
+[case testAssignmentThatRefersToModule]
+import _m
+_m.x[None] = None
+[file _m.py]
+x = None
+[out]
+MypyFile:1(
+  Import:1(_m)
+  AssignmentStmt:2(
+    IndexExpr:2(
+      MemberExpr:2(
+        NameExpr(_m)
+        x [_m.x])
+      NameExpr(None [builtins.None]))
+    NameExpr(None [builtins.None])))
+
+[case testImportInBlock]
+if 1:
+    import _x
+    _x.y
+[file _x.py]
+y = 1
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      Import:2(_x)
+      ExpressionStmt:3(
+        MemberExpr:3(
+          NameExpr(_x)
+          y [_x.y])))))
+
+[case testImportInFunction]
+def f() -> None:
+    import _x
+    _x.y
+[file _x.py]
+y = 1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def ()
+    Block:1(
+      Import:2(_x)
+      ExpressionStmt:3(
+        MemberExpr:3(
+          NameExpr(_x)
+          y [_x.y])))))
+
+[case testImportInClassBody]
+class A:
+    from _x import y
+    z = y
+[file _x.py]
+y = 1
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ImportFrom:2(_x, [y])
+    AssignmentStmt:3(
+      NameExpr(z* [m])
+      NameExpr(y [_x.y]))))
+
+[case testImportInClassBody2]
+class A:
+    import _x
+    z = _x.y
+[file _x.py]
+y = 1
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    Import:2(_x)
+    AssignmentStmt:3(
+      NameExpr(z* [m])
+      MemberExpr:3(
+        NameExpr(_x)
+        y [_x.y]))))
+
+[case testImportModuleTwice]
+def f() -> None:
+    import x
+    import x
+    x.y
+[file x.py]
+y = 1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def ()
+    Block:1(
+      Import:2(x)
+      Import:3(x)
+      ExpressionStmt:4(
+        MemberExpr:4(
+          NameExpr(x)
+          y [x.y])))))
+MypyFile:1(
+  tmp/x.py
+  AssignmentStmt:1(
+    NameExpr(y* [x.y])
+    IntExpr(1)))
+
+[case testRelativeImport0]
+import m.x
+m.x.z.y
+[file m/__init__.py]
+[file m/x.py]
+from . import z
+[file m/z.py]
+y = 1
+[out]
+MypyFile:1(
+  Import:1(m.x)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      MemberExpr:2(
+        MemberExpr:2(
+          NameExpr(m)
+          x [m.x])
+        z [m.z])
+      y [m.z.y])))
+MypyFile:1(
+  tmp/m/x.py
+  ImportFrom:1(., [z]))
+MypyFile:1(
+  tmp/m/z.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.z.y])
+    IntExpr(1)))
+
+[case testRelativeImport1]
+import m.t.b as b
+b.x.y
+b.z.y
+[file m/__init__.py]
+[file m/x.py]
+y = 1
+[file m/z.py]
+y = 3
+[file m/t/__init__.py]
+[file m/t/b.py]
+from .. import x, z
+[out]
+MypyFile:1(
+  Import:1(m.t.b : b)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      MemberExpr:2(
+        NameExpr(b [m.t.b])
+        x [m.x])
+      y [m.x.y]))
+  ExpressionStmt:3(
+    MemberExpr:3(
+      MemberExpr:3(
+        NameExpr(b [m.t.b])
+        z [m.z])
+      y [m.z.y])))
+MypyFile:1(
+  tmp/m/t/b.py
+  ImportFrom:1(.., [x, z]))
+MypyFile:1(
+  tmp/m/x.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.x.y])
+    IntExpr(1)))
+MypyFile:1(
+  tmp/m/z.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.z.y])
+    IntExpr(3)))
+
+[case testRelativeImport2]
+import m.t.b as b
+b.xy
+b.zy
+[file m/__init__.py]
+[file m/x.py]
+y = 1
+[file m/z.py]
+y = 3
+[file m/t/__init__.py]
+[file m/t/b.py]
+from ..x import y as xy
+from ..z import y as zy
+[out]
+MypyFile:1(
+  Import:1(m.t.b : b)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(b [m.t.b])
+      xy [m.x.y]))
+  ExpressionStmt:3(
+    MemberExpr:3(
+      NameExpr(b [m.t.b])
+      zy [m.z.y])))
+MypyFile:1(
+  tmp/m/t/b.py
+  ImportFrom:1(..x, [y : xy])
+  ImportFrom:2(..z, [y : zy]))
+MypyFile:1(
+  tmp/m/x.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.x.y])
+    IntExpr(1)))
+MypyFile:1(
+  tmp/m/z.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.z.y])
+    IntExpr(3)))
+
+[case testRelativeImport3]
+import m.t
+m.zy
+m.xy
+m.t.y
+[file m/__init__.py]
+from .x import *
+from .z import *
+[file m/x.py]
+from .z import zy as xy
+[file m/z.py]
+zy = 3
+[file m/t/__init__.py]
+from .b import *
+[file m/t/b.py]
+from .. import xy as y
+[out]
+MypyFile:1(
+  Import:1(m.t)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(m)
+      zy [m.z.zy]))
+  ExpressionStmt:3(
+    MemberExpr:3(
+      NameExpr(m)
+      xy [m.z.zy]))
+  ExpressionStmt:4(
+    MemberExpr:4(
+      MemberExpr:4(
+        NameExpr(m)
+        t [m.t])
+      y [m.z.zy])))
+MypyFile:1(
+  tmp/m/t/b.py
+  ImportFrom:1(.., [xy : y]))
+MypyFile:1(
+  tmp/m/x.py
+  ImportFrom:1(.z, [zy : xy]))
+MypyFile:1(
+  tmp/m/z.py
+  AssignmentStmt:1(
+    NameExpr(zy* [m.z.zy])
+    IntExpr(3)))
+
+[case testRelativeImportFromSameModule]
+import m.x
+[file m/__init__.py]
+[file m/x.py]
+from .x import nonexistent
+[out]
+tmp/m/x.py:1: error: Module 'm.x' has no attribute 'nonexistent'
+
+[case testImportFromSameModule]
+import m.x
+[file m/__init__.py]
+[file m/x.py]
+from m.x import nonexistent
+[out]
+tmp/m/x.py:1: error: Module 'm.x' has no attribute 'nonexistent'
+
+[case testFromImportAsInStub]
+from m import *
+x
+y  # E: Name 'y' is not defined
+[file m.pyi]
+from m2 import x as x
+from m2 import y
+[file m2.py]
+x = 1
+y = 2
+[out]
+
+[case testFromImportAsInNonStub]
+from m_ import *
+x
+y
+[file m_.py]
+from m2_ import x as x
+from m2_ import y
+[file m2_.py]
+x = 1
+y = 2
+[out]
+MypyFile:1(
+  ImportAll:1(m_)
+  ExpressionStmt:2(
+    NameExpr(x [m2_.x]))
+  ExpressionStmt:3(
+    NameExpr(y [m2_.y])))
+
+[case testImportAsInStub]
+from m import *
+m2
+m3  # E: Name 'm3' is not defined
+[file m.pyi]
+import m2 as m2
+import m3
+[file m2.py]
+[file m3.py]
+[out]
+
+[case testImportAsInNonStub]
+from m_ import *
+m2_
+m3_
+[file m_.py]
+import m2_ as m2_
+import m3_
+[file m2_.py]
+[file m3_.py]
+[out]
+MypyFile:1(
+  ImportAll:1(m_)
+  ExpressionStmt:2(
+    NameExpr(m2_))
+  ExpressionStmt:3(
+    NameExpr(m3_)))
+
+[case testErrorsInMultipleModules]
+import m
+x
+[file m.py]
+y
+[out]
+tmp/m.py:1: error: Name 'y' is not defined
+main:2: error: Name 'x' is not defined
+
+[case testImportTwice]
+import typing
+from x import a, a # ok (we could give a warning, but this is valid)
+def f() -> None:
+    from x import a
+    from x import a # ok
+import x
+import x # ok, since we may import multiple submodules of a package
+[file x.py]
+a = 1
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ImportFrom:2(x, [a, a])
+  FuncDef:3(
+    f
+    def ()
+    Block:3(
+      ImportFrom:4(x, [a])
+      ImportFrom:5(x, [a])))
+  Import:6(x)
+  Import:7(x))
+MypyFile:1(
+  tmp/x.py
+  AssignmentStmt:1(
+    NameExpr(a* [x.a])
+    IntExpr(1)))
diff --git a/test-data/unit/semanal-namedtuple.test b/test-data/unit/semanal-namedtuple.test
new file mode 100644
index 0000000..a820a07
--- /dev/null
+++ b/test-data/unit/semanal-namedtuple.test
@@ -0,0 +1,177 @@
+-- Semantic analysis of named tuples
+
+[case testSimpleNamedtuple]
+from collections import namedtuple
+N = namedtuple('N', ['a'])
+def f() -> N: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[Any]))
+  FuncDef:3(
+    f
+    def () -> Tuple[Any, fallback=__main__.N]
+    Block:3(
+      PassStmt:3())))
+
+[case testTwoItemNamedtuple]
+from collections import namedtuple
+N = namedtuple('N', ['a', 'xyz'])
+def f() -> N: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[Any, Any]))
+  FuncDef:3(
+    f
+    def () -> Tuple[Any, Any, fallback=__main__.N]
+    Block:3(
+      PassStmt:3())))
+
+[case testTwoItemNamedtupleWithTupleFieldNames]
+from collections import namedtuple
+N = namedtuple('N', ('a', 'xyz'))
+def f() -> N: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[Any, Any]))
+  FuncDef:3(
+    f
+    def () -> Tuple[Any, Any, fallback=__main__.N]
+    Block:3(
+      PassStmt:3())))
+
+[case testTwoItemNamedtupleWithShorthandSyntax]
+from collections import namedtuple
+N = namedtuple('N', ' a  xyz ')
+def f() -> N: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[Any, Any]))
+  FuncDef:3(
+    f
+    def () -> Tuple[Any, Any, fallback=__main__.N]
+    Block:3(
+      PassStmt:3())))
+
+[case testNamedTupleWithItemTypes]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int),
+                     ('b', str)])
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [NamedTuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[builtins.int, builtins.str])))
+
+[case testNamedTupleWithTupleFieldNamesWithItemTypes]
+from typing import NamedTuple
+N = NamedTuple('N', (('a', int),
+                     ('b', str)))
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [NamedTuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[builtins.int, builtins.str])))
+
+[case testNamedTupleBaseClass]
+from collections import namedtuple
+N = namedtuple('N', ['x'])
+class A(N): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[Any]))
+  ClassDef:3(
+    A
+    TupleType(
+      Tuple[Any, fallback=__main__.N])
+    BaseType(
+      __main__.N)
+    PassStmt:3()))
+
+[case testNamedTupleBaseClass2]
+from collections import namedtuple
+class A(namedtuple('N', ['x'])): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  ClassDef:2(
+    A
+    TupleType(
+      Tuple[Any, fallback=__main__.N@2])
+    BaseType(
+      __main__.N@2)
+    PassStmt:2()))
+
+[case testNamedTupleBaseClassWithItemTypes]
+from typing import NamedTuple
+class A(NamedTuple('N', [('x', int)])): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [NamedTuple])
+  ClassDef:2(
+    A
+    TupleType(
+      Tuple[builtins.int, fallback=__main__.N@2])
+    BaseType(
+      __main__.N@2)
+    PassStmt:2()))
+
+-- Errors
+
+[case testNamedTupleWithTooFewArguments]
+from collections import namedtuple
+N = namedtuple('N') # E: Too few arguments for namedtuple()
+
+[case testNamedTupleWithTooManyArguments]
+from collections import namedtuple
+N = namedtuple('N', ['x'], 'y') # E: Too many arguments for namedtuple()
+
+[case testNamedTupleWithInvalidName]
+from collections import namedtuple
+N = namedtuple(1, ['x']) # E: namedtuple() expects a string literal as the first argument
+
+[case testNamedTupleWithInvalidItems]
+from collections import namedtuple
+N = namedtuple('N', 1) # E: List or tuple literal expected as the second argument to namedtuple()
+
+[case testNamedTupleWithInvalidItems2]
+from collections import namedtuple
+N = namedtuple('N', ['x', 1]) # E: String literal expected as namedtuple() item
+
+[case testNamedTupleWithUnderscoreItemName]
+from collections import namedtuple
+N = namedtuple('N', ['_fallback']) # E: namedtuple() field names cannot start with an underscore: _fallback
+
+-- NOTE: The following code works at runtime but is not yet supported by mypy.
+--       Keyword arguments may potentially be supported in the future.
+[case testNamedTupleWithNonpositionalArgs]
+from collections import namedtuple
+N = namedtuple(typename='N', field_names=['x']) # E: Unexpected arguments to namedtuple()
+
+[case testInvalidNamedTupleBaseClass]
+from typing import NamedTuple
+class A(NamedTuple('N', [1])): pass # E: Tuple expected as NamedTuple() field
+class B(A): pass
+
+[case testInvalidNamedTupleBaseClass2]
+class A(NamedTuple('N', [1])): pass
+class B(A): pass
+[out]
+main:1: error: Name 'NamedTuple' is not defined
+main:1: error: Invalid base class
diff --git a/test-data/unit/semanal-python2.test b/test-data/unit/semanal-python2.test
new file mode 100644
index 0000000..97264a5
--- /dev/null
+++ b/test-data/unit/semanal-python2.test
@@ -0,0 +1,76 @@
+-- Python 2 semantic analysis test cases.
+
+[case testPrintStatement_python2]
+print int, None
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    NameExpr(int [builtins.int])
+    NameExpr(None [builtins.None])
+    Newline))
+
+[case testPrintStatementWithTarget]
+print >>int, None
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    NameExpr(None [builtins.None])
+    Target(
+      NameExpr(int [builtins.int]))
+    Newline))
+
+[case testExecStatement]
+exec None
+exec None in int
+exec None in int, str
+[out]
+MypyFile:1(
+  ExecStmt:1(
+    NameExpr(None [builtins.None]))
+  ExecStmt:2(
+    NameExpr(None [builtins.None])
+    NameExpr(int [builtins.int]))
+  ExecStmt:3(
+    NameExpr(None [builtins.None])
+    NameExpr(int [builtins.int])
+    NameExpr(str [builtins.str])))
+
+[case testVariableLengthTuple_python2]
+from typing import Tuple, cast
+cast(Tuple[int, ...], ())
+[builtins_py2 fixtures/tuple.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Tuple, cast])
+  ExpressionStmt:2(
+    CastExpr:2(
+      TupleExpr:2()
+      builtins.tuple[builtins.int])))
+
+[case testTupleArgList_python2]
+def f(x, (y, z)):
+    x = y
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(__tuple_arg_2))
+    Block:1(
+      AssignmentStmt:1(
+        TupleExpr:1(
+          NameExpr(y* [l])
+          NameExpr(z* [l]))
+        NameExpr(__tuple_arg_2 [l]))
+      AssignmentStmt:2(
+        NameExpr(x [l])
+        NameExpr(y [l])))))
+
+[case testBackquoteExpr_python2]
+`object`
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    BackquoteExpr:1(
+      NameExpr(object [builtins.object]))))
diff --git a/test-data/unit/semanal-statements.test b/test-data/unit/semanal-statements.test
new file mode 100644
index 0000000..e104ab7
--- /dev/null
+++ b/test-data/unit/semanal-statements.test
@@ -0,0 +1,929 @@
+[case testReturn]
+def f(x): return x
+def g(): return
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      ReturnStmt:1(
+        NameExpr(x [l]))))
+  FuncDef:2(
+    g
+    Block:2(
+      ReturnStmt:2())))
+
+[case testRaise]
+raise object()
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    CallExpr:1(
+      NameExpr(object [builtins.object])
+      Args())))
+
+[case testYield]
+def f(): yield f
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Generator
+    Block:1(
+      ExpressionStmt:1(
+        YieldExpr:1(
+          NameExpr(f [__main__.f]))))))
+
+[case testAssert]
+assert object
+[out]
+MypyFile:1(
+  AssertStmt:1(
+    NameExpr(object [builtins.object])))
+
+[case testOperatorAssignment]
+x = y = 1
+x += y
+y |= x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  OperatorAssignmentStmt:2(
+    +
+    NameExpr(x [__main__.x])
+    NameExpr(y [__main__.y]))
+  OperatorAssignmentStmt:3(
+    |
+    NameExpr(y [__main__.y])
+    NameExpr(x [__main__.x])))
+
+[case testWhile]
+x = y = 1
+while x:
+  y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  WhileStmt:2(
+    NameExpr(x [__main__.x])
+    Block:2(
+      ExpressionStmt:3(
+        NameExpr(y [__main__.y])))))
+
+[case testFor]
+for x in object:
+  x
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x* [__main__.x])
+    NameExpr(object [builtins.object])
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(x [__main__.x])))))
+
+[case testForInFunction]
+def f():
+  for x in f:
+    x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ForStmt:2(
+        NameExpr(x* [l])
+        NameExpr(f [__main__.f])
+        Block:2(
+          ExpressionStmt:3(
+            NameExpr(x [l])))))))
+
+[case testMultipleForIndexVars]
+for x, y in []:
+  x, y
+[out]
+MypyFile:1(
+  ForStmt:1(
+    TupleExpr:1(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    ListExpr:1()
+    Block:1(
+      ExpressionStmt:2(
+        TupleExpr:2(
+          NameExpr(x [__main__.x])
+          NameExpr(y [__main__.y]))))))
+
+[case testForIndexVarScope]
+for x in []:
+  pass
+x
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x* [__main__.x])
+    ListExpr:1()
+    Block:1(
+      PassStmt:2()))
+  ExpressionStmt:3(
+    NameExpr(x [__main__.x])))
+
+[case testForIndexVarScope2]
+def f():
+  for x in []:
+    pass
+  x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ForStmt:2(
+        NameExpr(x* [l])
+        ListExpr:2()
+        Block:2(
+          PassStmt:3()))
+      ExpressionStmt:4(
+        NameExpr(x [l])))))
+
+[case testReusingForLoopIndexVariable]
+for x in None:
+    pass
+for x in None:
+    pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x* [__main__.x])
+    NameExpr(None [builtins.None])
+    Block:1(
+      PassStmt:2()))
+  ForStmt:3(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    Block:3(
+      PassStmt:4())))
+
+[case testReusingForLoopIndexVariable2]
+def f():
+    for x in None:
+        pass
+    for x in None:
+        pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ForStmt:2(
+        NameExpr(x* [l])
+        NameExpr(None [builtins.None])
+        Block:2(
+          PassStmt:3()))
+      ForStmt:4(
+        NameExpr(x [l])
+        NameExpr(None [builtins.None])
+        Block:4(
+          PassStmt:5())))))
+
+[case testLoopWithElse]
+for x in []:
+  pass
+else:
+  x
+while 1:
+  pass
+else:
+  x
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x* [__main__.x])
+    ListExpr:1()
+    Block:1(
+      PassStmt:2())
+    Else(
+      ExpressionStmt:4(
+        NameExpr(x [__main__.x]))))
+  WhileStmt:5(
+    IntExpr(1)
+    Block:5(
+      PassStmt:6())
+    Else(
+      ExpressionStmt:8(
+        NameExpr(x [__main__.x])))))
+
+[case testBreak]
+while 1:
+  break
+for x in []:
+  break
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(1)
+    Block:1(
+      BreakStmt:2()))
+  ForStmt:3(
+    NameExpr(x* [__main__.x])
+    ListExpr:3()
+    Block:3(
+      BreakStmt:4())))
+
+[case testContinue]
+while 1:
+  continue
+for x in []:
+  continue
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(1)
+    Block:1(
+      ContinueStmt:2()))
+  ForStmt:3(
+    NameExpr(x* [__main__.x])
+    ListExpr:3()
+    Block:3(
+      ContinueStmt:4())))
+
+[case testIf]
+x = 1
+if x:
+  x
+elif x:
+  x
+elif x:
+  x
+else:
+  x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  IfStmt:2(
+    If(
+      NameExpr(x [__main__.x]))
+    Then(
+      ExpressionStmt:3(
+        NameExpr(x [__main__.x])))
+    Else(
+      IfStmt:4(
+        If(
+          NameExpr(x [__main__.x]))
+        Then(
+          ExpressionStmt:5(
+            NameExpr(x [__main__.x])))
+        Else(
+          IfStmt:6(
+            If(
+              NameExpr(x [__main__.x]))
+            Then(
+              ExpressionStmt:7(
+                NameExpr(x [__main__.x])))
+            Else(
+              ExpressionStmt:9(
+                NameExpr(x [__main__.x])))))))))
+
+[case testSimpleIf]
+if object:
+  object
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      NameExpr(object [builtins.object]))
+    Then(
+      ExpressionStmt:2(
+        NameExpr(object [builtins.object])))))
+
+[case testLvalues]
+x = y = 1
+x = 1
+x.m = 1
+x[y] = 1
+x, y = 1
+[x, y] = 1
+(x, y) = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:3(
+    MemberExpr:3(
+      NameExpr(x [__main__.x])
+      m)
+    IntExpr(1))
+  AssignmentStmt:4(
+    IndexExpr:4(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:5(
+    TupleExpr:5(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:6(
+    ListExpr:6(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:7(
+    TupleExpr:7(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))
+    IntExpr(1)))
+
+[case testStarLvalues]
+*x, y = 1
+*x, (y, *z) = 1
+*(x, q), r = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      StarExpr:1(
+        NameExpr(x* [__main__.x]))
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      StarExpr:2(
+        NameExpr(x [__main__.x]))
+      TupleExpr:2(
+        NameExpr(y [__main__.y])
+        StarExpr:2(
+          NameExpr(z* [__main__.z]))))
+    IntExpr(1))
+  AssignmentStmt:3(
+    TupleExpr:3(
+      StarExpr:3(
+        TupleExpr:3(
+          NameExpr(x [__main__.x])
+          NameExpr(q* [__main__.q])))
+      NameExpr(r* [__main__.r]))
+    IntExpr(1)))
+
+[case testMultipleDefinition]
+x, y = 1
+x, y = 2
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))
+    IntExpr(2)))
+
+[case testComplexDefinitions]
+(x) = 1
+([y]) = 2
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:2(
+    ListExpr:2(
+      NameExpr(y* [__main__.y]))
+    IntExpr(2)))
+
+[case testLocalComplexDefinition]
+def f():
+  (x) = 1
+  x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x* [l])
+        IntExpr(1))
+      ExpressionStmt:3(
+        NameExpr(x [l])))))
+
+[case testMultipleDefOnlySomeNew]
+x = 1
+y, x = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      NameExpr(y* [__main__.y])
+      NameExpr(x [__main__.x]))
+    IntExpr(1)))
+
+[case testMultipleDefOnlySomeNewNestedTuples]
+x = 1
+y, (x, z) = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      NameExpr(y* [__main__.y])
+      TupleExpr:2(
+        NameExpr(x [__main__.x])
+        NameExpr(z* [__main__.z])))
+    IntExpr(1)))
+
+[case testMultipleDefOnlySomeNewNestedLists]
+x = 1
+y, [x, z] = 1
+[p, [x, r]] = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      NameExpr(y* [__main__.y])
+      ListExpr:2(
+        NameExpr(x [__main__.x])
+        NameExpr(z* [__main__.z])))
+    IntExpr(1))
+  AssignmentStmt:3(
+    ListExpr:3(
+      NameExpr(p* [__main__.p])
+      ListExpr:3(
+        NameExpr(x [__main__.x])
+        NameExpr(r* [__main__.r])))
+    IntExpr(1)))
+
+[case testIndexedDel]
+x = y = 1
+del x[y]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  DelStmt:2(
+    IndexExpr:2(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))))
+
+[case testDelGlobalName]
+x = 1
+del x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  DelStmt:2(
+    NameExpr(x [__main__.x])))
+
+[case testDelLocalName]
+def f(x):
+    del x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      DelStmt:2(
+        NameExpr(x [l])))))
+
+[case testDelMultipleThings]
+def f(x, y):
+    del x, y[0]
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(y))
+    Block:1(
+      DelStmt:2(
+        TupleExpr:2(
+          NameExpr(x [l])
+          IndexExpr:2(
+            NameExpr(y [l])
+            IntExpr(0)))))))
+
+[case testDelMultipleThingsInvalid]
+def f(x, y) -> None:
+    del x, y + 1
+[out]
+main:2: error: can't delete operator
+
+[case testTry]
+class c: pass
+try:
+  c
+except object:
+  c
+except c as e:
+  e
+except:
+  c
+finally:
+  c
+[out]
+MypyFile:1(
+  ClassDef:1(
+    c
+    PassStmt:1())
+  TryStmt:2(
+    Block:2(
+      ExpressionStmt:3(
+        NameExpr(c [__main__.c])))
+    NameExpr(object [builtins.object])
+    Block:4(
+      ExpressionStmt:5(
+        NameExpr(c [__main__.c])))
+    NameExpr(c [__main__.c])
+    NameExpr(e* [__main__.e])
+    Block:6(
+      ExpressionStmt:7(
+        NameExpr(e [__main__.e])))
+    Block:8(
+      ExpressionStmt:9(
+        NameExpr(c [__main__.c])))
+    Finally(
+      ExpressionStmt:11(
+        NameExpr(c [__main__.c])))))
+
+[case testTryElse]
+try:
+  pass
+except:
+  pass
+else:
+  object
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    Block:3(
+      PassStmt:4())
+    Else(
+      ExpressionStmt:6(
+        NameExpr(object [builtins.object])))))
+
+[case testTryWithOnlyFinally]
+try:
+  pass
+finally:
+  pass
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    Finally(
+      PassStmt:4())))
+
+[case testExceptWithMultipleTypes]
+class c: pass
+try:
+  pass
+except (c, object) as e:
+  e
+[out]
+MypyFile:1(
+  ClassDef:1(
+    c
+    PassStmt:1())
+  TryStmt:2(
+    Block:2(
+      PassStmt:3())
+    TupleExpr:4(
+      NameExpr(c [__main__.c])
+      NameExpr(object [builtins.object]))
+    NameExpr(e* [__main__.e])
+    Block:4(
+      ExpressionStmt:5(
+        NameExpr(e [__main__.e])))))
+
+[case testRaiseWithoutExpr]
+raise
+[out]
+MypyFile:1(
+  RaiseStmt:1())
+
+[case testWith]
+with object:
+  object
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(object [builtins.object]))
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(object [builtins.object])))))
+
+[case testWithAndVariable]
+with object as x:
+  x
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(object [builtins.object]))
+    Target(
+      NameExpr(x* [__main__.x]))
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(x [__main__.x])))))
+
+[case testWithInFunction]
+def f():
+  with f as x:
+    x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      WithStmt:2(
+        Expr(
+          NameExpr(f [__main__.f]))
+        Target(
+          NameExpr(x* [l]))
+        Block:2(
+          ExpressionStmt:3(
+            NameExpr(x [l])))))))
+
+[case testComplexWith]
+with object, object:
+  pass
+with object as a, object as b:
+  pass
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(object [builtins.object]))
+    Expr(
+      NameExpr(object [builtins.object]))
+    Block:1(
+      PassStmt:2()))
+  WithStmt:3(
+    Expr(
+      NameExpr(object [builtins.object]))
+    Target(
+      NameExpr(a* [__main__.a]))
+    Expr(
+      NameExpr(object [builtins.object]))
+    Target(
+      NameExpr(b* [__main__.b]))
+    Block:3(
+      PassStmt:4())))
+
+[case testVariableInBlock]
+while object:
+  x = None
+  x = x
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    NameExpr(object [builtins.object])
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x* [__main__.x])
+        NameExpr(None [builtins.None]))
+      AssignmentStmt:3(
+        NameExpr(x [__main__.x])
+        NameExpr(x [__main__.x])))))
+
+[case testVariableInExceptHandler]
+try:
+  pass
+except object as o:
+  x = None
+  o = x
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    NameExpr(object [builtins.object])
+    NameExpr(o* [__main__.o])
+    Block:3(
+      AssignmentStmt:4(
+        NameExpr(x* [__main__.x])
+        NameExpr(None [builtins.None]))
+      AssignmentStmt:5(
+        NameExpr(o [__main__.o])
+        NameExpr(x [__main__.x])))))
+
+[case testCallInExceptHandler]
+try:
+  pass
+except object as o:
+  o = object()
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    NameExpr(object [builtins.object])
+    NameExpr(o* [__main__.o])
+    Block:3(
+      AssignmentStmt:4(
+        NameExpr(o [__main__.o])
+        CallExpr:4(
+          NameExpr(object [builtins.object])
+          Args())))))
+
+[case testTryExceptWithMultipleHandlers]
+try:
+    pass
+except BaseException as e:
+    pass
+except Err as f:
+    f = BaseException() # Fail
+    f = Err()
+class Err(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    NameExpr(BaseException [builtins.BaseException])
+    NameExpr(e* [__main__.e])
+    Block:3(
+      PassStmt:4())
+    NameExpr(Err [__main__.Err])
+    NameExpr(f* [__main__.f])
+    Block:5(
+      AssignmentStmt:6(
+        NameExpr(f [__main__.f])
+        CallExpr:6(
+          NameExpr(BaseException [builtins.BaseException])
+          Args()))
+      AssignmentStmt:7(
+        NameExpr(f [__main__.f])
+        CallExpr:7(
+          NameExpr(Err [__main__.Err])
+          Args()))))
+  ClassDef:8(
+    Err
+    BaseType(
+      builtins.BaseException)
+    PassStmt:8()))
+
+[case testMultipleAssignmentWithPartialNewDef]
+o = None
+x, o = o, o
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(o* [__main__.o])
+    NameExpr(None [builtins.None]))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      NameExpr(x* [__main__.x])
+      NameExpr(o [__main__.o]))
+    TupleExpr:2(
+      NameExpr(o [__main__.o])
+      NameExpr(o [__main__.o]))))
+
+[case testFunctionDecorator]
+def decorate(f): pass
+@decorate
+def g():
+    g()
+[out]
+MypyFile:1(
+  FuncDef:1(
+    decorate
+    Args(
+      Var(f))
+    Block:1(
+      PassStmt:1()))
+  Decorator:2(
+    Var(g)
+    NameExpr(decorate [__main__.decorate])
+    FuncDef:3(
+      g
+      Block:3(
+        ExpressionStmt:4(
+          CallExpr:4(
+            NameExpr(g [__main__.g])
+            Args()))))))
+
+[case testTryWithinFunction]
+def f() -> None:
+    try:
+        pass
+    except object as o:
+        pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def ()
+    Block:1(
+      TryStmt:2(
+        Block:2(
+          PassStmt:3())
+        NameExpr(object [builtins.object])
+        NameExpr(o* [l])
+        Block:4(
+          PassStmt:5())))))
+
+[case testReuseExceptionVariable]
+def f() -> None:
+    try:
+        pass
+    except object as o:
+        pass
+    except object as o:
+        pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def ()
+    Block:1(
+      TryStmt:2(
+        Block:2(
+          PassStmt:3())
+        NameExpr(object [builtins.object])
+        NameExpr(o* [l])
+        Block:4(
+          PassStmt:5())
+        NameExpr(object [builtins.object])
+        NameExpr(o [l])
+        Block:6(
+          PassStmt:7())))))
+
+[case testWithMultiple]
+def f(a):
+    pass
+def main():
+    with f(0) as a, f(a) as b:
+        x = a, b
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(a))
+    Block:1(
+      PassStmt:2()))
+  FuncDef:3(
+    main
+    Block:3(
+      WithStmt:4(
+        Expr(
+          CallExpr:4(
+            NameExpr(f [__main__.f])
+            Args(
+              IntExpr(0))))
+        Target(
+          NameExpr(a* [l]))
+        Expr(
+          CallExpr:4(
+            NameExpr(f [__main__.f])
+            Args(
+              NameExpr(a [l]))))
+        Target(
+          NameExpr(b* [l]))
+        Block:4(
+          AssignmentStmt:5(
+            NameExpr(x* [l])
+            TupleExpr:5(
+              NameExpr(a [l])
+              NameExpr(b [l]))))))))
diff --git a/test-data/unit/semanal-symtable.test b/test-data/unit/semanal-symtable.test
new file mode 100644
index 0000000..4821635
--- /dev/null
+++ b/test-data/unit/semanal-symtable.test
@@ -0,0 +1,52 @@
+[case testEmptyFile]
+[out]
+-- Note that builtins are ignored to simplify output.
+__main__:
+  SymbolTable()
+
+[case testVarDef]
+x = 1
+[out]
+__main__:
+  SymbolTable(
+    x : Gdef/Var (__main__))
+
+[case testFuncDef]
+def f(): pass
+[out]
+__main__:
+  SymbolTable(
+    f : Gdef/FuncDef (__main__))
+
+[case testEmptyClassDef]
+class c: pass
+[out]
+__main__:
+  SymbolTable(
+    c : Gdef/TypeInfo (__main__))
+
+[case testImport]
+import m
+[file m.py]
+x = 1
+[out]
+__main__:
+  SymbolTable(
+    m : ModuleRef/MypyFile (__main__))
+m:
+  SymbolTable(
+    x : Gdef/Var (m))
+
+[case testImportFromModule]
+from m import x
+[file m.py]
+class x: pass
+y = 1
+[out]
+__main__:
+  SymbolTable(
+    x : Gdef/TypeInfo (__main__))
+m:
+  SymbolTable(
+    x : Gdef/TypeInfo (m)
+    y : Gdef/Var (m))
diff --git a/test-data/unit/semanal-typealiases.test b/test-data/unit/semanal-typealiases.test
new file mode 100644
index 0000000..5178a71
--- /dev/null
+++ b/test-data/unit/semanal-typealiases.test
@@ -0,0 +1,440 @@
+[case testListTypeAlias]
+from typing import List
+def f() -> List[int]: pass
+[builtins fixtures/list.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [List])
+  FuncDef:2(
+    f
+    def () -> builtins.list[builtins.int]
+    Block:2(
+      PassStmt:2())))
+
+[case testDictTypeAlias]
+from typing import Dict
+def f() -> Dict[int, str]: pass
+[builtins fixtures/dict.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Dict])
+  FuncDef:2(
+    f
+    def () -> builtins.dict[builtins.int, builtins.str]
+    Block:2(
+      PassStmt:2())))
+
+[case testQualifiedTypeAlias]
+import typing
+def f() -> typing.List[int]: pass
+[builtins fixtures/list.pyi]
+[out]
+MypyFile:1(
+  Import:1(typing)
+  FuncDef:2(
+    f
+    def () -> builtins.list[builtins.int]
+    Block:2(
+      PassStmt:2())))
+
+[case testTypeApplicationWithTypeAlias]
+from typing import List
+List[List[int]]
+[builtins fixtures/list.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [List])
+  ExpressionStmt:2(
+    TypeApplication:2(
+      NameExpr(List [builtins.list])
+      Types(
+        builtins.list[builtins.int]))))
+
+[case testTypeApplicationWithQualifiedTypeAlias]
+import typing
+typing.List[typing.List[int]]
+[builtins fixtures/list.pyi]
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ExpressionStmt:2(
+    TypeApplication:2(
+      MemberExpr:2(
+        NameExpr(typing)
+        List [builtins.list])
+      Types(
+        builtins.list[builtins.int]))))
+
+[case testSimpleTypeAlias]
+import typing
+class A: pass
+A2 = A
+def f(x: A2) -> A: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    PassStmt:2())
+  AssignmentStmt:3(
+    NameExpr(A2* [__main__.A2])
+    NameExpr(A [__main__.A]))
+  FuncDef:4(
+    f
+    Args(
+      Var(x))
+    def (x: __main__.A) -> __main__.A
+    Block:4(
+      PassStmt:4())))
+
+[case testQualifiedSimpleTypeAlias]
+import typing
+import _m
+A2 = _m.A
+x = 1 # type: A2
+[file _m.py]
+import typing
+class A: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  Import:2(_m)
+  AssignmentStmt:3(
+    NameExpr(A2* [__main__.A2])
+    MemberExpr:3(
+      NameExpr(_m)
+      A [_m.A]))
+  AssignmentStmt:4(
+    NameExpr(x [__main__.x])
+    IntExpr(1)
+    _m.A))
+
+[case testUnionTypeAlias]
+from typing import Union
+U = Union[int, str]
+def f(x: U) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  AssignmentStmt:2(
+    NameExpr(U* [__main__.U])
+    TypeAliasExpr(Union[builtins.int, builtins.str]))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, builtins.str])
+    Block:3(
+      PassStmt:3())))
+
+[case testUnionTypeAlias2]
+from typing import Union
+class A: pass
+U = Union[int, A]
+def f(x: U) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  ClassDef:2(
+    A
+    PassStmt:2())
+  AssignmentStmt:3(
+    NameExpr(U* [__main__.U])
+    TypeAliasExpr(Union[builtins.int, __main__.A]))
+  FuncDef:4(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, __main__.A])
+    Block:4(
+      PassStmt:4())))
+
+[case testUnionTypeAliasWithQualifiedUnion]
+import typing
+U = typing.Union[int, str]
+def f(x: U) -> None: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  AssignmentStmt:2(
+    NameExpr(U* [__main__.U])
+    TypeAliasExpr(Union[builtins.int, builtins.str]))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, builtins.str])
+    Block:3(
+      PassStmt:3())))
+
+[case testTupleTypeAlias]
+from typing import Tuple
+T = Tuple[int, str]
+def f(x: T) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Tuple])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeAliasExpr(Tuple[builtins.int, builtins.str]))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: Tuple[builtins.int, builtins.str])
+    Block:3(
+      PassStmt:3())))
+
+[case testCallableTypeAlias]
+from typing import Callable
+C = Callable[[int], None]
+def f(x: C) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Callable])
+  AssignmentStmt:2(
+    NameExpr(C* [__main__.C])
+    TypeAliasExpr(def (builtins.int)))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: def (builtins.int))
+    Block:3(
+      PassStmt:3())))
+
+[case testGenericTypeAlias]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+class G(Generic[T]): pass
+A = G[int]
+def f(x: A) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Generic, TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2())
+  ClassDef:3(
+    G
+    TypeVars(
+      T)
+    PassStmt:3())
+  AssignmentStmt:4(
+    NameExpr(A* [__main__.A])
+    TypeAliasExpr(__main__.G[builtins.int]))
+  FuncDef:5(
+    f
+    Args(
+      Var(x))
+    def (x: __main__.G[builtins.int])
+    Block:5(
+      PassStmt:5())))
+
+[case testGenericTypeAlias2]
+from typing import List
+A = List[int]
+def f(x: A) -> None: pass
+[builtins fixtures/list.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [List])
+  AssignmentStmt:2(
+    NameExpr(A* [__main__.A])
+    TypeAliasExpr(builtins.list[builtins.int]))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: builtins.list[builtins.int])
+    Block:3(
+      PassStmt:3())))
+
+[case testImportUnionTypeAlias]
+import typing
+from _m import U
+def f(x: U) -> None: pass
+[file _m.py]
+from typing import Union
+class A: pass
+U = Union[int, A]
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ImportFrom:2(_m, [U])
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, _m.A])
+    Block:3(
+      PassStmt:3())))
+
+[case testImportUnionTypeAlias2]
+import typing
+import _m
+def f(x: _m.U) -> None: pass
+[file _m.py]
+from typing import Union
+class A: pass
+U = Union[int, A]
+[out]
+MypyFile:1(
+  Import:1(typing)
+  Import:2(_m)
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, _m.A])
+    Block:3(
+      PassStmt:3())))
+
+[case testImportSimpleTypeAlias]
+import typing
+from _m import A
+def f(x: A) -> None: pass
+[file _m.py]
+import typing
+A = int
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ImportFrom:2(_m, [A])
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: builtins.int)
+    Block:3(
+      PassStmt:3())))
+
+[case testImportSimpleTypeAlias2]
+import typing
+import _m
+def f(x: _m.A) -> None: pass
+[file _m.py]
+import typing
+A = int
+[out]
+MypyFile:1(
+  Import:1(typing)
+  Import:2(_m)
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: builtins.int)
+    Block:3(
+      PassStmt:3())))
+
+[case testAnyTypeAlias]
+from typing import Any
+A = Any
+a = 1 # type: A
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  AssignmentStmt:2(
+    NameExpr(A* [__main__.A])
+    NameExpr(Any [typing.Any]))
+  AssignmentStmt:3(
+    NameExpr(a [__main__.a])
+    IntExpr(1)
+    Any))
+
+[case testAnyTypeAlias2]
+import typing
+A = typing.Any
+a = 1 # type: A
+[out]
+MypyFile:1(
+  Import:1(typing)
+  AssignmentStmt:2(
+    NameExpr(A* [__main__.A])
+    MemberExpr:2(
+      NameExpr(typing)
+      Any [typing.Any]))
+  AssignmentStmt:3(
+    NameExpr(a [__main__.a])
+    IntExpr(1)
+    Any))
+
+[case testTypeAliasAlias]
+from typing import Union
+U = Union[int, str]
+U2 = U
+x = 1 # type: U2
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  AssignmentStmt:2(
+    NameExpr(U* [__main__.U])
+    TypeAliasExpr(Union[builtins.int, builtins.str]))
+  AssignmentStmt:3(
+    NameExpr(U2* [__main__.U2])
+    NameExpr(U [__main__.U]))
+  AssignmentStmt:4(
+    NameExpr(x [__main__.x])
+    IntExpr(1)
+    Union[builtins.int, builtins.str]))
+
+[case testTypeAliasOfImportedAlias]
+from typing import Union
+from _m import U
+U2 = U
+x = 1 # type: U2
+[file _m.py]
+from typing import Union
+U = Union[int, str]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  ImportFrom:2(_m, [U])
+  AssignmentStmt:3(
+    NameExpr(U2* [__main__.U2])
+    NameExpr(U [_m.U]))
+  AssignmentStmt:4(
+    NameExpr(x [__main__.x])
+    IntExpr(1)
+    Union[builtins.int, builtins.str]))
+
+[case testListTypeDoesNotGenerateAlias]
+import typing
+A = [int, str]
+a = 1 # type: A  # E: Invalid type "__main__.A"
+
+[case testCantUseStringLiteralAsTypeAlias]
+from typing import Union
+A = 'Union[int, str]'
+a = 1 # type: A  # E: Invalid type "__main__.A"
+
+[case testStringLiteralTypeAsAliasComponent]
+from typing import Union
+A = Union['int', str]
+a = 1 # type: A
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  AssignmentStmt:2(
+    NameExpr(A* [__main__.A])
+    TypeAliasExpr(Union[builtins.int, builtins.str]))
+  AssignmentStmt:3(
+    NameExpr(a [__main__.a])
+    IntExpr(1)
+    Union[builtins.int, builtins.str]))
+
+[case testComplexTypeAlias]
+from typing import Union, Tuple, Any
+A = Union['int', Tuple[int, Any]]
+a = 1 # type: A
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union, Tuple, Any])
+  AssignmentStmt:2(
+    NameExpr(A* [__main__.A])
+    TypeAliasExpr(Union[builtins.int, Tuple[builtins.int, Any]]))
+  AssignmentStmt:3(
+    NameExpr(a [__main__.a])
+    IntExpr(1)
+    Union[builtins.int, Tuple[builtins.int, Any]]))
diff --git a/test-data/unit/semanal-typeddict.test b/test-data/unit/semanal-typeddict.test
new file mode 100644
index 0000000..9c1454e
--- /dev/null
+++ b/test-data/unit/semanal-typeddict.test
@@ -0,0 +1,36 @@
+-- Create Type
+
+-- TODO: Implement support for this syntax.
+--[case testCanCreateTypedDictTypeWithKeywordArguments]
+--from mypy_extensions import TypedDict
+--Point = TypedDict('Point', x=int, y=int)
+--[builtins fixtures/dict.pyi]
+--[out]
+--MypyFile:1(
+--  ImportFrom:1(mypy_extensions, [TypedDict])
+--  AssignmentStmt:2(
+--    NameExpr(Point* [__main__.Point])
+--    TypedDictExpr:2(Point)))
+
+-- TODO: Implement support for this syntax.
+--[case testCanCreateTypedDictTypeWithDictCall]
+--from mypy_extensions import TypedDict
+--Point = TypedDict('Point', dict(x=int, y=int))
+--[builtins fixtures/dict.pyi]
+--[out]
+--MypyFile:1(
+--  ImportFrom:1(mypy_extensions, [TypedDict])
+--  AssignmentStmt:2(
+--    NameExpr(Point* [__main__.Point])
+--    TypedDictExpr:2(Point)))
+
+[case testCanCreateTypedDictTypeWithDictLiteral]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+[builtins fixtures/dict.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(mypy_extensions, [TypedDict])
+  AssignmentStmt:2(
+    NameExpr(Point* [__main__.Point])
+    TypedDictExpr:2(Point)))
diff --git a/test-data/unit/semanal-typeinfo.test b/test-data/unit/semanal-typeinfo.test
new file mode 100644
index 0000000..098ce0b
--- /dev/null
+++ b/test-data/unit/semanal-typeinfo.test
@@ -0,0 +1,88 @@
+[case testEmptyFile]
+[out]
+TypeInfoMap()
+
+[case testEmptyClass]
+class c: pass
+[out]
+TypeInfoMap(
+  __main__.c : TypeInfo(
+    Name(__main__.c)
+    Bases(builtins.object)
+    Mro(__main__.c, builtins.object)
+    Names()))
+
+[case testClassWithMethod]
+class c:
+  def f(self): pass
+[out]
+TypeInfoMap(
+  __main__.c : TypeInfo(
+    Name(__main__.c)
+    Bases(builtins.object)
+    Mro(__main__.c, builtins.object)
+    Names(
+      f)))
+
+[case testClassWithAttributes]
+class c:
+  def __init__(self, x):
+    self.y = x
+    self.z = 1
+[out]
+TypeInfoMap(
+  __main__.c : TypeInfo(
+    Name(__main__.c)
+    Bases(builtins.object)
+    Mro(__main__.c, builtins.object)
+    Names(
+      __init__
+      y
+      z)))
+
+[case testBaseClass]
+class base: pass
+class c(base): pass
+[out]
+TypeInfoMap(
+  __main__.base : TypeInfo(
+    Name(__main__.base)
+    Bases(builtins.object)
+    Mro(__main__.base, builtins.object)
+    Names())
+  __main__.c : TypeInfo(
+    Name(__main__.c)
+    Bases(__main__.base)
+    Mro(__main__.c, __main__.base, builtins.object)
+    Names()))
+
+[case testClassAndAbstractClass]
+from abc import abstractmethod, ABCMeta
+import typing
+
+class i(metaclass=ABCMeta): pass
+class c(i): pass
+[out]
+TypeInfoMap(
+  __main__.c : TypeInfo(
+    Name(__main__.c)
+    Bases(__main__.i)
+    Mro(__main__.c, __main__.i, builtins.object)
+    Names())
+  __main__.i : TypeInfo(
+    Name(__main__.i)
+    Bases(builtins.object)
+    Mro(__main__.i, builtins.object)
+    Names()))
+
+[case testAttributeWithoutType]
+class A:
+    a = A
+[out]
+TypeInfoMap(
+  __main__.A : TypeInfo(
+    Name(__main__.A)
+    Bases(builtins.object)
+    Mro(__main__.A, builtins.object)
+    Names(
+      a)))
diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test
new file mode 100644
index 0000000..ca00516
--- /dev/null
+++ b/test-data/unit/semanal-types.test
@@ -0,0 +1,1495 @@
+[case testVarWithType]
+import typing
+class A: pass
+x = A() # type: A
+y = x
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    PassStmt:2())
+  AssignmentStmt:3(
+    NameExpr(x [__main__.x])
+    CallExpr:3(
+      NameExpr(A [__main__.A])
+      Args())
+    __main__.A)
+  AssignmentStmt:4(
+    NameExpr(y* [__main__.y])
+    NameExpr(x [__main__.x])))
+
+[case testLocalVarWithType]
+class A: pass
+def f():
+  x = None # type: A
+  y = x
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    PassStmt:1())
+  FuncDef:2(
+    f
+    Block:2(
+      AssignmentStmt:3(
+        NameExpr(x [l])
+        NameExpr(None [builtins.None])
+        __main__.A)
+      AssignmentStmt:4(
+        NameExpr(y* [l])
+        NameExpr(x [l])))))
+
+[case testAnyType]
+from typing import Any
+x = None # type: Any
+y = x
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    Any)
+  AssignmentStmt:3(
+    NameExpr(y* [__main__.y])
+    NameExpr(x [__main__.x])))
+
+[case testMemberVarWithType]
+import typing
+class A:
+  def __init__(self):
+    self.x = None # type: int
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    FuncDef:3(
+      __init__
+      Args(
+        Var(self))
+      Block:3(
+        AssignmentStmt:4(
+          MemberExpr:4(
+            NameExpr(self [l])
+            x)
+          NameExpr(None [builtins.None])
+          builtins.int)))))
+
+[case testClassVarWithType]
+import typing
+class A:
+  x = None # type: int
+  x = 1
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    AssignmentStmt:3(
+      NameExpr(x [m])
+      NameExpr(None [builtins.None])
+      builtins.int)
+    AssignmentStmt:4(
+      NameExpr(x [__main__.A.x])
+      IntExpr(1))))
+
+[case testFunctionSig]
+from typing import Any
+class A: pass
+def f(x: A) -> A: pass
+def g(x: Any, y: A) -> None:
+  z = x, y
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  ClassDef:2(
+    A
+    PassStmt:2())
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: __main__.A) -> __main__.A
+    Block:3(
+      PassStmt:3()))
+  FuncDef:4(
+    g
+    Args(
+      Var(x)
+      Var(y))
+    def (x: Any, y: __main__.A)
+    Block:4(
+      AssignmentStmt:5(
+        NameExpr(z* [l])
+        TupleExpr:5(
+          NameExpr(x [l])
+          NameExpr(y [l]))))))
+
+[case testBaseclass]
+class A: pass
+class B(A): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    PassStmt:1())
+  ClassDef:2(
+    B
+    BaseType(
+      __main__.A)
+    PassStmt:2()))
+
+[case testMultipleVarDef]
+
+class A: pass
+class B: pass
+a, b = None, None # type: (A, B)
+x = a, b
+[out]
+MypyFile:1(
+  ClassDef:2(
+    A
+    PassStmt:2())
+  ClassDef:3(
+    B
+    PassStmt:3())
+  AssignmentStmt:4(
+    TupleExpr:4(
+      NameExpr(a [__main__.a])
+      NameExpr(b [__main__.b]))
+    TupleExpr:4(
+      NameExpr(None [builtins.None])
+      NameExpr(None [builtins.None]))
+    Tuple[__main__.A, __main__.B])
+  AssignmentStmt:5(
+    NameExpr(x* [__main__.x])
+    TupleExpr:5(
+      NameExpr(a [__main__.a])
+      NameExpr(b [__main__.b]))))
+
+[case testGenericType]
+from typing import TypeVar, Generic, Any
+
+t = TypeVar('t')
+
+class A(Generic[t]): pass
+class B: pass
+x = None # type: A[B]
+y = None # type: A[Any]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic, Any])
+  AssignmentStmt:3(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:3())
+  ClassDef:5(
+    A
+    TypeVars(
+      t)
+    PassStmt:5())
+  ClassDef:6(
+    B
+    PassStmt:6())
+  AssignmentStmt:7(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    __main__.A[__main__.B])
+  AssignmentStmt:8(
+    NameExpr(y [__main__.y])
+    NameExpr(None [builtins.None])
+    __main__.A[Any]))
+
+[case testGenericType2]
+from typing import TypeVar, Generic, Any
+t = TypeVar('t')
+s = TypeVar('s')
+class A(Generic[t, s]): pass
+class B: pass
+x = None # type: A[B, Any]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic, Any])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:3())
+  ClassDef:4(
+    A
+    TypeVars(
+      t
+      s)
+    PassStmt:4())
+  ClassDef:5(
+    B
+    PassStmt:5())
+  AssignmentStmt:6(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    __main__.A[__main__.B, Any]))
+
+[case testAssignmentAfterDef]
+
+
+class A: pass
+a = None # type: A
+a = 1
+def f():
+  b = None # type: A
+  b = 1
+[out]
+MypyFile:1(
+  ClassDef:3(
+    A
+    PassStmt:3())
+  AssignmentStmt:4(
+    NameExpr(a [__main__.a])
+    NameExpr(None [builtins.None])
+    __main__.A)
+  AssignmentStmt:5(
+    NameExpr(a [__main__.a])
+    IntExpr(1))
+  FuncDef:6(
+    f
+    Block:6(
+      AssignmentStmt:7(
+        NameExpr(b [l])
+        NameExpr(None [builtins.None])
+        __main__.A)
+      AssignmentStmt:8(
+        NameExpr(b [l])
+        IntExpr(1)))))
+
+[case testCast]
+from typing import TypeVar, Generic, Any, cast
+t = TypeVar('t')
+class c: pass
+class d(Generic[t]): pass
+cast(Any, 1)
+cast(c, 1)
+cast(d[c], c)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic, Any, cast])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    c
+    PassStmt:3())
+  ClassDef:4(
+    d
+    TypeVars(
+      t)
+    PassStmt:4())
+  ExpressionStmt:5(
+    CastExpr:5(
+      IntExpr(1)
+      Any))
+  ExpressionStmt:6(
+    CastExpr:6(
+      IntExpr(1)
+      __main__.c))
+  ExpressionStmt:7(
+    CastExpr:7(
+      NameExpr(c [__main__.c])
+      __main__.d[__main__.c])))
+
+[case testCastToQualifiedTypeAndCast]
+import typing
+import _m
+typing.cast(_m.C, object)
+[file _m.py]
+class C: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  Import:2(_m)
+  ExpressionStmt:3(
+    CastExpr:3(
+      NameExpr(object [builtins.object])
+      _m.C)))
+
+[case testLongQualifiedCast]
+import typing
+import _m._n
+typing.cast(_m._n.C, object)
+[file _m/__init__.py]
+[file _m/_n.py]
+class C: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  Import:2(_m._n)
+  ExpressionStmt:3(
+    CastExpr:3(
+      NameExpr(object [builtins.object])
+      _m._n.C)))
+
+[case testCastTargetWithTwoTypeArgs]
+from typing import TypeVar, Generic, cast
+t = TypeVar('t')
+s = TypeVar('s')
+class C(Generic[t, s]): pass
+cast(C[str, int], C)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic, cast])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:3())
+  ClassDef:4(
+    C
+    TypeVars(
+      t
+      s)
+    PassStmt:4())
+  ExpressionStmt:5(
+    CastExpr:5(
+      NameExpr(C [__main__.C])
+      __main__.C[builtins.str, builtins.int])))
+
+[case testCastToTupleType]
+from typing import Tuple, cast
+cast(Tuple[int, str], None)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Tuple, cast])
+  ExpressionStmt:2(
+    CastExpr:2(
+      NameExpr(None [builtins.None])
+      Tuple[builtins.int, builtins.str])))
+
+[case testCastToFunctionType]
+from typing import Callable, cast
+cast(Callable[[int], str], None)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Callable, cast])
+  ExpressionStmt:2(
+    CastExpr:2(
+      NameExpr(None [builtins.None])
+      def (builtins.int) -> builtins.str)))
+
+[case testCastToStringLiteralType]
+from typing import cast
+cast('int', 1)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [cast])
+  ExpressionStmt:2(
+    CastExpr:2(
+      IntExpr(1)
+      builtins.int)))
+
+[case testFunctionTypeVariable]
+from typing import TypeVar
+t = TypeVar('t')
+def f(x: t) -> None:
+  y = None # type: t
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [t] (x: t`-1)
+    Block:3(
+      AssignmentStmt:4(
+        NameExpr(y [l])
+        NameExpr(None [builtins.None])
+        t`-1))))
+
+[case testTwoFunctionTypeVariables]
+from typing import TypeVar
+t = TypeVar('t')
+u = TypeVar('u')
+def f(x: t, y: u, z: t) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(u* [__main__.u])
+    TypeVarExpr:3())
+  FuncDef:4(
+    f
+    Args(
+      Var(x)
+      Var(y)
+      Var(z))
+    def [t, u] (x: t`-1, y: u`-2, z: t`-1)
+    Block:4(
+      PassStmt:4())))
+
+[case testNestedGenericFunctionTypeVariable]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+def f(x: A[t], y) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  FuncDef:4(
+    f
+    Args(
+      Var(x)
+      Var(y))
+    def [t] (x: __main__.A[t`-1], y: Any)
+    Block:4(
+      PassStmt:4())))
+
+[case testNestedGenericFunctionTypeVariable2]
+from typing import TypeVar, Tuple, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+def f(x: Tuple[int, t]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Tuple, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  FuncDef:4(
+    f
+    Args(
+      Var(x))
+    def [t] (x: Tuple[builtins.int, t`-1])
+    Block:4(
+      PassStmt:4())))
+
+[case testNestedGenericFunctionTypeVariable3]
+from typing import TypeVar, Callable, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+def f(x: Callable[[int, t], int]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Callable, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  FuncDef:4(
+    f
+    Args(
+      Var(x))
+    def [t] (x: def (builtins.int, t`-1) -> builtins.int)
+    Block:4(
+      PassStmt:4())))
+
+[case testNestedGenericFunctionTypeVariable4]
+from typing import TypeVar, Callable, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+def f(x: Callable[[], t]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Callable, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  FuncDef:4(
+    f
+    Args(
+      Var(x))
+    def [t] (x: def () -> t`-1)
+    Block:4(
+      PassStmt:4())))
+
+[case testGenericFunctionTypeVariableInReturnType]
+from typing import TypeVar
+t = TypeVar('t')
+def f() -> t: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    def [t] () -> t`-1
+    Block:3(
+      PassStmt:3())))
+
+[case testSelfType]
+class A:
+  def f(self, o: object) -> None: pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self)
+        Var(o))
+      def (self: __main__.A, o: builtins.object)
+      Block:2(
+        PassStmt:2()))))
+
+[case testNestedGenericFunction]
+from typing import TypeVar
+t = TypeVar('t')
+def f() -> None:
+    def g() -> t: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    def ()
+    Block:3(
+      FuncDef:4(
+        g
+        def [t] () -> t`-1
+        Block:4(
+          PassStmt:4())))))
+
+[case testClassTvar]
+from typing import TypeVar, Generic
+
+t = TypeVar('t')
+
+class c(Generic[t]):
+  def f(self) -> t: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:3(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:3())
+  ClassDef:5(
+    c
+    TypeVars(
+      t)
+    FuncDef:6(
+      f
+      Args(
+        Var(self))
+      def (self: __main__.c[t`1]) -> t`1
+      Block:6(
+        PassStmt:6()))))
+
+[case testClassTvar2]
+from typing import TypeVar, Generic
+
+t = TypeVar('t')
+s = TypeVar('s')
+
+class c(Generic[t, s]):
+  def f(self, x: s) -> t: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:3(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:3())
+  AssignmentStmt:4(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:4())
+  ClassDef:6(
+    c
+    TypeVars(
+      t
+      s)
+    FuncDef:7(
+      f
+      Args(
+        Var(self)
+        Var(x))
+      def (self: __main__.c[t`1, s`2], x: s`2) -> t`1
+      Block:7(
+        PassStmt:7()))))
+
+[case testGenericBaseClass]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class d(Generic[t]): pass
+class c(d[t], Generic[t]): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    d
+    TypeVars(
+      t)
+    PassStmt:3())
+  ClassDef:4(
+    c
+    TypeVars(
+      t)
+    BaseType(
+      __main__.d[t`1])
+    PassStmt:4()))
+
+[case testTupleType]
+from typing import Tuple
+t = None # type: tuple
+t1 = None # type: Tuple[object]
+t2 = None # type: Tuple[int, object]
+[builtins fixtures/tuple.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Tuple])
+  AssignmentStmt:2(
+    NameExpr(t [__main__.t])
+    NameExpr(None [builtins.None])
+    builtins.tuple[Any])
+  AssignmentStmt:3(
+    NameExpr(t1 [__main__.t1])
+    NameExpr(None [builtins.None])
+    Tuple[builtins.object])
+  AssignmentStmt:4(
+    NameExpr(t2 [__main__.t2])
+    NameExpr(None [builtins.None])
+    Tuple[builtins.int, builtins.object]))
+
+[case testVariableLengthTuple]
+from typing import Tuple
+t = None # type: Tuple[int, ...]
+[builtins fixtures/tuple.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Tuple])
+  AssignmentStmt:2(
+    NameExpr(t [__main__.t])
+    NameExpr(None [builtins.None])
+    builtins.tuple[builtins.int]))
+
+[case testInvalidTupleType]
+from typing import Tuple
+t = None # type: Tuple[int, str, ...] # E: Unexpected '...'
+[out]
+
+[case testFunctionTypes]
+from typing import Callable
+f = None # type: Callable[[object, int], str]
+g = None # type: Callable[[], None]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Callable])
+  AssignmentStmt:2(
+    NameExpr(f [__main__.f])
+    NameExpr(None [builtins.None])
+    def (builtins.object, builtins.int) -> builtins.str)
+  AssignmentStmt:3(
+    NameExpr(g [__main__.g])
+    NameExpr(None [builtins.None])
+    def ()))
+
+[case testOverloadedFunction]
+from typing import overload, Any
+@overload
+def f(a: object) -> int: a
+@overload
+def f(a: str) -> object: a
+
+def f(a: Any) -> Any: return a
+
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [overload, Any])
+  OverloadedFuncDef:2(
+    FuncDef:7(
+      f
+      Args(
+        Var(a))
+      def (a: Any) -> Any
+      Block:7(
+        ReturnStmt:7(
+          NameExpr(a [l]))))
+    Overload(def (a: builtins.object) -> builtins.int, \
+             def (a: builtins.str) -> builtins.object)
+    Decorator:2(
+      Var(f)
+      NameExpr(overload [typing.overload])
+      FuncDef:3(
+        f
+        Args(
+          Var(a))
+        def (a: builtins.object) -> builtins.int
+        Block:3(
+          ExpressionStmt:3(
+            NameExpr(a [l])))))
+    Decorator:4(
+      Var(f)
+      NameExpr(overload [typing.overload])
+      FuncDef:5(
+        f
+        Args(
+          Var(a))
+        def (a: builtins.str) -> builtins.object
+        Block:5(
+          ExpressionStmt:5(
+            NameExpr(a [l])))))))
+
+[case testReferenceToOverloadedFunction]
+from typing import overload
+@overload
+def f() -> None: pass
+@overload
+def f(x: int) -> None: pass
+
+def f(*args) -> None: pass
+
+x = f
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [overload])
+  OverloadedFuncDef:2(
+    FuncDef:7(
+      f
+      def (*args: Any)
+      VarArg(
+        Var(args))
+      Block:7(
+        PassStmt:7()))
+    Overload(def (), def (x: builtins.int))
+    Decorator:2(
+      Var(f)
+      NameExpr(overload [typing.overload])
+      FuncDef:3(
+        f
+        def ()
+        Block:3(
+          PassStmt:3())))
+    Decorator:4(
+      Var(f)
+      NameExpr(overload [typing.overload])
+      FuncDef:5(
+        f
+        Args(
+          Var(x))
+        def (x: builtins.int)
+        Block:5(
+          PassStmt:5()))))
+  AssignmentStmt:9(
+    NameExpr(x* [__main__.x])
+    NameExpr(f [__main__.f])))
+
+[case testNestedOverloadedFunction]
+from typing import overload
+def f():
+    @overload
+    def g(): pass
+    @overload
+    def g(x): pass
+
+    def g(*args): pass
+
+    y = g
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [overload])
+  FuncDef:2(
+    f
+    Block:2(
+      OverloadedFuncDef:3(
+        FuncDef:8(
+          g
+          VarArg(
+            Var(args))
+          Block:8(
+            PassStmt:8()))
+        Overload(def () -> Any, def (x: Any) -> Any)
+        Decorator:3(
+          Var(g)
+          NameExpr(overload [typing.overload])
+          FuncDef:4(
+            g
+            Block:4(
+              PassStmt:4())))
+        Decorator:5(
+          Var(g)
+          NameExpr(overload [typing.overload])
+          FuncDef:6(
+            g
+            Args(
+              Var(x))
+            Block:6(
+              PassStmt:6()))))
+      AssignmentStmt:10(
+        NameExpr(y* [l])
+        NameExpr(g [l])))))
+
+[case testImplicitGenericTypeArgs]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+s = TypeVar('s')
+class A(Generic[t, s]): pass
+x = None # type: A
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:3())
+  ClassDef:4(
+    A
+    TypeVars(
+      t
+      s)
+    PassStmt:4())
+  AssignmentStmt:5(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    __main__.A[Any, Any]))
+
+[case testImplicitTypeArgsAndGenericBaseClass]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+s = TypeVar('s')
+class B(Generic[s]): pass
+class A(B, Generic[t]): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:3())
+  ClassDef:4(
+    B
+    TypeVars(
+      s)
+    PassStmt:4())
+  ClassDef:5(
+    A
+    TypeVars(
+      t)
+    BaseType(
+      __main__.B[Any])
+    PassStmt:5()))
+
+[case testTypeApplication]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+x = A[int]()
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  AssignmentStmt:4(
+    NameExpr(x* [__main__.x])
+    CallExpr:4(
+      TypeApplication:4(
+        NameExpr(A [__main__.A])
+        Types(
+          builtins.int))
+      Args())))
+
+[case testTypeApplicationWithTwoTypeArgs]
+from typing import TypeVar, Generic, Any
+t = TypeVar('t')
+s = TypeVar('s')
+class A(Generic[t, s]): pass
+x = A[int, Any]()
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic, Any])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:3())
+  ClassDef:4(
+    A
+    TypeVars(
+      t
+      s)
+    PassStmt:4())
+  AssignmentStmt:5(
+    NameExpr(x* [__main__.x])
+    CallExpr:5(
+      TypeApplication:5(
+        NameExpr(A [__main__.A])
+        Types(
+          builtins.int
+          Any))
+      Args())))
+
+[case testFunctionTypeApplication]
+from typing import TypeVar
+t = TypeVar('t')
+def f(x: t) -> None: pass
+f[int](1)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [t] (x: t`-1)
+    Block:3(
+      PassStmt:3()))
+  ExpressionStmt:4(
+    CallExpr:4(
+      TypeApplication:4(
+        NameExpr(f [__main__.f])
+        Types(
+          builtins.int))
+      Args(
+        IntExpr(1)))))
+
+[case testTypeApplicationWithStringLiteralType]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+A['int']()
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  ExpressionStmt:4(
+    CallExpr:4(
+      TypeApplication:4(
+        NameExpr(A [__main__.A])
+        Types(
+          builtins.int))
+      Args())))
+
+[case testVarArgsAndKeywordArgs]
+def g(*x: int, y: str = ''): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    g
+    MaxPos(0)
+    Args(
+      Var(y))
+    def (*x: builtins.int, *, y: builtins.str =) -> Any
+    Init(
+      AssignmentStmt:1(
+        NameExpr(y [l])
+        StrExpr()))
+    VarArg(
+      Var(x))
+    Block:1(
+      PassStmt:1())))
+
+[case testQualifiedGeneric]
+from typing import TypeVar
+import typing
+T = TypeVar('T')
+class A(typing.Generic[T]): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  Import:2(typing)
+  AssignmentStmt:3(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:3())
+  ClassDef:4(
+    A
+    TypeVars(
+      T)
+    PassStmt:4()))
+
+[case testQualifiedTypevar]
+import typing
+T = typing.TypeVar('T')
+def f(x: T) -> T: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [T] (x: T`-1) -> T`-1
+    Block:3(
+      PassStmt:3())))
+
+[case testAliasedTypevar]
+from typing import TypeVar as tv
+T = tv('T')
+def f(x: T) -> T: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar : tv])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [T] (x: T`-1) -> T`-1
+    Block:3(
+      PassStmt:3())))
+
+[case testLocalTypevar]
+from typing import TypeVar
+def f():
+    T = TypeVar('T')
+    def g(x: T) -> T: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  FuncDef:2(
+    f
+    Block:2(
+      AssignmentStmt:3(
+        NameExpr(T* [l])
+        TypeVarExpr:3())
+      FuncDef:4(
+        g
+        Args(
+          Var(x))
+        def [T] (x: T`-1) -> T`-1
+        Block:4(
+          PassStmt:4())))))
+
+[case testClassLevelTypevar]
+from typing import TypeVar
+class A:
+    T = TypeVar('T')
+    def g(self, x: T) -> T: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  ClassDef:2(
+    A
+    AssignmentStmt:3(
+      NameExpr(T* [m])
+      TypeVarExpr:3())
+    FuncDef:4(
+      g
+      Args(
+        Var(self)
+        Var(x))
+      def [T] (self: __main__.A, x: T`-1) -> T`-1
+      Block:4(
+        PassStmt:4()))))
+
+[case testImportTypevar]
+from typing import Generic
+from _m import T
+class A(Generic[T]):
+    y = None # type: T
+[file _m.py]
+from typing import TypeVar
+T = TypeVar('T')
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Generic])
+  ImportFrom:2(_m, [T])
+  ClassDef:3(
+    A
+    TypeVars(
+      T)
+    AssignmentStmt:4(
+      NameExpr(y [m])
+      NameExpr(None [builtins.None])
+      T`1)))
+
+[case testQualifiedReferenceToTypevarInClass]
+from typing import Generic
+import _m
+class A(Generic[_m.T]):
+    a = None # type: _m.T
+    def f(self, x: _m.T):
+        b = None # type: _m.T
+[file _m.py]
+from typing import TypeVar
+T = TypeVar('T')
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Generic])
+  Import:2(_m)
+  ClassDef:3(
+    A
+    TypeVars(
+      _m.T)
+    AssignmentStmt:4(
+      NameExpr(a [m])
+      NameExpr(None [builtins.None])
+      _m.T`1)
+    FuncDef:5(
+      f
+      Args(
+        Var(self)
+        Var(x))
+      def (self: __main__.A[_m.T`1], x: _m.T`1) -> Any
+      Block:5(
+        AssignmentStmt:6(
+          NameExpr(b [l])
+          NameExpr(None [builtins.None])
+          _m.T`1)))))
+
+[case testQualifiedReferenceToTypevarInFunctionSignature]
+import _m
+def f(x: _m.T) -> None:
+    a = None # type: _m.T
+[file _m.py]
+from typing import TypeVar
+T = TypeVar('T')
+[out]
+MypyFile:1(
+  Import:1(_m)
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def [_m.T] (x: _m.T`-1)
+    Block:2(
+      AssignmentStmt:3(
+        NameExpr(a [l])
+        NameExpr(None [builtins.None])
+        _m.T`-1))))
+
+[case testFunctionCommentAnnotation]
+from typing import Any
+def f(x): # type: (int) -> Any
+  x = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def (x: builtins.int) -> Any
+    Block:2(
+      AssignmentStmt:3(
+        NameExpr(x [l])
+        IntExpr(1)))))
+
+[case testMethodCommentAnnotation]
+import typing
+class A:
+  def f(self, x): # type: (int) -> str
+    x = 1
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    FuncDef:3(
+      f
+      Args(
+        Var(self)
+        Var(x))
+      def (self: __main__.A, x: builtins.int) -> builtins.str
+      Block:3(
+        AssignmentStmt:4(
+          NameExpr(x [l])
+          IntExpr(1))))))
+
+[case testTypevarWithValues]
+from typing import TypeVar, Any
+T = TypeVar('T', int, str)
+S = TypeVar('S', Any, int, str)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Any])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      Values(
+        builtins.int
+        builtins.str)))
+  AssignmentStmt:3(
+    NameExpr(S* [__main__.S])
+    TypeVarExpr:3(
+      Values(
+        Any
+        builtins.int
+        builtins.str))))
+
+[case testTypevarWithValuesAndVariance]
+from typing import TypeVar
+T = TypeVar('T', int, str, covariant=True)
+[builtins fixtures/bool.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      Variance(COVARIANT)
+      Values(
+        builtins.int
+        builtins.str))))
+
+[case testTypevarWithBound]
+from typing import TypeVar
+T = TypeVar('T', bound=int)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      UpperBound(builtins.int))))
+
+[case testGenericFunctionWithValueSet]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      Values(
+        builtins.int
+        builtins.str)))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [T in (builtins.int, builtins.str)] (x: T`-1) -> T`-1
+    Block:3(
+      PassStmt:3())))
+
+[case testGenericClassWithValueSet]
+from typing import TypeVar, Generic
+T = TypeVar('T', int, str)
+class C(Generic[T]): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      Values(
+        builtins.int
+        builtins.str)))
+  ClassDef:3(
+    C
+    TypeVars(
+      T in (builtins.int, builtins.str))
+    PassStmt:3()))
+
+[case testGenericFunctionWithBound]
+from typing import TypeVar
+T = TypeVar('T', bound=int)
+def f(x: T) -> T: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      UpperBound(builtins.int)))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [T <: builtins.int] (x: T`-1) -> T`-1
+    Block:3(
+      PassStmt:3())))
+
+[case testGenericClassWithBound]
+from typing import TypeVar, Generic
+T = TypeVar('T', bound=int)
+class C(Generic[T]): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      UpperBound(builtins.int)))
+  ClassDef:3(
+    C
+    TypeVars(
+      T <: builtins.int)
+    PassStmt:3()))
+
+[case testSimpleDucktypeDecorator]
+from typing import _promote
+@_promote(str)
+class S: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [_promote])
+  ClassDef:2(
+    S
+    Promote(builtins.str)
+    Decorators(
+      PromoteExpr:2(builtins.str))
+    PassStmt:3()))
+
+[case testUnionType]
+from typing import Union
+def f(x: Union[int, str]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, builtins.str])
+    Block:2(
+      PassStmt:2())))
+
+[case testUnionTypeWithNoneItem]
+from typing import Union
+def f(x: Union[int, None]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, builtins.None])
+    Block:2(
+      PassStmt:2())))
+
+[case testUnionTypeWithNoneItemAndTwoItems]
+from typing import Union
+def f(x: Union[int, None, str]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, builtins.None, builtins.str])
+    Block:2(
+      PassStmt:2())))
+
+[case testUnionTypeWithSingleItem]
+from typing import Union
+def f(x: Union[int]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def (x: builtins.int)
+    Block:2(
+      PassStmt:2())))
+
+[case testOptionalTypes]
+from typing import Optional
+x = 1  # type: Optional[int]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Optional])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    IntExpr(1)
+    Union[builtins.int, builtins.None]))
+
+[case testInvalidOptionalType]
+from typing import Optional
+x = 1  # type: Optional[int, str]  # E: Optional[...] must have exactly one type argument
+y = 1  # type: Optional  # E: Optional[...] must have exactly one type argument
+[out]
+
+[case testCoAndContravariantTypeVar]
+from typing import TypeVar
+T = TypeVar('T', covariant=True)
+S = TypeVar('S', contravariant=True)
+[builtins fixtures/bool.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      Variance(COVARIANT)))
+  AssignmentStmt:3(
+    NameExpr(S* [__main__.S])
+    TypeVarExpr:3(
+      Variance(CONTRAVARIANT))))
+
+[case testTupleExpressionAsType]
+def f(x: (int, int)) -> None: pass
+[out]
+main:1: error: Invalid tuple literal type
+
+[case testQualifiedTypeNameBasedOnAny]
+from typing import Any
+x = 0 # type: Any
+z = 0 # type: x.y
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    IntExpr(0)
+    Any)
+  AssignmentStmt:3(
+    NameExpr(z [__main__.z])
+    IntExpr(0)
+    Any))
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
new file mode 100644
index 0000000..2795bb3
--- /dev/null
+++ b/test-data/unit/stubgen.test
@@ -0,0 +1,632 @@
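+-- Test cases for stub (.pyi) generation. Each case gives the source of an input
+-- module, and the [out] section gives the expected generated stub. A case may
+-- start with a "# flags: ..." comment to select stubgen options
+-- (for example --include-private).
+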
+[case testEmptyFile]
+[out]
+
+[case testSingleFunction]
+def f():
+    x = 1
+[out]
+def f(): ...
+
+[case testTwoFunctions]
+def f(a, b):
+    x = 1
+def g(arg):
+    pass
+[out]
+def f(a, b): ...
+def g(arg): ...
+
+[case testDefaultArgInt]
+def f(a, b=2): ...
+def g(b=-1, c=0): ...
+[out]
+def f(a, b: int = ...): ...
+def g(b: int = ..., c: int = ...): ...
+
+[case testDefaultArgNone]
+def f(x=None): ...
+[out]
+from typing import Any, Optional
+
+def f(x: Optional[Any] = ...): ...
+
+[case testDefaultArgBool]
+def f(x=True, y=False): ...
+[out]
+def f(x: bool = ..., y: bool = ...): ...
+
+[case testDefaultArgStr]
+def f(x='foo'): ...
+[out]
+def f(x: str = ...): ...
+
+[case testDefaultArgBytes]
+def f(x=b'foo'): ...
+[out]
+def f(x: bytes = ...): ...
+
+[case testDefaultArgFloat]
+def f(x=1.2): ...
+[out]
+def f(x: float = ...): ...
+
+[case testDefaultArgOther]
+def f(x=ord): ...
+[out]
+from typing import Any
+
+def f(x: Any = ...): ...
+
+[case testVarArgs]
+def f(x, *y): ...
+[out]
+def f(x, *y): ...
+
+[case testKwVarArgs]
+def f(x, **y): ...
+[out]
+def f(x, **y): ...
+
+[case testClass]
+class A:
+    def f(self, x):
+        x = 1
+def g(): ...
+[out]
+class A:
+    def f(self, x): ...
+
+def g(): ...
+
+[case testVariable]
+x = 1
+[out]
+x = ...  # type: int
+
+[case testMultipleVariable]
+x = y = 1
+[out]
+x = ...  # type: int
+y = ...  # type: int
+
+[case testClassVariable]
+class C:
+    x = 1
+[out]
+class C:
+    x = ...  # type: int
+
+[case testSelfAssignment]
+class C:
+    def __init__(self):
+        self.x = 1
+        x.y = 2
+[out]
+class C:
+    x = ...  # type: int
+    def __init__(self) -> None: ...
+
+[case testSelfAndClassBodyAssignment]
+x = 1
+class C:
+    x = 1
+    def __init__(self):
+        self.x = 1
+        self.x = 1
+[out]
+x = ...  # type: int
+
+class C:
+    x = ...  # type: int
+    def __init__(self) -> None: ...
+
+[case testEmptyClass]
+class A: ...
+[out]
+class A: ...
+
+[case testSkipPrivateFunction]
+def _f(): ...
+def g(): ...
+[out]
+def g(): ...
+
+[case testIncludePrivateFunction]
+# flags:  --include-private
+def _f(): ...
+def g(): ...
+[out]
+def _f(): ...
+def g(): ...
+
+[case testSkipPrivateMethod]
+class A:
+    def _f(self): ...
+[out]
+class A: ...
+
+[case testIncludePrivateMethod]
+# flags:  --include-private
+class A:
+    def _f(self): ...
+[out]
+class A:
+    def _f(self): ...
+
+[case testSkipPrivateVar]
+_x = 1
+class A:
+    _y = 1
+[out]
+class A: ...
+
+[case testIncludePrivateVar]
+# flags:  --include-private
+_x = 1
+class A:
+    _y = 1
+[out]
+_x = ...  # type: int
+
+class A:
+    _y = ...  # type: int
+
+[case testSpecialInternalVar]
+__all__ = []
+__author__ = ''
+__version__ = ''
+[out]
+
+[case testBaseClass]
+class A: ...
+class B(A): ...
+[out]
+class A: ...
+class B(A): ...
+
+[case testDecoratedFunction]
+@decorator
+def foo(x): ...
+[out]
+def foo(x): ...
+
+[case testMultipleAssignment]
+x, y = 1, 2
+[out]
+from typing import Any
+
+x = ...  # type: Any
+y = ...  # type: Any
+
+[case testMultipleAssignment2]
+[x, y] = 1, 2
+[out]
+from typing import Any
+
+x = ...  # type: Any
+y = ...  # type: Any
+
+[case testKeywordOnlyArg]
+def f(x, *, y=1): ...
+def g(x, *, y=1, z=2): ...
+[out]
+def f(x, *, y: int = ...): ...
+def g(x, *, y: int = ..., z: int = ...): ...
+
+[case testProperty]
+class A:
+    @property
+    def f(self):
+        return 1
+    @f.setter
+    def f(self, x): ...
+[out]
+class A:
+    @property
+    def f(self): ...
+    @f.setter
+    def f(self, x): ...
+
+[case testStaticMethod]
+class A:
+    @staticmethod
+    def f(x): ...
+[out]
+class A:
+    @staticmethod
+    def f(x): ...
+
+[case testClassMethod]
+class A:
+    @classmethod
+    def f(cls): ...
+[out]
+class A:
+    @classmethod
+    def f(cls): ...
+
+[case testIfMainCheck]
+def a(): ...
+if __name__ == '__main__':
+    x = 1
+    def f(): ...
+def b(): ...
+[out]
+def a(): ...
+def b(): ...
+
+[case testImportStar]
+from x import *
+from a.b import *
+def f(): ...
+[out]
+from x import *
+from a.b import *
+
+def f(): ...
+
+[case testNoSpacesBetweenEmptyClasses]
+class X:
+    def g(self): ...
+class A: ...
+class B: ...
+class C:
+    def f(self): ...
+[out]
+class X:
+    def g(self): ...
+
+class A: ...
+class B: ...
+
+class C:
+    def f(self): ...
+
+[case testExceptionBaseClasses]
+class A(Exception): ...
+class B(ValueError): ...
+[out]
+class A(Exception): ...
+class B(ValueError): ...
+
+[case testOmitSomeSpecialMethods]
+class A:
+    def __str__(self): ...
+    def __repr__(self): ...
+    def __eq__(self): ...
+    def __getstate__(self): ...
+    def __setstate__(self, state): ...
+[out]
+class A:
+    def __eq__(self): ...
+
+[case testOmitDefsNotInAll_import]
+__all__ = [] + ['f']
+def f(): ...
+def g(): ...
+[out]
+def f(): ...
+
+[case testVarDefsNotInAll_import]
+__all__ = [] + ['f', 'g']
+def f(): ...
+x = 1
+y = 1
+def g(): ...
+[out]
+def f(): ...
+def g(): ...
+
+[case testIncludeClassNotInAll_import]
+__all__ = [] + ['f']
+def f(): ...
+class A: ...
+[out]
+def f(): ...
+
+class A: ...
+
+[case testAllAndClass_import]
+__all__ = ['A']
+class A:
+    x = 1
+    def f(self): ...
+[out]
+class A:
+    x = ...  # type: int
+    def f(self): ...
+
+[case testSkipMultiplePrivateDefs]
+class A: ...
+_x = 1
+_y = 1
+_z = 1
+class C: ...
+[out]
+class A: ...
+class C: ...
+
+[case testIncludeMultiplePrivateDefs]
+# flags:  --include-private
+class A: ...
+_x = 1
+_y = 1
+_z = 1
+class C: ...
+[out]
+class A: ...
+
+_x = ...  # type: int
+_y = ...  # type: int
+_z = ...  # type: int
+
+class C: ...
+
+[case testIncludeFromImportIfInAll_import]
+from re import match, search, sub
+__all__ = ['match', 'sub', 'x']
+x = 1
+[out]
+from re import match as match, sub as sub
+
+x = ...  # type: int
+
+[case testExportModule_import]
+import re
+__all__ = ['re', 'x']
+x = 1
+y = 2
+[out]
+import re as re
+
+x = ...  # type: int
+
+[case testExportModuleAs_import]
+import re as rex
+__all__ = ['rex', 'x']
+x = 1
+y = 2
+[out]
+import re as rex
+
+x = ...  # type: int
+
+[case testExportModuleInPackage_import]
+import urllib.parse as p
+__all__ = ['p']
+[out]
+import urllib.parse as p
+
+[case testExportModuleInPackageUnsupported_import]
+import urllib.parse
+__all__ = ['urllib']
+[out]
+# Names in __all__ with no definition:
+#   urllib
+
+[case testRelativeImportAll]
+from .x import *
+[out]
+from .x import *
+
+[case testCommentForUndefinedName_import]
+__all__ = ['f', 'x', 'C', 'g']
+def f(): ...
+x = 1
+class C:
+    def g(self): ...
+[out]
+def f(): ...
+
+x = ...  # type: int
+
+class C:
+    def g(self): ...
+
+# Names in __all__ with no definition:
+#   g
+
+[case testIgnoreSlots]
+class A:
+    __slots__ = ()
+[out]
+class A: ...
+
+[case testSkipPrivateProperty]
+class A:
+    @property
+    def _foo(self): ...
+[out]
+class A: ...
+
+[case testIncludePrivateProperty]
+# flags:  --include-private
+class A:
+    @property
+    def _foo(self): ...
+[out]
+class A:
+    @property
+    def _foo(self): ...
+
+[case testSkipPrivateStaticAndClassMethod]
+class A:
+    @staticmethod
+    def _foo(): ...
+    @classmethod
+    def _bar(cls): ...
+[out]
+class A: ...
+
+[case testIncludePrivateStaticAndClassMethod]
+# flags:  --include-private
+class A:
+    @staticmethod
+    def _foo(): ...
+    @classmethod
+    def _bar(cls): ...
+[out]
+class A:
+    @staticmethod
+    def _foo(): ...
+    @classmethod
+    def _bar(cls): ...
+
+[case testNamedtuple]
+import collections, x
+X = collections.namedtuple('X', ['a', 'b'])
+[out]
+from collections import namedtuple
+
+X = namedtuple('X', ['a', 'b'])
+
+[case testNamedtupleAltSyntax]
+from collections import namedtuple, x
+X = namedtuple('X', 'a b')
+[out]
+from collections import namedtuple
+
+X = namedtuple('X', 'a b')
+
+[case testNamedtupleWithUnderscore]
+from collections import namedtuple as _namedtuple
+def f(): ...
+X = _namedtuple('X', 'a b')
+def g(): ...
+[out]
+from collections import namedtuple as _namedtuple
+from collections import namedtuple
+
+def f(): ...
+
+X = namedtuple('X', 'a b')
+
+def g(): ...
+
+[case testNamedtupleBaseClass]
+import collections, x
+_X = collections.namedtuple('_X', ['a', 'b'])
+class Y(_X): ...
+[out]
+from collections import namedtuple
+
+_X = namedtuple('_X', ['a', 'b'])
+
+class Y(_X): ...
+
+[case testArbitraryBaseClass]
+import x
+class D(x.C): ...
+[out]
+import x
+
+class D(x.C): ...
+
+[case testArbitraryBaseClass2]
+import x.y
+class D(x.y.C): ...
+[out]
+import x.y
+
+class D(x.y.C): ...
+
+[case testUnqualifiedArbitraryBaseClassWithNoDef]
+class A(int): ...
+[out]
+class A(int): ...
+
+[case testUnqualifiedArbitraryBaseClass]
+from x import X
+class A(X): ...
+[out]
+from x import X
+
+class A(X): ...
+
+[case testUnqualifiedArbitraryBaseClassWithImportAs]
+from x import X as _X
+class A(_X): ...
+[out]
+from x import X as _X
+
+class A(_X): ...
+
+[case testObjectBaseClass]
+class A(object): ...
+[out]
+class A: ...
+
+[case testEmptyLines]
+def x(): ...
+def f():
+    class A:
+        def f(self):
+            self.x = 1
+def g(): ...
+[out]
+def x(): ...
+def f(): ...
+def g(): ...
+
+[case testNestedClass]
+class A:
+    class B:
+        x = 1
+        def f(self): ...
+    def g(self): ...
+[out]
+class A:
+    class B:
+        x = ...  # type: int
+        def f(self): ...
+    def g(self): ...
+
+[case testExportViaRelativeImport]
+from .api import get
+[out]
+from .api import get as get
+
+[case testExportViaRelativePackageImport]
+from .packages.urllib3.contrib import parse
+[out]
+from .packages.urllib3.contrib import parse as parse
+
+[case testNoExportViaRelativeImport]
+from . import get
+[out]
+
+[case testRelativeImportAndBase]
+from .x import X
+class A(X):
+     pass
+[out]
+from .x import X as X
+
+class A(X): ...
+
+[case testDuplicateDef]
+def syslog(a): pass
+def syslog(a): pass
+[out]
+def syslog(a): ...
+
+[case testAsyncAwait_fast_parser]
+async def f(a):
+   x = await y
+[out]
+def f(a): ...
+
+[case testInferOptionalOnlyFunc]
+class A:
+    x = None
+    def __init__(self, a=None) -> None:
+        self.x = []
+[out]
+from typing import Any, Optional
+
+class A:
+    x = ...  # type: Any
+    def __init__(self, a: Optional[Any] = ...) -> None: ...
+
+-- More features/fixes:
+--   do not export deleted names
diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test
new file mode 100644
index 0000000..4135926
--- /dev/null
+++ b/test-data/unit/typexport-basic.test
@@ -0,0 +1,1169 @@
+-- Test cases for exporting node types from the type checker.
+--
+-- Each test case consists of at least two sections.
+-- The first section contains [case NAME-skip] followed by the input code,
+-- while the second section contains [out] followed by the output from the type
+-- checker.
+--
+-- The first line of input code should be a regexp in a comment (prefixed with ##)
+-- that describes the information to dump. The regexp is matched against
+-- the following items:
+--
+--   * each name of an expression node
+--   * each type string of a node (e.g. OpExpr)
+--
+-- Lines starting with "--" in this file will be ignored.
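+--
+-- For example, a case whose code starts with the header comment
+-- "## MemberExpr|CallExpr" (as in testMemberAccess below) dumps only member
+-- access and call expression nodes, each with its inferred type, in [out].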
+
+
+-- Expressions
+-- -----------
+
+
+[case testConstructorCall]
+import typing
+A()
+B()
+class A: pass
+class B: pass
+[out]
+CallExpr(2) : A
+NameExpr(2) : def () -> A
+CallExpr(3) : B
+NameExpr(3) : def () -> B
+
+[case testLiterals]
+import typing
+5
+2.3
+'foo'
+[builtins fixtures/primitives.pyi]
+[out]
+IntExpr(2) : builtins.int
+FloatExpr(3) : builtins.float
+StrExpr(4) : builtins.str
+
+[case testNameExpression]
+
+a = None # type: A
+a # node
+def f(aa: 'A') -> None:
+  b = None # type: B
+  aa # node
+  b  # node
+class A:
+  def g(self) -> None:
+    self # node
+class B: pass
+[out]
+NameExpr(3) : A
+NameExpr(6) : A
+NameExpr(7) : B
+NameExpr(10) : A
+
+[case testEllipsis]
+import typing
+...
+[out]
+EllipsisExpr(2) : builtins.ellipsis
+
+[case testMemberAccess]
+## MemberExpr|CallExpr
+
+a = None # type: A
+a.m
+a.f
+a.f()
+class A:
+  m = None # type: A
+  def f(self) -> 'B': pass
+class B: pass
+[out]
+MemberExpr(4) : A
+MemberExpr(5) : def () -> B
+CallExpr(6) : B
+MemberExpr(6) : def () -> B
+
+[case testCastExpression]
+## CastExpr|[a-z]
+from typing import Any, cast
+d = None # type: Any
+b = None # type: B
+class A: pass
+class B(A): pass
+cast(A, d)
+cast(A, b)
+cast(B, b)
+[out]
+CastExpr(7) : A
+NameExpr(7) : Any
+CastExpr(8) : A
+NameExpr(8) : B
+CastExpr(9) : B
+NameExpr(9) : B
+
+[case testArithmeticOps]
+## OpExpr
+import typing
+a = 1 + 2
+1.2 * 3
+2.2 - 3
+1 / 2
+[file builtins.py]
+class object:
+    def __init__(self) -> None: pass
+class function: pass
+class int:
+    def __add__(self, x: int) -> int: pass
+    def __truediv__(self, x: int) -> float: pass
+class float:
+    def __mul__(self, x: int) -> float: pass
+    def __sub__(self, x: int) -> float: pass
+class type: pass
+class str: pass
+[out]
+OpExpr(3) : builtins.int
+OpExpr(4) : builtins.float
+OpExpr(5) : builtins.float
+OpExpr(6) : builtins.float
+
+[case testComparisonOps]
+## ComparisonExpr
+import typing
+1 == object()
+1 == 2
+2 < 3
+1 < 2 < 3
+8 > 3
+4 < 6 > 2
+[file builtins.py]
+class object:
+    def __init__(self) -> None: pass
+class int:
+    def __eq__(self, x: object) -> bool: pass
+    def __lt__(self, x: int) -> bool: pass
+    def __gt__(self, x: int) -> int: pass
+class bool: pass
+class type: pass
+class function: pass
+class str: pass
+[out]
+ComparisonExpr(3) : builtins.bool
+ComparisonExpr(4) : builtins.bool
+ComparisonExpr(5) : builtins.bool
+ComparisonExpr(6) : builtins.bool
+ComparisonExpr(7) : builtins.int
+ComparisonExpr(8) : builtins.object
+
+[case testBooleanOps]
+## OpExpr|UnaryExpr
+import typing
+a = 1
+a and a
+a or a
+not a
+[builtins fixtures/bool.pyi]
+[out]
+OpExpr(4) : builtins.int
+OpExpr(5) : builtins.int
+UnaryExpr(6) : builtins.bool
+
+[case testBooleanOpsOnBools]
+## OpExpr|UnaryExpr
+import typing
+a = bool()
+a and a
+a or a
+not a
+[builtins fixtures/bool.pyi]
+[out]
+OpExpr(4) : builtins.bool
+OpExpr(5) : builtins.bool
+UnaryExpr(6) : builtins.bool
+
+[case testFunctionCall]
+## CallExpr
+from typing import Tuple
+f(
+  A(),
+  B())
+class A: pass
+class B: pass
+def f(a: A, b: B) -> Tuple[A, B]: pass
+[builtins fixtures/tuple-simple.pyi]
+[out]
+CallExpr(3) : Tuple[A, B]
+CallExpr(4) : A
+CallExpr(5) : B
+
+
+-- Statements
+-- ----------
+
+
+[case testSimpleAssignment]
+from typing import Any
+a = None # type: A
+b = a # type: Any
+b = a
+a = b
+
+class A: pass
+[out]
+NameExpr(3) : A
+NameExpr(4) : A
+NameExpr(4) : Any
+NameExpr(5) : A
+NameExpr(5) : Any
+
+[case testMemberAssignment]
+from typing import Any
+class A:
+  a = None # type: A
+  b = None # type: Any
+  def f(self) -> None:
+    self.b = self.a
+    self.a.a = self.b
+[out]
+MemberExpr(6) : A
+MemberExpr(6) : Any
+NameExpr(6) : A
+NameExpr(6) : A
+MemberExpr(7) : A
+MemberExpr(7) : A
+MemberExpr(7) : A
+NameExpr(7) : A
+NameExpr(7) : A
+
+[case testIf]
+
+a = None # type: bool
+if a:
+  1
+elif not a:
+  1
+[builtins fixtures/bool.pyi]
+[out]
+NameExpr(3) : builtins.bool
+IntExpr(4) : builtins.int
+NameExpr(5) : builtins.bool
+UnaryExpr(5) : builtins.bool
+IntExpr(6) : builtins.int
+
+[case testWhile]
+
+a = None # type: bool
+while a:
+  a
+[builtins fixtures/bool.pyi]
+[out]
+NameExpr(3) : builtins.bool
+NameExpr(4) : builtins.bool
+
+
+-- Simple type inference
+-- ---------------------
+
+
+[case testInferSingleType]
+import typing
+x = ()
+[builtins fixtures/primitives.pyi]
+[out]
+NameExpr(2) : Tuple[]
+TupleExpr(2) : Tuple[]
+
+[case testInferTwoTypes]
+## NameExpr
+import typing
+(s,
+i) = 'x', 1
+[builtins fixtures/primitives.pyi]
+[out]
+NameExpr(3) : builtins.str
+NameExpr(4) : builtins.int
+
+[case testInferSingleLocalVarType]
+import typing
+def f() -> None:
+    x = ()
+[builtins fixtures/primitives.pyi]
+[out]
+NameExpr(3) : Tuple[]
+TupleExpr(3) : Tuple[]
+
+
+-- Basic generics
+-- --------------
+
+
+[case testImplicitBoundTypeVarsForMethod]
+## MemberExpr
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+  def f(self) -> T: pass
+class B: pass
+def g() -> None:
+  a = None # type: A[B]
+  f = a.f
+[out]
+MemberExpr(9) : def () -> B
+
+[case testImplicitBoundTypeVarsForSelfMethodReference]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+  def f(self) -> T:
+    return self.f()
+[out]
+CallExpr(5) : T`1
+MemberExpr(5) : def () -> T`1
+NameExpr(5) : A[T`1]
+
+[case testGenericFunctionCallWithTypeApp-skip]
+## CallExpr|TypeApplication|NameExpr
+from typing import Any, TypeVar, Tuple
+T = TypeVar('T')
+class A: pass
+f[A](A())
+f[Any](A())
+def f(a: T) -> Tuple[T, T]: pass
+[builtins fixtures/tuple.pyi]
+[out]
+CallExpr(5) : A
+CallExpr(5) : Tuple[A, A]
+NameExpr(5) : def () -> A
+NameExpr(5) : def (a: A) -> Tuple[A, A]
+TypeApplication(5) : def (a: A) -> Tuple[A, A]
+CallExpr(6) : A
+CallExpr(6) : Tuple[Any, Any]
+NameExpr(6) : def () -> A
+NameExpr(6) : def (a: Any) -> Tuple[Any, Any]
+TypeApplication(6) : def (a: Any) -> Tuple[Any, Any]
+
+-- NOTE: Type applications are not supported for generic methods, so the
+--       following test cases are commented out.
+
+--[case testGenericMethodCallWithTypeApp]
+--## CallExpr|MemberExpr|TypeApplication
+--from typing import Any, TypeVar, Tuple
+--T = TypeVar('T')
+--class A:
+--  def f(self, a: T) -> Tuple[T, T]: pass
+--a.f[A](a)
+--a.f[Any](a)
+--a = None # type: A
+--[builtins fixtures/tuple.py]
+--[out]
+--CallExpr(2) : Tuple[A, A]
+--MemberExpr(2) : def (A a) -> Tuple[A, A]
+--TypeApplication(2) : def (A a) -> Tuple[A, A]
+--CallExpr(3) : Tuple[Any, Any]
+--MemberExpr(3) : def (any a) -> Tuple[Any, Any]
+--TypeApplication(3) : def (any a) -> Tuple[Any, Any]
+
+--[case testGenericMethodCallInGenericTypeWithTypeApp]
+--## CallExpr|MemberExpr|TypeApplication
+--from typing import Any, TypeVar, Generic, Tuple
+--T = TypeVar('T')
+--S = TypeVar('S')
+--class B: pass
+--class C: pass
+--a.f[B](b)
+--a.f[Any](b)
+--class A(Generic[T]):
+--  def f(self, a: S) -> Tuple[T, S]: pass
+--a = None # type: A[C]
+--b = None # type: B
+--[builtins fixtures/tuple.py]
+--[out]
+--CallExpr(6) : Tuple[C, B]
+--MemberExpr(6) : def (B a) -> Tuple[C, B]
+--TypeApplication(6) : def (B a) -> Tuple[C, B]
+--CallExpr(7) : Tuple[C, Any]
+--MemberExpr(7) : def (any a) -> Tuple[C, Any]
+--TypeApplication(7) : def (any a) -> Tuple[C, Any]
+
+[case testGenericTypeVariableInference]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+  def __init__(self, a: T) -> None: pass
+class B: pass
+A(A(B()))
+[out]
+CallExpr(6) : A[A[B]]
+CallExpr(6) : A[B]
+CallExpr(6) : B
+NameExpr(6) : def (a: A[B]) -> A[A[B]]
+NameExpr(6) : def (a: B) -> A[B]
+NameExpr(6) : def () -> B
+
+
+-- Generic inheritance
+-- -------------------
+
+
+[case testInheritedMethodReferenceWithGenericInheritance]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class C: pass
+class A(Generic[T]):
+  def f(self, a: T) -> None: pass
+class B(A[C]):
+  def g(self, c: C) -> None:
+    self.f(c)
+[out]
+CallExpr(8) : builtins.None
+MemberExpr(8) : def (a: C)
+NameExpr(8) : C
+NameExpr(8) : B
+
+[case testInheritedMethodReferenceWithGenericSubclass]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+class C: pass
+class A(Generic[S, T]):
+  def f(self, a: C) -> None: pass
+class B(A[C, T], Generic[T]):
+  def g(self, c: C) -> None:
+    self.f(c)
+[out]
+CallExpr(9) : builtins.None
+MemberExpr(9) : def (a: C)
+NameExpr(9) : C
+NameExpr(9) : B[T`1]
+
+[case testExternalReferenceWithGenericInheritance]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class C: pass
+class A(Generic[T]):
+  def f(self, a: T) -> None: pass
+class B(A[C]): pass
+b = None # type: B
+c = None # type: C
+b.f(c)
+[out]
+CallExpr(9) : builtins.None
+MemberExpr(9) : def (a: C)
+NameExpr(9) : B
+NameExpr(9) : C
+
+
+-- Implicit Any types
+-- ------------------
+
+
+[case testDynamicallyTypedFunction]
+
+def f(x):
+  y = x + o
+  z = o
+  z
+o = None # type: object
+[out]
+NameExpr(3) : builtins.object
+NameExpr(3) : Any
+NameExpr(3) : Any
+OpExpr(3) : Any
+NameExpr(4) : builtins.object
+NameExpr(4) : Any
+NameExpr(5) : Any
+
+[case testDynamicallyTypedMethod]
+
+class A:
+  def f(self, x):
+    y = (
+         o)  # Place y and o on separate lines
+    x
+    y
+o = None # type: object
+[out]
+NameExpr(4) : Any
+NameExpr(5) : builtins.object
+NameExpr(6) : Any
+NameExpr(7) : Any
+
+[case testDynamicallyTypedConstructor]
+
+class A:
+  def __init__(self, x):
+    y = o
+    x
+    y
+o = None # type: object
+[out]
+NameExpr(4) : builtins.object
+NameExpr(4) : Any
+NameExpr(5) : Any
+NameExpr(6) : Any
+
+[case testCallInDynamicallyTypedFunction]
+
+def f():
+  g(o)
+def g(a: object) -> object: pass
+o = None # type: object
+[out]
+CallExpr(3) : Any
+NameExpr(3) : def (a: builtins.object) -> builtins.object
+NameExpr(3) : builtins.object
+
+[case testExpressionInDynamicallyTypedFn]
+import typing
+def f():
+  x = None
+  x.f()
+[out]
+CallExpr(4) : Any
+MemberExpr(4) : Any
+NameExpr(4) : Any
+
+[case testGenericCall]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def f() -> None:
+  a1 = A(b) # type: A[B]
+  a2 = A(b) # type: A[object]
+class A(Generic[T]):
+  def __init__(self, a: T) -> None: pass
+class B: pass
+b = None # type: B
+[out]
+CallExpr(4) : A[B]
+NameExpr(4) : def (a: B) -> A[B]
+NameExpr(4) : B
+CallExpr(5) : A[builtins.object]
+NameExpr(5) : def (a: builtins.object) -> A[builtins.object]
+NameExpr(5) : B
+
+[case testGenericCallInDynamicallyTypedFunction]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def f():
+  A()
+class A(Generic[T]): pass
+[out]
+CallExpr(4) : Any
+NameExpr(4) : def [T] () -> A[T`1]
+
+[case testGenericCallInDynamicallyTypedFunction2]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def f():
+  A(f)
+class A(Generic[T]):
+    def __init__(self, x: T) -> None: pass
+[out]
+CallExpr(4) : Any
+NameExpr(4) : def [T] (x: T`1) -> A[T`1]
+NameExpr(4) : def () -> Any
+
+[case testGenericCallInDynamicallyTypedFunction3]
+from typing import TypeVar
+t = TypeVar('t')
+def f():
+  g(None)
+def g(x: t) -> t: pass
+[out]
+CallExpr(4) : Any
+NameExpr(4) : def [t] (x: t`-1) -> t`-1
+
+
+-- Generic types and type inference
+-- --------------------------------
+
+
+[case testInferenceInArgumentContext]
+## CallExpr
+from typing import TypeVar, Generic
+T = TypeVar('T')
+f(g())
+f(h(b))
+f(h(c))
+
+b = None # type: B
+c = None # type: C
+
+def f(a: 'A[B]') -> None: pass
+
+def g() -> 'A[T]': pass
+def h(a: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+class C(B): pass
+[out]
+CallExpr(4) : builtins.None
+CallExpr(4) : A[B]
+CallExpr(5) : builtins.None
+CallExpr(5) : A[B]
+CallExpr(6) : builtins.None
+CallExpr(6) : A[B]
+
+[case testInferGenericTypeForLocalVariable]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def f() -> None:
+  a = A(b)
+  a
+  a2, a3 = A(b), A(c)
+  a2
+  a3
+b = None # type: B
+c = None # type: C
+class A(Generic[T]):
+  def __init__(self, x: T) -> None: pass
+class B: pass
+class C: pass
+[out]
+CallExpr(4) : A[B]
+NameExpr(4) : def (x: B) -> A[B]
+NameExpr(4) : A[B]
+NameExpr(4) : B
+NameExpr(5) : A[B]
+CallExpr(6) : A[B]
+CallExpr(6) : A[C]
+NameExpr(6) : def (x: B) -> A[B]
+NameExpr(6) : def (x: C) -> A[C]
+NameExpr(6) : A[B]
+NameExpr(6) : A[C]
+NameExpr(6) : B
+NameExpr(6) : C
+NameExpr(7) : A[B]
+NameExpr(8) : A[C]
+
+[case testNestedGenericCalls]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+def h() -> None:
+  g(f(c))
+
+c = None # type: C
+
+class A(Generic[T]): pass
+class B(Generic[T]): pass
+class C: pass
+def f(a: T) -> A[T]: pass
+def g(a: S) -> B[S]: pass
+[out]
+CallExpr(5) : A[C]
+CallExpr(5) : B[A[C]]
+NameExpr(5) : C
+NameExpr(5) : def (a: C) -> A[C]
+NameExpr(5) : def (a: A[C]) -> B[A[C]]
+
+[case testInferListLiterals]
+from typing import List
+a = [] # type: List[A]
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+ListExpr(2) : builtins.list[A]
+
+[case testInferGenericTypeInTypeAnyContext]
+from typing import Any
+a = [] # type: Any
+[builtins fixtures/list.pyi]
+[out]
+ListExpr(2) : builtins.list[Any]
+
+[case testHigherOrderFunction]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+map(
+    f,
+    [A()])
+def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
+class A: pass
+class B: pass
+def f(a: A) -> B: pass
+[builtins fixtures/list.pyi]
+[out]
+CallExpr(4) : builtins.list[B]
+NameExpr(4) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
+NameExpr(5) : def (a: A) -> B
+CallExpr(6) : A
+ListExpr(6) : builtins.list[A]
+NameExpr(6) : def () -> A
+
+
+-- Lambdas
+-- -------
+
+
+[case testLambdaWithTypeInferredFromContext]
+from typing import Callable
+f = lambda x: x.a # type: Callable[[B], A]
+class A: pass
+class B:
+  a = None # type: A
+[out]
+LambdaExpr(2) : def (B) -> A
+MemberExpr(2) : A
+NameExpr(2) : B
+
+[case testLambdaWithInferredType]
+## LambdaExpr|NameExpr
+import typing
+f = lambda: 1
+[out]
+LambdaExpr(3) : def () -> builtins.int
+NameExpr(3) : def () -> builtins.int
+
+[case testLambdaWithInferredType2]
+## LambdaExpr|NameExpr
+import typing
+f = lambda: [1]
+[builtins fixtures/list.pyi]
+[out]
+LambdaExpr(3) : def () -> builtins.list[builtins.int]
+NameExpr(3) : def () -> builtins.list[builtins.int]
+
+[case testLambdaWithInferredType3]
+from typing import List, Callable
+f = lambda x: [] # type: Callable[[B], List[A]]
+class A: pass
+class B:
+  a = None # type: A
+[builtins fixtures/list.pyi]
+[out]
+LambdaExpr(2) : def (B) -> builtins.list[A]
+ListExpr(2) : builtins.list[A]
+
+[case testLambdaAndHigherOrderFunction]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+l = None # type: List[A]
+map(
+  lambda x: f(x), l)
+def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
+class A: pass
+class B: pass
+def f(a: A) -> B: pass
+[builtins fixtures/list.pyi]
+[out]
+CallExpr(5) : builtins.list[B]
+NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
+CallExpr(6) : B
+LambdaExpr(6) : def (A) -> B
+NameExpr(6) : def (a: A) -> B
+NameExpr(6) : builtins.list[A]
+NameExpr(6) : A
+
+[case testLambdaAndHigherOrderFunction2]
+## LambdaExpr|NameExpr|ListExpr
+from typing import TypeVar, List, Callable
+t = TypeVar('t')
+s = TypeVar('s')
+l = None # type: List[A]
+map(
+  lambda x: [f(x)], l)
+def map(f: Callable[[t], List[s]], a: List[t]) -> List[s]: pass
+class A: pass
+class B: pass
+def f(a: A) -> B: pass
+[builtins fixtures/list.pyi]
+[out]
+NameExpr(6) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B]
+LambdaExpr(7) : def (A) -> builtins.list[B]
+ListExpr(7) : builtins.list[B]
+NameExpr(7) : def (a: A) -> B
+NameExpr(7) : builtins.list[A]
+NameExpr(7) : A
+
+[case testLambdaInListAndHigherOrderFunction]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+l = None # type: List[A]
+map(
+  [lambda x: x],
+  l)
+def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+-- TODO We probably should not silently infer 'Any' types in statically typed
+--      context. Perhaps just fail instead?
+CallExpr(5) : builtins.list[Any]
+NameExpr(5) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any]
+LambdaExpr(6) : def (A) -> A
+ListExpr(6) : builtins.list[def (A) -> Any]
+NameExpr(6) : A
+NameExpr(7) : builtins.list[A]
+
+[case testLambdaAndHigherOrderFunction3]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+l = None # type: List[A]
+map(
+  lambda x: x.b,
+  l)
+def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
+class A:
+  b = None # type: B
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+CallExpr(5) : builtins.list[B]
+NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
+LambdaExpr(6) : def (A) -> B
+MemberExpr(6) : B
+NameExpr(6) : A
+NameExpr(7) : builtins.list[A]
+
+[case testLambdaAndHigherOrderFunctionAndKeywordArgs]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+l = None # type: List[A]
+map(
+  a=l,
+  f=lambda x: x.b)
+def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
+class A:
+  b = None # type: B
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+CallExpr(5) : builtins.list[B]
+NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
+NameExpr(6) : builtins.list[A]
+LambdaExpr(7) : def (A) -> B
+MemberExpr(7) : B
+NameExpr(7) : A
+
+
+-- Boolean operations
+-- ------------------
+
+
+[case testBooleanOr]
+from typing import List
+a = None # type: List[A]
+a or []
+a = a or []
+a = [] or a
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+ListExpr(3) : builtins.list[A]
+NameExpr(3) : builtins.list[A]
+OpExpr(3) : builtins.list[A]
+ListExpr(4) : builtins.list[A]
+NameExpr(4) : builtins.list[A]
+NameExpr(4) : builtins.list[A]
+OpExpr(4) : builtins.list[A]
+ListExpr(5) : builtins.list[A]
+NameExpr(5) : builtins.list[A]
+NameExpr(5) : builtins.list[A]
+OpExpr(5) : builtins.list[A]
+
+
+-- Class attributes
+-- ----------------
+
+
+[case testUnboundMethod]
+## MemberExpr
+import typing
+class A:
+    def f(self) -> None: pass
+A.f
+[out]
+MemberExpr(5) : def (self: A)
+
+[case testUnboundMethodWithImplicitSig]
+## MemberExpr
+import typing
+class A:
+    def f(self): pass
+A.f
+[out]
+MemberExpr(5) : def (self: Any) -> Any
+
+[case testOverloadedUnboundMethod]
+## MemberExpr
+from typing import overload
+class A:
+    @overload
+    def f(self) -> None: pass
+    @overload
+    def f(self, __x: object) -> None: pass
+
+    def f(self, *args) -> None: pass
+A.f
+[out]
+MemberExpr(10) : Overload(def (self: A), def (self: A, builtins.object))
+
+[case testOverloadedUnboundMethodWithImplicitSig]
+## MemberExpr
+from typing import overload
+class A:
+    @overload
+    def f(self): pass
+    @overload
+    def f(self, __x): pass
+
+    def f(self, *args): pass
+A.f
+[out]
+MemberExpr(10) : Overload(def (self: Any) -> Any, def (self: Any, Any) -> Any)
+
+[case testUnboundMethodWithInheritance]
+## MemberExpr
+import typing
+class A:
+    def __init__(self) -> None: pass
+    def f(self) -> None: pass
+class B(A):
+    pass
+B.f
+[out]
+MemberExpr(8) : def (self: A)
+
+[case testUnboundGenericMethod]
+## MemberExpr
+from typing import TypeVar
+t = TypeVar('t')
+class B: pass
+class A:
+    def f(self, x: t) -> None: pass
+A.f(A(), B())
+[out]
+MemberExpr(7) : def (self: A, x: B)
+
+[case testUnboundMethodOfGenericClass]
+## MemberExpr
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class B: pass
+class A(Generic[t]):
+    def f(self, x: t) -> None: pass
+A.f
+a_b = A() # type: A[B]
+A.f(a_b, B())
+[out]
+MemberExpr(7) : def [t] (self: A[t`1], x: t`1)
+MemberExpr(9) : def (self: A[B], x: B)
+
+[case testUnboundOverloadedMethodOfGenericClass]
+## CallExpr
+from typing import TypeVar, Generic, overload
+t = TypeVar('t')
+class B: pass
+class A(Generic[t]):
+    @overload
+    def f(self, x: t) -> t: pass
+    @overload
+    def f(self) -> object: pass
+    def f(self, *args): pass
+
+ab, b = None, None # type: (A[B], B)
+A.f(ab, b)
+[out]
+CallExpr(13) : B
+
+[case testUnboundMethodOfGenericClassWithImplicitSig]
+## MemberExpr
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class B: pass
+class A(Generic[t]):
+    def f(self, x): pass
+A.f(None, None)
+[out]
+MemberExpr(7) : def (self: Any, x: Any) -> Any
+
+[case testGenericMethodOfGenericClass]
+## MemberExpr
+from typing import TypeVar, Generic
+t = TypeVar('t')
+s = TypeVar('s')
+class B: pass
+class A(Generic[t]):
+    def f(self, y: s) -> None: pass
+ab = None # type: A[B]
+o = None # type: object
+A.f(ab, o)
+[out]
+MemberExpr(10) : def (self: A[B], y: builtins.object)
+
+
+-- Type variables with value restriction
+-- -------------------------------------
+
+
+[case testTypeVariableWithValueRestriction]
+## NameExpr
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> None: pass
+f(1)
+f('x')
+[out]
+NameExpr(5) : def (x: builtins.int)
+NameExpr(6) : def (x: builtins.str)
+
+[case testTypeVariableWithValueRestrictionAndSubtype]
+## NameExpr|CallExpr
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T: pass
+class S(str): pass
+s = None # type: S
+f(s)
+[out]
+CallExpr(7) : builtins.str
+NameExpr(7) : def (x: builtins.str) -> builtins.str
+NameExpr(7) : S
+
+
+-- Binary operations
+-- -----------------
+
+
+[case testBinaryOperatorWithAnyLeftOperand]
+## OpExpr
+from typing import Any, cast
+class B:
+    def __add__(self, x: int) -> str: pass
+class A:
+    def __radd__(self, x: B) -> int: pass
+cast(Any, 1) + A()
+B() + A()
+[out]
+OpExpr(7) : Any
+OpExpr(8) : builtins.int
+
+[case testBinaryOperatorWithAnyRightOperand]
+## OpExpr
+from typing import Any, cast
+class A:
+    def __add__(self, x: str) -> int: pass
+A() + cast(Any, 1)
+[out]
+OpExpr(5) : Any
+
+
+-- Callable overloading
+-- --------------------
+
+
+[case testOverloadedFunctionType]
+## CallExpr
+from typing import overload
+@overload
+def f(x: int) -> str: pass
+@overload
+def f(x: str) -> int: pass
+def f(x): pass
+f(1)
+f('')
+[out]
+CallExpr(8) : builtins.str
+CallExpr(9) : builtins.int
+
+[case testOverlappingOverloadedFunctionType]
+## CallExpr
+from typing import overload, Any
+class A: pass
+class B(A): pass
+ at overload
+def f(x: B) -> B: pass
+ at overload
+def f(x: A) -> A: pass
+
+def f(x) -> Any: pass
+
+a = None # type: A
+b = None # type: B
+f(a)
+f(b)
+[out]
+CallExpr(14) : A
+CallExpr(15) : B
+
+
+
+[case testOverloadedErasedType]
+from typing import Callable
+from typing import List
+from typing import overload
+from typing import TypeVar
+
+T = TypeVar("T")
+V = TypeVar("V")
+
+def fun(s: int) -> int: pass
+
+def m(fun: Callable[[T], V], iter: List[T]) -> None: pass
+
+nums = [1] # type: List[int]
+m(fun,
+  nums)
+[builtins fixtures/list.pyi]
+[out]
+IntExpr(13) : builtins.int
+ListExpr(13) : builtins.list[builtins.int]
+CallExpr(14) : builtins.None
+NameExpr(14) : def (s: builtins.int) -> builtins.int
+NameExpr(14) : def (fun: def (builtins.int) -> builtins.int, iter: builtins.list[builtins.int])
+NameExpr(15) : builtins.list[builtins.int]
+
+
+-- Special cases
+-- -------------
+
+
+[case testImplicitDataAttributeInit]
+## NameExpr
+import typing
+class A:
+    def __init__(self) -> None:
+        self.x = (
+                  A())
+[out]
+NameExpr(5) : A
+NameExpr(6) : def () -> A
+
+[case testListMultiplicationInContext]
+## ListExpr|OpExpr|IntExpr
+from typing import List
+a = [None] * 3 # type: List[str]
+[builtins fixtures/list.pyi]
+[out]
+IntExpr(3) : builtins.int
+ListExpr(3) : builtins.list[builtins.str]
+OpExpr(3) : builtins.list[builtins.str]
+
+
+-- TODO
+--
+-- test expressions
+--   list literal
+--   tuple literal
+--   unary minus
+--   indexing
+--   super expression
+--   more complex lambda (multiple arguments etc.)
+--   list comprehension
+--   generator expression
+-- overloads
+-- other things
+--   type inference
+--   default argument value
+--   for loop variable
+--   exception variable
+--   varargs
+-- generics
+--   explicit types
+-- type of 'None' (currently stripped, but sometimes we may want to dump it)
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 0000000..62e2f4c
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1,9 @@
+flake8
+flake8-bugbear; python_version >= '3.5'
+flake8-pyi; python_version >= '3.6'
+lxml; sys_platform != 'win32' or python_version == '3.5' or python_version == '3.6'
+typed-ast>=1.0.4,<1.1.0; sys_platform != 'win32' or python_version >= '3.5'
+pytest>=2.8
+pytest-xdist>=1.13
+pytest-cov>=2.4.0
+typing>=3.5.2; python_version < '3.5'
diff --git a/tmp-test-dirs/.gitignore b/tmp-test-dirs/.gitignore
new file mode 100644
index 0000000..e6579d8
--- /dev/null
+++ b/tmp-test-dirs/.gitignore
@@ -0,0 +1,4 @@
+# This directory is used to store temporary directories for the testsuite.
+# If anything manages to exist here, it means python crashed instead of
+# calling tempfile.TemporaryDirectory's cleanup while unwinding.
+# Therefore, don't actually provide any ignore patterns.
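For context, the .gitignore comment above leans on a standard-library guarantee: tempfile.TemporaryDirectory removes its directory both on normal exit and while an exception unwinds, so anything left behind implies the interpreter died without unwinding. A minimal illustrative sketch of that behaviour (plain stdlib only; no mypy-specific helpers or paths are assumed):

    import os
    import tempfile

    # Directory is usable inside the context and removed on normal exit.
    with tempfile.TemporaryDirectory() as tmp:
        assert os.path.isdir(tmp)
    assert not os.path.isdir(tmp)

    # Cleanup also runs while an exception unwinds through the context manager,
    # so only a hard crash (e.g. a killed process) can leave the directory behind.
    try:
        with tempfile.TemporaryDirectory() as tmp:
            raise RuntimeError("simulated test failure")
    except RuntimeError:
        pass
    assert not os.path.isdir(tmp)
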
diff --git a/typeshed/stdlib/2and3/csv.pyi b/typeshed/stdlib/2and3/csv.pyi
index a61edcb..8f6b57d 100644
--- a/typeshed/stdlib/2and3/csv.pyi
+++ b/typeshed/stdlib/2and3/csv.pyi
@@ -52,31 +52,27 @@ if sys.version_info >= (3,):
         quoting = ...  # type: int
 
 if sys.version_info >= (3, 6):
-    class DictReader(Iterator[OrderedDict[str, str]]):
-        restkey = ...  # type: Optional[str]
-        restval = ...  # type: Optional[str]
-        reader = ...  # type: _reader
-        dialect = ...  # type: _Dialect
-        line_num = ...  # type: int
-        fieldnames = ...  # type: Sequence[str]
-        def __init__(self, f: Iterator[str], fieldnames: Sequence[str] = ...,
-                     restkey: Optional[str] = ..., restval: Optional[str] = ..., dialect: _Dialect = ...,
-                     *args: Any, **kwds: Any) -> None: ...
-        def __iter__(self) -> Iterator[OrderedDict[str, str]]: ...
-        def next(self) -> OrderedDict[str, str]: ...
+    _DRMapping = OrderedDict[str, str]
 else:
-    class DictReader(Iterator[Dict[Any, str]]):
-        restkey = ...  # type: Optional[str]
-        restval = ...  # type: Optional[str]
-        reader = ...  # type: _reader
-        dialect = ...  # type: _Dialect
-        line_num = ...  # type: int
-        fieldnames = ...  # type: Sequence[str]
-        def __init__(self, f: Iterator[str], fieldnames: Sequence[str] = ...,
-                     restkey: Optional[str] = ..., restval: Optional[str] = ..., dialect: _Dialect = ...,
-                     *args: Any, **kwds: Any) -> None: ...
-        def __iter__(self) -> Iterator[OrderedDict[Any, str]]: ...
-        def next(self) -> OrderedDict[Any, str]: ...
+    _DRMapping = Dict[str, str]
+
+
+class DictReader(Iterator[_DRMapping]):
+    restkey = ...  # type: Optional[str]
+    restval = ...  # type: Optional[str]
+    reader = ...  # type: _reader
+    dialect = ...  # type: _Dialect
+    line_num = ...  # type: int
+    fieldnames = ...  # type: Sequence[str]
+    def __init__(self, f: Iterable[str], fieldnames: Sequence[str] = ...,
+                 restkey: Optional[str] = ..., restval: Optional[str] = ..., dialect: _Dialect = ...,
+                 *args: Any, **kwds: Any) -> None: ...
+    def __iter__(self) -> 'DictReader': ...
+    if sys.version_info >= (3,):
+        def __next__(self) -> _DRMapping: ...
+    else:
+        def next(self) -> _DRMapping: ...
+
 
 class DictWriter(object):
     fieldnames = ...  # type: Sequence[str]

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/mypy.git


